blob: e06efac6df2c91e2917adb5dcc9a288eabe00679 [file] [log] [blame] [edit]
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"
#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
#include <android_media_codec.h>
#include <inttypes.h>
#include <utils/Trace.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
#include <gui/Surface.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/RenderedFrameInfo.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>
#include <media/MediaBufferHolder.h>
#include <media/OMXBuffer.h>
#include <media/omx/1.0/Conversion.h>
#include <media/omx/1.0/WOmxNode.h>
#include <hidlmemory/mapping.h>
#include <media/openmax/OMX_AudioExt.h>
#include <media/openmax/OMX_VideoExt.h>
#include <media/openmax/OMX_Component.h>
#include <media/openmax/OMX_IndexExt.h>
#include <media/openmax/OMX_AsString.h>
#include "include/ACodecBufferChannel.h"
#include "include/DataConverter.h"
#include "include/SecureBuffer.h"
#include "include/SharedMemoryBuffer.h"
#include <media/stagefright/omx/OMXUtils.h>
#include <server_configurable_flags/get_flags.h>
namespace android {
typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
using hardware::media::omx::V1_0::Status;
using server_configurable_flags::GetServerConfigurableFlag;
enum {
    // Upper bound on param indices probed when enumerating supported formats
    // and profiles, so a misbehaving component cannot cause an endless loop.
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};
namespace {
// Codec-private AMessage/format keys used to control "tunnel peek"
// (showing the first frame of a tunneled stream before playback starts).
constexpr char TUNNEL_PEEK_KEY[] = "android._trigger-tunnel-peek";
constexpr char TUNNEL_PEEK_SET_LEGACY_KEY[] = "android._tunnel-peek-set-legacy";
}
// Reads the server-configurable "render_metrics_enabled" flag for the
// "media_native" namespace; defaults to disabled when the flag is unset.
static bool areRenderMetricsEnabled() {
    const std::string flagValue =
            GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
    return flagValue == "true";
}
// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors from 0x90000000 - 0x9000FFFF
// Extension OMX errors from 0x8F000000 - 0x90000000
// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
//
// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
// Returns true iff err falls inside the range reserved for OMX error codes.
static inline bool isOMXError(int32_t err) {
    return err >= ERROR_CODEC_MIN && err <= ERROR_CODEC_MAX;
}
// converts an OMX error to a status_t
// Translates an OMX error into a status_t.
// Component-name lookup failures become NAME_NOT_FOUND; any other
// recognized OMX error passes through unchanged; everything else maps to 0.
static inline status_t statusFromOMXError(int32_t omxError) {
    if (omxError == OMX_ErrorInvalidComponentName
            || omxError == OMX_ErrorComponentNotFound) {
        // can trigger illegal argument error for provided names.
        return NAME_NOT_FOUND;
    }
    if (isOMXError(omxError)) {
        return omxError;
    }
    return 0; // no translation required
}
// Converts a HIDL transport Return<Status> into a status_t:
// - transport succeeded: forward the remote Status value (cast to status_t;
//   withDefault is only a formality since isOk() guarantees a value);
// - remote process died: DEAD_OBJECT;
// - any other transport exception: UNKNOWN_ERROR.
static inline status_t statusFromBinderStatus(hardware::Return<Status> &&status) {
    if (status.isOk()) {
        return static_cast<status_t>(status.withDefault(Status::UNKNOWN_ERROR));
    } else if (status.isDeadObject()) {
        return DEAD_OBJECT;
    }
    // Other exception
    return UNKNOWN_ERROR;
}
// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
switch (err) {
// the following errors have side effects and may come
// from other code modules. Remap for safety reasons.
case INVALID_OPERATION:
case DEAD_OBJECT:
return UNKNOWN_ERROR;
default:
return err;
}
}
// Maps the client-facing "bitrate-mode" value
// (MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_*) to the OMX rate-control
// enum. Missing or unrecognized values fall back to variable bitrate.
static OMX_VIDEO_CONTROLRATETYPE getVideoBitrateMode(const sp<AMessage> &msg) {
    int32_t clientMode;
    if (!msg->findInt32("bitrate-mode", &clientMode)) {
        return OMX_Video_ControlRateVariable;
    }
    if (clientMode == 0) {  // BITRATE_MODE_CQ
        return OMX_Video_ControlRateConstantQuality;
    }
    if (clientMode == 1) {  // BITRATE_MODE_VBR
        return OMX_Video_ControlRateVariable;
    }
    if (clientMode == 2) {  // BITRATE_MODE_CBR
        return OMX_Video_ControlRateConstant;
    }
    return OMX_Video_ControlRateVariable;
}
// Determines the rate-control mode and fetches the matching control value:
// constant-quality mode reads "quality", every other mode reads "bitrate".
// Returns true iff the required key was present.
static bool findVideoBitrateControlInfo(const sp<AMessage> &msg,
        OMX_VIDEO_CONTROLRATETYPE *mode, int32_t *bitrate, int32_t *quality) {
    *mode = getVideoBitrateMode(msg);
    if (*mode == OMX_Video_ControlRateConstantQuality) {
        return msg->findInt32("quality", quality);
    }
    return msg->findInt32("bitrate", bitrate);
}
// Ref-counted wrapper around a list of AMessages so that a whole batch of
// OMX callbacks can be attached to (and travel with) a single notification.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    // Mutable access to the underlying list; callers append translated
    // omx_message entries here before posting.
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};
// Returns the process-wide pass-through DataConverter (plain copy).
// Uses a C++11 function-local static, whose initialization the language
// guarantees to be thread-safe and to run exactly once — this replaces the
// previous manual pthread_once + zero-inited global pair with identical
// once-only semantics and less machinery.
static sp<DataConverter> getCopyConverter() {
    static const sp<DataConverter> sCopyConverter = new DataConverter();
    return sCopyConverter;
}
// Observer registered with the OMX node; invoked on the OMX callback thread.
// Translates a batch of omx_message structs into AMessages and forwards them
// to ACodec's looper in one notification carrying a MessageList, preserving
// per-batch ordering.
struct CodecObserver : public BnOMXObserver {
    explicit CodecObserver(const sp<AMessage> &msg) : mNotify(msg) {}

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        // Duplicate the template notification so each batch posts its own copy.
        sp<AMessage> notify = mNotify->dup();
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            // Flatten the union payload into message fields by type.
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    // Unknown types are logged and forwarded with only "type" set.
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    // Template message; duplicated for every batch posted.
    const sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};
////////////////////////////////////////////////////////////////////////////////
// Common base for every ACodec state. Provides default handling for OMX
// callbacks and input/output buffer traffic; concrete states override the
// virtuals to customize behavior.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returned to us should be treated while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    // Non-owning back-pointer to the state machine's owner.
    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual void stateExited();
    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

    // Posts at most one extra output-metadata-buffer request at a time;
    // the flag is cleared when the message is handled.
    void maybePostExtraOutputMetadataBufferRequest() {
        if (!mPendingExtraOutputMetadataBufferRequest) {
            (new AMessage(kWhatSubmitExtraOutputMetadataBuffer, mCodec))->post();
            mPendingExtraOutputMetadataBufferRequest = true;
        }
    }

    void setSurfaceParameters(const sp<AMessage> &msg);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    bool mPendingExtraOutputMetadataBufferRequest;

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};
////////////////////////////////////////////////////////////////////////////////
// Posts a notification when either the binder or the HIDL media service
// hosting the codec dies, so ACodec can tear down cleanly.
struct ACodec::DeathNotifier :
        public IBinder::DeathRecipient,
        public ::android::hardware::hidl_death_recipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    // Binder death path.
    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

    // HIDL death path.
    virtual void serviceDied(
            uint64_t /* cookie */,
            const wp<::android::hidl::base::V1_0::IBase>& /* who */) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};
// Initial state: no OMX component allocated yet. Handles setup and
// component allocation requests.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    // Watches the OMX service; set once a component is being allocated.
    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};
////////////////////////////////////////////////////////////////////////////////
// Component allocated but not started (OMX "Loaded"). Handles configure,
// input-surface creation, start, and shutdown requests.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};
////////////////////////////////////////////////////////////////////////////////
// Transitional state: waiting for the Loaded->Idle OMX state change,
// allocating buffers on both ports on entry.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};
////////////////////////////////////////////////////////////////////////////////
// Transitional state: waiting for the Idle->Executing OMX state change.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};
////////////////////////////////////////////////////////////////////////////////
// Steady state: the component is executing and buffers flow in both
// directions.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    // True once resume() has put buffers into circulation.
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};
////////////////////////////////////////////////////////////////////////////////
// Active while the output port is reconfigured after an OMX
// PortSettingsChanged event (e.g. resolution change).
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};
////////////////////////////////////////////////////////////////////////////////
// Transitional state during shutdown: waiting for the Executing->Idle OMX
// state change and for all buffers to be returned to us.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    // Set once the component has reported reaching OMX Idle.
    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};
////////////////////////////////////////////////////////////////////////////////
// Transitional state during shutdown: waiting for the Idle->Loaded OMX
// state change after buffers have been freed.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};
////////////////////////////////////////////////////////////////////////////////
// Active while a flush is in progress; tracks per-port flush completion
// before returning to the executing state.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // One completion flag per port (input/output).
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};
////////////////////////////////////////////////////////////////////////////////
// Records fenceFd as this buffer's write fence; warns if an earlier,
// still-pending fence fd gets overwritten (a potential fd leak).
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    const int previousFd = mFenceFd;
    if (previousFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", previousFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}
// Records fenceFd as this buffer's read fence; warns if an earlier,
// still-pending fence fd gets overwritten (a potential fd leak).
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    const int previousFd = mFenceFd;
    if (previousFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", previousFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}
// Debug aid: logs when a pending read fence is about to be consumed
// in a context that expects a write fence.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    const bool pendingReadFence = (mFenceFd >= 0) && mIsReadFence;
    if (pendingReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}
// Debug aid: logs when a pending write fence is about to be consumed
// in a context that expects a read fence.
void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    const bool pendingWriteFence = (mFenceFd >= 0) && !mIsReadFence;
    if (pendingWriteFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}
////////////////////////////////////////////////////////////////////////////////
// Constructs the codec in the Uninitialized state. All state-machine state
// objects are created eagerly here; both ports default to preset byte-buffer
// mode until configuration says otherwise.
ACodec::ACodec()
    : mSampleRate(0),
      mNodeGeneration(0),
      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
      mIsWindowToDisplay(false),
      mHasPresentFenceTimes(false),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsImage(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mIsStreamCorruptFree(false),
      mIsLowLatency(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1LL),
      mMaxPtsGapUs(0LL),
      mMaxFps(-1),
      mFps(-1.0),
      mCaptureFps(-1.0),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0),
      mDescribeHDR10PlusInfoIndex((OMX_INDEXTYPE)0),
      mStateGeneration(0),
      mVendorExtensionsStatus(kExtensionsUnchecked) {
    memset(&mLastHDRStaticInfo, 0, sizeof(mLastHDRStaticInfo));

    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    mPortMode[kPortIndexInput] = IOMX::kPortModePresetByteBuffer;
    mPortMode[kPortIndexOutput] = IOMX::kPortModePresetByteBuffer;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}
// All members are ref-counted (sp<>/shared_ptr), so no manual cleanup is
// needed here.
ACodec::~ACodec() {
}
// Retargets the caller-provided message as a kWhatSetup request and posts
// it to this codec's looper (handled asynchronously).
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setTarget(this);
    msg->setWhat(kWhatSetup);
    msg->post();
}
std::shared_ptr<BufferChannelBase> ACodec::getBufferChannel() {
if (!mBufferChannel) {
mBufferChannel = std::make_shared<ACodecBufferChannel>(
new AMessage(kWhatInputBufferFilled, this),
new AMessage(kWhatOutputBufferDrained, this),
new AMessage(kWhatPollForRenderedBuffers, this));
}
return mBufferChannel;
}
void ACodec::signalSetParameters(const sp<AMessage> &params) {
sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
msg->setMessage("params", params);
msg->post();
}
// Retargets the caller's message as a kWhatAllocateComponent request and
// posts it to this codec's looper.
void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setTarget(this);
    msg->setWhat(kWhatAllocateComponent);
    msg->post();
}
// Retargets the caller's message as a kWhatConfigureComponent request and
// posts it to this codec's looper.
void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setTarget(this);
    msg->setWhat(kWhatConfigureComponent);
    msg->post();
}
// Synchronously requests an output-surface switch on the codec's looper and
// returns the handler's result. Transport failures take precedence; if the
// response carries no "err" field, the transport status is returned as-is.
status_t ACodec::setSurface(const sp<Surface> &surface, uint32_t /*generation*/) {
    sp<AMessage> request = new AMessage(kWhatSetSurface, this);
    request->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = request->postAndAwaitResponse(&response);
    if (err != OK) {
        return err;
    }
    (void)response->findInt32("err", &err);
    return err;
}
void ACodec::initiateCreateInputSurface() {
(new AMessage(kWhatCreateInputSurface, this))->post();
}
void ACodec::initiateSetInputSurface(
const sp<PersistentSurface> &surface) {
sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
msg->setObject("input-surface", surface);
msg->post();
}
void ACodec::signalEndOfInputStream() {
(new AMessage(kWhatSignalEndOfInputStream, this))->post();
}
void ACodec::initiateStart() {
(new AMessage(kWhatStart, this))->post();
}
void ACodec::signalFlush() {
ALOGV("[%s] signalFlush", mComponentName.c_str());
(new AMessage(kWhatFlush, this))->post();
}
void ACodec::signalResume() {
(new AMessage(kWhatResume, this))->post();
}
void ACodec::initiateShutdown(bool keepComponentAllocated) {
sp<AMessage> msg = new AMessage(kWhatShutdown, this);
msg->setInt32("keepComponentAllocated", keepComponentAllocated);
msg->post();
if (!keepComponentAllocated) {
// ensure shutdown completes in 3 seconds
(new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
}
}
void ACodec::signalRequestIDRFrame() {
(new AMessage(kWhatRequestIDRFrame, this))->post();
}
// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
mMetadataBuffersToSubmit > 0) {
(new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
}
}
// Switches the codec's output to a new Surface. Three phases are handled:
// (1) before start, a trivial pointer swap suffices; (2) while running, the
// new surface must be compatible (usage bits, min-undequeued count) and all
// registered buffers are migrated (attached, and undequeued ones canceled)
// onto it without reallocation; (3) unsetting a surface or switching from
// byte-buffer mode, and tunneled playback, are rejected.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        initializeFrameTracking();
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits, !storingMetadataInDecodedBuffers());
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    std::vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());
    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // dequeueBuffer cannot time out
    surface->setDequeueTimeout(-1);

    // for meta data mode, we move dequeud buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers()) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers[i];
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    // Commit the switch only after all migration steps succeeded.
    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    initializeFrameTracking();
    return OK;
}
// Applies the requested buffer mode to the given OMX port and caches it in
// mPortMode on success; on failure the cached mode is left untouched.
status_t ACodec::setPortMode(int32_t portIndex, IOMX::PortMode mode) {
    const status_t err = mOMXNode->setPortMode(portIndex, mode);
    if (err == OK) {
        mPortMode[portIndex] = mode;
        return OK;
    }
    ALOGE("[%s] setPortMode on %s to %s failed w/ err %d",
            mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output",
            asString(mode),
            err);
    return err;
}
// Allocates (or imports) all buffers for the given port according to its
// negotiated mode:
// - output port backed by a native window: delegates to the metadata or
//   native-window allocation path;
// - otherwise: sizes buffers from the OMX port definition (or metadata
//   struct size for dynamic modes), allocates hidl ashmem (or secure)
//   buffers, and optionally a separate conversion buffer for the client.
// On success the resulting buffer array is published to the buffer channel.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mAllocator[portIndex] == NULL);
    CHECK(mBuffers[portIndex].empty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            const IOMX::PortMode &mode = mPortMode[portIndex];
            size_t bufSize = def.nBufferSize;
            // Always allocate VideoNativeMetadata if using ANWBuffer.
            // OMX might use gralloc source internally, but we don't share
            // metadata buffer with OMX, OMX has its own headers.
            if (mode == IOMX::kPortModeDynamicANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (mode == IOMX::kPortModeDynamicNativeHandle) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = 32; // This is the value currently returned by
                                   // MemoryDealer::getAllocationAlignment().
                                   // TODO: Fix this when Treble has
                                   // MemoryHeap/MemoryDealer.

            ALOGV("[%s] Allocating %u buffers of size %zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, def.nBufferSize, asString(mode),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // Secure buffers are allocated by the component itself; every
            // other mode needs the hidl ashmem allocator.
            if (mode != IOMX::kPortModePresetSecureBuffer) {
                mAllocator[portIndex] = TAllocator::getService("ashmem");
                if (mAllocator[portIndex] == nullptr) {
                    ALOGE("hidl allocator on port %d is null",
                            (int)portIndex);
                    return NO_MEMORY;
                }
                // TODO: When Treble has MemoryHeap/MemoryDealer, we should
                // specify the heap size to be
                // def.nBufferCountActual * (alignedSize + alignedConvSize).
            }

            const sp<AMessage> &format =
                    portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
            mBuffers[portIndex].reserve(def.nBufferCountActual);
            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                hidl_memory hidlMemToken;
                sp<TMemory> hidlMem;
                sp<IMemory> mem;

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mGraphicBuffer = NULL;
                info.mNewGraphicBuffer = false;

                if (mode == IOMX::kPortModePresetSecureBuffer) {
                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMXNode->allocateSecureBuffer(
                            portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // The secure buffer is referenced either by a pointer or
                    // by a native handle, depending on what the component gave us.
                    info.mData = (native_handle == NULL)
                            ? new SecureBuffer(format, ptr, bufSize)
                            : new SecureBuffer(format, native_handle, bufSize);
                    info.mCodecData = info.mData;
                } else {
                    bool success;
                    auto transStatus = mAllocator[portIndex]->allocate(
                            bufSize,
                            [&success, &hidlMemToken](
                                    bool s,
                                    hidl_memory const& m) {
                                success = s;
                                hidlMemToken = m;
                            });

                    if (!transStatus.isOk()) {
                        ALOGE("hidl's AshmemAllocator failed at the "
                                "transport: %s",
                                transStatus.description().c_str());
                        return NO_MEMORY;
                    }
                    if (!success) {
                        return NO_MEMORY;
                    }
                    hidlMem = mapMemory(hidlMemToken);
                    if (hidlMem == nullptr) {
                        return NO_MEMORY;
                    }
                    err = mOMXNode->useBuffer(
                            portIndex, hidlMemToken, &info.mBufferID);

                    if (mode == IOMX::kPortModeDynamicANWBuffer) {
                        VideoNativeMetadata* metaData = (VideoNativeMetadata*)(
                                (void*)hidlMem->getPointer());
                        metaData->nFenceFd = -1;
                    }

                    info.mCodecData = new SharedMemoryBuffer(
                            format, hidlMem);
                    info.mCodecRef = hidlMem;

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        bool success;
                        mAllocator[portIndex]->allocate(
                                conversionBufferSize,
                                [&success, &hidlMemToken](
                                        bool s,
                                        hidl_memory const& m) {
                                    success = s;
                                    hidlMemToken = m;
                                });
                        if (!success) {
                            return NO_MEMORY;
                        }
                        hidlMem = mapMemory(hidlMemToken);
                        if (hidlMem == nullptr) {
                            return NO_MEMORY;
                        }
                        info.mData = new SharedMemoryBuffer(format, hidlMem);
                        info.mMemRef = hidlMem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push_back(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Publish the (buffer, id) pairs to the buffer channel for this port.
    std::vector<ACodecBufferChannel::BufferAndId> array(mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        array[i] = {mBuffers[portIndex][i].mData, mBuffers[portIndex][i].mBufferID};
    }
    if (portIndex == kPortIndexInput) {
        mBufferChannel->setInputBufferArray(array);
    } else if (portIndex == kPortIndexOutput) {
        mBufferChannel->setOutputBufferArray(array);
    } else {
        TRESPASS();
    }

    return OK;
}
// Configures the native window with the codec's current output frame size,
// color format, rotation and gralloc usage bits. Best-effort: forwards the
// window's consumer usage to the component via a vendor extension, merges
// component/protected/video usage flags, and reports the final producer
// usage through *finalUsage. Also resets cached crop/dataspace state.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_INDEXTYPE index;
    err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.AndroidNativeBufferConsumerUsage",
            &index);

    if (err != OK) {
        // allow failure -- the extension is optional
        err = OK;
    } else {
        int usageBits = 0;
        if (nativeWindow->query(
                nativeWindow,
                NATIVE_WINDOW_CONSUMER_USAGE_BITS,
                &usageBits) == OK) {
            OMX_PARAM_U32TYPE params;
            InitOMXParams(&params);
            params.nPortIndex = kPortIndexOutput;
            params.nU32 = (OMX_U32)usageBits;

            err = mOMXNode->setParameter(index, &params, sizeof(params));

            if (err != OK) {
                ALOGE("Fail to set AndroidNativeBufferConsumerUsage: %d", err);
                return err;
            }
        }
    }

    OMX_U32 usage = 0;
    err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}
// Configures the native window for the output port and negotiates the buffer
// count with both the window and the component. On success:
//   |bufferCount|          - number of output buffers the component will use
//   |bufferSize|           - size of each buffer in bytes
//   |minUndequeuedBuffers| - buffers that must stay with the native window
//                            (increased by any extra buffers we allocated)
// For tunneled playback, no window buffers are allocated and all three
// out-params are set to 0.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits,
                preregister && !mTunneled /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Never time out waiting to dequeue a buffer from the window.
    static_cast<Surface *>(mNativeWindow.get())->setDequeueTimeout(-1);

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // If the component rejects the larger count, retry with progressively
    // fewer extra buffers until it accepts or we run out (then fail).
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // Any extra buffers we secured are treated as additional
            // undequeued buffers, so the codec keeps them in reserve.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize =  def.nBufferSize;

    initializeFrameTracking();

    return err;
}
// Allocates output buffers by dequeuing graphic buffers from the native
// window and registering each one with the OMX component (non-metadata mode).
// The minimum-undequeued buffers (and, on error, all dequeued buffers) are
// cancelled back to the window before returning.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    // This method only handles the non-metadata mode (or simulating legacy
    // mode with metadata, which is transparent to ACodec).
    CHECK(!storingMetadataInDecodedBuffers());

    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Allow the producer to allocate while we dequeue the initial buffer set;
    // re-disabled at the bottom of this method.
    static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(true);

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    mBuffers[kPortIndexOutput].reserve(bufferCount);
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(GraphicBuffer::from(buf));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mGraphicBuffer = graphicBuffer;
        info.mNewGraphicBuffer = false;
        info.mDequeuedAt = mDequeueCounter;

        // TODO: We shouldn't need to create MediaCodecBuffer. In metadata mode
        //       OMX doesn't use the shared memory buffer, but some code still
        //       access info.mData. Create an ABuffer as a placeholder.
        info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));
        info.mCodecData = info.mData;

        mBuffers[kPortIndexOutput].push_back(info);

        IOMX::buffer_id bufferId;
        err = mOMXNode->useBuffer(kPortIndexOutput, graphicBuffer, &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput][i].mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != OK) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued. Also cancel all if we're in legacy metadata mode.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput][i];
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Report the first error encountered; keep cancelling the rest.
            if (err == 0) {
                err = error;
            }
        }
    }

    static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(false);

    return err;
}
// Allocates output buffers for metadata (dynamic-ANW) mode. Each slot holds a
// small metadata buffer registered with the component; the actual graphic
// buffers are dequeued lazily from the native window. All slots start out
// OWNED_BY_NATIVE_WINDOW. Returns the first error from the window
// configuration or from registering a metadata buffer with the component.
status_t ACodec::allocateOutputMetadataBuffers() {
    CHECK(storingMetadataInDecodedBuffers());

    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mFlags & kFlagPreregisterMetadataBuffers /* preregister */);
    if (err != OK)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    mBuffers[kPortIndexOutput].reserve(bufferCount);
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mGraphicBuffer = NULL;
        info.mNewGraphicBuffer = false;
        info.mDequeuedAt = mDequeueCounter;

        info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));

        // Initialize fence fd to -1 to avoid warning in freeBuffer().
        ((VideoNativeMetadata *)info.mData->base())->nFenceFd = -1;

        info.mCodecData = info.mData;

        err = mOMXNode->useBuffer(kPortIndexOutput, OMXBuffer::sPreset, &info.mBufferID);
        if (err != OK) {
            // Don't track a buffer that was never registered with the
            // component, and don't keep trying with the remaining slots --
            // the error is returned to the caller below.
            ALOGE("[%s] registering meta buffer %u with OMX IL component "
                    "failed: %d", mComponentName.c_str(), i, err);
            break;
        }
        mBuffers[kPortIndexOutput].push_back(info);

        ALOGV("[%s] allocated meta buffer with ID %u",
                mComponentName.c_str(), info.mBufferID);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}
// While metadata buffers are still owed to the component, dequeues one
// buffer from the native window and hands it to the component for filling.
// Returns ERROR_IO if no buffer could be dequeued.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());

    if (mMetadataBuffersToSubmit == 0) {
        return OK;
    }

    BufferInfo *const slot = dequeueBufferFromNativeWindow();
    if (slot == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
            mComponentName.c_str(), slot->mBufferID, slot->mGraphicBuffer->handle);

    --mMetadataBuffersToSubmit;
    slot->checkWriteFence("submitOutputMetadataBuffer");
    return fillBuffer(slot);
}
// Blocks until the fence |fd| signals (up to IOMX::kFenceTimeoutMs).
// A negative fd means "no fence" and succeeds immediately. |dbg| labels the
// call site in the timeout warning.
status_t ACodec::waitForFence(int fd, const char *dbg) {
    if (fd < 0) {
        return OK;
    }
    sp<Fence> fence = new Fence(fd);
    const status_t res = fence->wait(IOMX::kFenceTimeoutMs);
    ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    return res;
}
// static
// Human-readable name for a buffer ownership state, used by dumpBuffers()
// and log messages.
const char *ACodec::_asString(BufferInfo::Status s) {
    if (s == BufferInfo::OWNED_BY_US)            return "OUR";
    if (s == BufferInfo::OWNED_BY_COMPONENT)     return "COMPONENT";
    if (s == BufferInfo::OWNED_BY_UPSTREAM)      return "UPSTREAM";
    if (s == BufferInfo::OWNED_BY_DOWNSTREAM)    return "DOWNSTREAM";
    if (s == BufferInfo::OWNED_BY_NATIVE_WINDOW) return "SURFACE";
    if (s == BufferInfo::UNRECOGNIZED)           return "UNRECOGNIZED";
    return "?";
}
// Logs the state of every buffer tracked on |portIndex| (input or output):
// slot, buffer ID, graphic-buffer pointers, ownership and dequeue age.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    const auto &buffers = mBuffers[portIndex];
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", buffers.size());
    size_t slot = 0;
    for (const BufferInfo &info : buffers) {
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                slot, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
        ++slot;
    }
}
// Returns a buffer we own back to the native window via cancelBuffer.
// Ownership transitions to OWNED_BY_NATIVE_WINDOW even if the cancel fails,
// so our bookkeeping never drifts from the window's view.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
            mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    const int result = mNativeWindow->cancelBuffer(
            mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    // The fence fd was handed to cancelBuffer; stop tracking it.
    info->mFenceFd = -1;

    ALOGW_IF(result != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);

    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
    return result;
}
// Forwards the "first tunnel frame ready" notification to the client callback.
void ACodec::onFirstTunnelFrameReady() {
    mCallback->onFirstTunnelFrameReady();
}
// Dequeues a buffer from the native window and maps it back to one of our
// tracked BufferInfo slots. Stale buffers (known but not currently with the
// window, or unknown in non-meta mode) are discarded and the dequeue is
// retried. In metadata mode an unknown buffer replaces the least-recently
// dequeued slot still owned by the window. Returns NULL on failure, in
// tunneled mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        // fixed duplicated word in the log message ("mode mode")
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput][i];

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer #%u with age %u, graphicBuffer %p",
                        (unsigned)(info - &mBuffers[kPortIndexOutput][0]),
                        mDequeueCounter - info->mDequeuedAt,
                        info->mGraphicBuffer->handle);

                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && !storingMetadataInDecodedBuffers()) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info = &mBuffers[kPortIndexOutput][i];
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = GraphicBuffer::from(buf);
    oldest->mNewGraphicBuffer = true;
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");

    ALOGV("replaced oldest buffer #%u with age %u, graphicBuffer %p",
            (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
            mDequeueCounter - oldest->mDequeuedAt,
            oldest->mGraphicBuffer->handle);
    return oldest;
}
void ACodec::initializeFrameTracking() {
mTrackedFrames.clear();
int isWindowToDisplay = 0;
mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER,
&isWindowToDisplay);
mIsWindowToDisplay = isWindowToDisplay == 1;
// No frame tracking is needed if we're not sending frames to the display
if (!mIsWindowToDisplay) {
// Return early so we don't call into SurfaceFlinger (requiring permissions)
return;
}
int hasPresentFenceTimes = 0;
mNativeWindow->query(mNativeWindow.get(), NATIVE_WINDOW_FRAME_TIMESTAMPS_SUPPORTS_PRESENT,
&hasPresentFenceTimes);
mHasPresentFenceTimes = hasPresentFenceTimes == 1;
if (!mHasPresentFenceTimes) {
ALOGI("Using latch times for frame rendered signals - present fences not supported");
}
status_t err = native_window_enable_frame_timestamps(mNativeWindow.get(), true);
if (err) {
ALOGE("Failed to enable frame timestamps (%d)", err);
}
}
// Records a frame just queued to the surface so pollForRenderedFrames() can
// later determine whether it was actually rendered.
void ACodec::trackReleasedFrame(int64_t frameId, int64_t mediaTimeUs, int64_t desiredRenderTimeNs) {
    const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
    // Clamp the desired render time into [now, now + 1s]:
    // * A render time in the past means "render ASAP", so track it as now.
    // * A render time more than a second out is treated by SurfaceFlinger's
    //   heuristics as "render immediately"; mirroring that here (a tight
    //   coupling, but the only way) lets us optimize away unnecessary
    //   present-fence checks in processRenderedFrames.
    if (desiredRenderTimeNs < nowNs
            || desiredRenderTimeNs > nowNs + 1*1000*1000*1000LL) {
        desiredRenderTimeNs = nowNs;
    }

    TrackedFrame frame;
    frame.id = frameId;
    frame.mediaTimeUs = mediaTimeUs;
    frame.desiredRenderTimeNs = desiredRenderTimeNs;
    mTrackedFrames.push_back(frame);
}
void ACodec::pollForRenderedFrames() {
std::list<RenderedFrameInfo> renderedFrameInfos;
// Scan all frames and check to see if the frames that SHOULD have been rendered by now, have,
// in fact, been rendered.
int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
while (!mTrackedFrames.empty()) {
TrackedFrame & frame = mTrackedFrames.front();
// Frames that should have been rendered at least 100ms in the past are checked
if (frame.desiredRenderTimeNs > nowNs - 100*1000*1000LL) {
break;
}
status_t err;
nsecs_t latchOrPresentTimeNs = NATIVE_WINDOW_TIMESTAMP_INVALID;
err = native_window_get_frame_timestamps(mNativeWindow.get(), frame.id,
/* outRequestedPresentTime */ nullptr, /* outAcquireTime */ nullptr,
mHasPresentFenceTimes ? nullptr : &latchOrPresentTimeNs, // latch time
/* outFirstRefreshStartTime */ nullptr, /* outLastRefreshStartTime */ nullptr,
/* outGpuCompositionDoneTime */ nullptr,
mHasPresentFenceTimes ? &latchOrPresentTimeNs : nullptr, // display present time,
/* outDequeueReadyTime */ nullptr, /* outReleaseTime */ nullptr);
if (err) {
ALOGE("Failed to get frame timestamps for %lld: %d", (long long) frame.id, err);
}
// If we don't have a render time by now, then consider the frame as dropped
if (latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_PENDING &&
latchOrPresentTimeNs != NATIVE_WINDOW_TIMESTAMP_INVALID) {
renderedFrameInfos.push_back(RenderedFrameInfo(frame.mediaTimeUs,
latchOrPresentTimeNs));
}
mTrackedFrames.pop_front();
}
if (!renderedFrameInfos.empty()) {
mCallback->onOutputFramesRendered(renderedFrameInfos);
}
}
// Releases every buffer tracked on |portIndex| after detaching the buffer
// array from the buffer channel. Frees continue past individual failures;
// the first error encountered is returned.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    if (portIndex == kPortIndexInput) {
        mBufferChannel->setInputBufferArray({});
    } else {
        mBufferChannel->setOutputBufferArray({});
    }

    // Free from the back so index-based erasure inside freeBuffer() stays valid.
    status_t firstError = OK;
    for (size_t i = mBuffers[portIndex].size(); i-- > 0; ) {
        const status_t err = freeBuffer(portIndex, i);
        if (firstError == OK) {
            firstError = err;
        }
    }

    mAllocator[portIndex].clear();
    return firstError;
}
// Frees every output buffer except those currently held by the component or
// being drained downstream. Frees continue past individual failures; the
// first error encountered is returned.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t firstError = OK;
    // Iterate backwards: freeBuffer() erases by index.
    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0; ) {
        const BufferInfo &info = mBuffers[kPortIndexOutput][i];

        // At this time some buffers may still be with the component
        // or being drained.
        const bool inFlight = (info.mStatus == BufferInfo::OWNED_BY_COMPONENT)
                || (info.mStatus == BufferInfo::OWNED_BY_DOWNSTREAM);
        if (!inFlight) {
            const status_t err = freeBuffer(kPortIndexOutput, i);
            if (firstError == OK) {
                firstError = err;
            }
        }
    }
    return firstError;
}
// Frees the buffer in slot |i| of |portIndex|: cancels it back to the native
// window if we own it (output only), unregisters it from the component, closes
// any leftover fence, and removes it from tracking. The slot is removed even
// when the component-side free fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex][i];
    status_t err = OK;

    // there should not be any fences in the metadata
    if (mPortMode[portIndex] == IOMX::kPortModeDynamicANWBuffer && info->mCodecData != NULL
            && info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mCodecData->base())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    const bool ownedByUs = (info->mStatus == BufferInfo::OWNED_BY_US);
    if (ownedByUs || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
        if (ownedByUs && portIndex == kPortIndexOutput && mNativeWindow != NULL) {
            // Hand the buffer back to the window first; ignore cancel errors.
            (void)cancelBufferToNativeWindow(info);
        }
        err = mOMXNode->freeBuffer(portIndex, info->mBufferID);
    } else {
        ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
        err = FAILED_TRANSACTION;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    // remove buffer even if mOMXNode->freeBuffer fails
    mBuffers[portIndex].erase(mBuffers[portIndex].begin() + i);
    return err;
}
// Looks up the BufferInfo on |portIndex| with the given OMX buffer ID.
// If |index| is non-null it receives the slot index. Returns NULL (after
// logging an error) when no buffer matches.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    size_t slot = 0;
    for (BufferInfo &info : mBuffers[portIndex]) {
        if (info.mBufferID == bufferID) {
            if (index != NULL) {
                *index = slot;
            }
            return &info;
        }
        ++slot;
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}
// Submits an output buffer to the component to be filled. On success the
// buffer transitions to OWNED_BY_COMPONENT; the fence fd is handed off either
// way.
status_t ACodec::fillBuffer(BufferInfo *info) {
    // Even in dynamic ANW buffer mode, if the graphic buffer is not changing,
    // send sPreset instead of the same graphic buffer, so that OMX server
    // side doesn't update the meta. In theory it should make no difference,
    // however when the same buffer is parcelled again, a new handle could be
    // created on server side, and some decoder doesn't recognize the handle
    // even if it's the same buffer.
    const bool sendGraphicBuffer =
            storingMetadataInDecodedBuffers() && info->mNewGraphicBuffer;

    status_t err;
    if (sendGraphicBuffer) {
        err = mOMXNode->fillBuffer(
                info->mBufferID, info->mGraphicBuffer, info->mFenceFd);
    } else {
        err = mOMXNode->fillBuffer(
                info->mBufferID, OMXBuffer::sPreset, info->mFenceFd);
    }

    info->mNewGraphicBuffer = false;
    info->mFenceFd = -1;  // ownership transferred to fillBuffer
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }
    return err;
}
// Sets the standard OMX component role derived from |mime| and the
// encoder/decoder direction. Returns BAD_VALUE when no role is defined for
// the mime type; otherwise forwards the result of applying the role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = GetComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    const status_t err = SetComponentRole(mOMXNode, role);
    ALOGW_IF(err != OK, "[%s] Failed to set standard component role '%s'.",
            mComponentName.c_str(), role);
    return err;
}
status_t ACodec::configureCodec(
const char *mime, const sp<AMessage> &msg) {
int32_t encoder;
if (!msg->findInt32("encoder", &encoder)) {
encoder = false;
}
sp<AMessage> inputFormat = new AMessage;
sp<AMessage> outputFormat = new AMessage;
mConfigFormat = msg;
mIsEncoder = encoder;
mIsVideo = !strncasecmp(mime, "video/", 6);
mIsImage = !strncasecmp(mime, "image/", 6);
mPortMode[kPortIndexInput] = IOMX::kPortModePresetByteBuffer;
mPortMode[kPortIndexOutput] = IOMX::kPortModePresetByteBuffer;
status_t err = setComponentRole(encoder /* isEncoder */, mime);
if (err != OK) {
return err;
}
OMX_VIDEO_CONTROLRATETYPE bitrateMode;
int32_t bitrate = 0, quality;
// FLAC encoder or video encoder in constant quality mode doesn't need a
// bitrate, other encoders do.
if (encoder) {
if (mIsVideo || mIsImage) {
if (!findVideoBitrateControlInfo(msg, &bitrateMode, &bitrate, &quality)) {
return INVALID_OPERATION;
}
} else if (strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
&& !msg->findInt32("bitrate", &bitrate)) {
return INVALID_OPERATION;
}
}
// propagate bitrate to the output so that the muxer has it
if (encoder && msg->findInt32("bitrate", &bitrate)) {
// Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
// average bitrate. We've been setting both bitrate and max-bitrate to this same value.
outputFormat->setInt32("bitrate", bitrate);
outputFormat->setInt32("max-bitrate", bitrate);
}
int32_t storeMeta;
if (encoder) {
IOMX::PortMode mode = IOMX::kPortModePresetByteBuffer;
if (msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
&& storeMeta != kMetadataBufferTypeInvalid) {
if (storeMeta == kMetadataBufferTypeNativeHandleSource) {
mode = IOMX::kPortModeDynamicNativeHandle;
} else if (storeMeta == kMetadataBufferTypeANWBuffer ||
storeMeta == kMetadataBufferTypeGrallocSource) {
mode = IOMX::kPortModeDynamicANWBuffer;
} else {
return BAD_VALUE;
}
}
err = setPortMode(kPortIndexInput, mode);
if (err != OK) {
return err;
}
if (mode != IOMX::kPortModePresetByteBuffer) {
uint32_t usageBits;
if (mOMXNode->getParameter(
(OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
&usageBits, sizeof(usageBits)) == OK) {
inputFormat->setInt32(
"using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
}
}
}
int32_t lowLatency = 0;
if (msg->findInt32("low-latency", &lowLatency)) {
err = setLowLatency(lowLatency);
if (err != OK) {
return err;
}
}
int32_t prependSPSPPS = 0;
if (encoder && mIsVideo
&& msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
&& prependSPSPPS != 0) {
OMX_INDEXTYPE index;
err = mOMXNode->getExtensionIndex(
"OMX.google.android.index.prependSPSPPSToIDRFrames", &index);
if (err == OK) {
PrependSPSPPSToIDRFramesParams params;
InitOMXParams(&params);
params.bEnable = OMX_TRUE;
err = mOMXNode->setParameter(index, &params, sizeof(params));
}
if (err != OK) {
ALOGE("Encoder could not be configured to emit SPS/PPS before "
"IDR frames. (err %d)", err);
return err;
}
}
// Only enable metadata mode on encoder output if encoder can prepend
// sps/pps to idr frames, since in metadata mode the bitstream is in an
// opaque handle, to which we don't have access.
if (encoder && mIsVideo) {
OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
&& msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
&& storeMeta != 0);
if (mFlags & kFlagIsSecure) {
enable = OMX_TRUE;
}
err = setPortMode(kPortIndexOutput, enable ?
IOMX::kPortModePresetSecureBuffer : IOMX::kPortModePresetByteBuffer);
if (err != OK) {
return err;
}
if (!msg->findInt64(
KEY_REPEAT_PREVIOUS_FRAME_AFTER, &mRepeatFrameDelayUs)) {
mRepeatFrameDelayUs = -1LL;
}
if (!msg->findDouble("time-lapse-fps", &mCaptureFps)) {
float captureRate;
if (msg->findAsFloat(KEY_CAPTURE_RATE, &captureRate)) {
mCaptureFps = captureRate;
} else {
mCaptureFps = -1.0;
}
}
if (!msg->findInt32(
KEY_CREATE_INPUT_SURFACE_SUSPENDED,
(int32_t*)&mCreateInputBuffersSuspended)) {
mCreateInputBuffersSuspended = false;
}
}
if (encoder && (mIsVideo || mIsImage)) {
// only allow 32-bit value, since we pass it as U32 to OMX.
if (!msg->findInt64(KEY_MAX_PTS_GAP_TO_ENCODER, &mMaxPtsGapUs)) {
mMaxPtsGapUs = 0LL;
} else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < INT32_MIN) {
ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs);
mMaxPtsGapUs = 0LL;
}
if (!msg->findFloat(KEY_MAX_FPS_TO_ENCODER, &mMaxFps)) {
mMaxFps = -1;
}
// notify GraphicBufferSource to allow backward frames
if (mMaxPtsGapUs < 0LL) {
mMaxFps = -1;
}
}
// NOTE: we only use native window for video decoders
sp<RefBase> obj;
bool haveNativeWindow = msg->findObject("native-window", &obj)
&& obj != NULL && mIsVideo && !encoder;
mUsingNativeWindow = haveNativeWindow;
if (mIsVideo && !encoder) {
inputFormat->setInt32("adaptive-playback", false);
int32_t usageProtected;
if (msg->findInt32("protected", &usageProtected) && usageProtected) {
if (!haveNativeWindow) {
ALOGE("protected output buffers must be sent to an ANativeWindow");
return PERMISSION_DENIED;
}
mFlags |= kFlagIsGrallocUsageProtected;
mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
}
}
if (mFlags & kFlagIsSecure) {
// use native_handles for secure input buffers
err = setPortMode(kPortIndexInput, IOMX::kPortModePresetSecureBuffer);
if (err != OK) {
ALOGI("falling back to non-native_handles");
setPortMode(kPortIndexInput, IOMX::kPortModePresetByteBuffer);
err = OK; // ignore error for now
}
OMX_INDEXTYPE index;
if (mOMXNode->getExtensionIndex(
"OMX.google.android.index.preregisterMetadataBuffers", &index) == OK) {
OMX_CONFIG_BOOLEANTYPE param;
InitOMXParams(&param);
param.bEnabled = OMX_FALSE;
if (mOMXNode->getParameter(index, &param, sizeof(param)) == OK) {
if (param.bEnabled == OMX_TRUE) {
mFlags |= kFlagPreregisterMetadataBuffers;
}
}
}
}
if (haveNativeWindow) {
sp<ANativeWindow> nativeWindow =
static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));
// START of temporary support for automatic FRC - THIS WILL BE REMOVED
int32_t autoFrc;
if (msg->findInt32("auto-frc", &autoFrc)) {
bool enabled = autoFrc;
OMX_CONFIG_BOOLEANTYPE config;
InitOMXParams(&config);
config.bEnabled = (OMX_BOOL)enabled;
status_t temp = mOMXNode->setConfig(
(OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
&config, sizeof(config));
if (temp == OK) {
outputFormat->setInt32("auto-frc", enabled);
} else if (enabled) {
ALOGI("codec does not support requested auto-frc (err %d)", temp);
}
}
// END of temporary support for automatic FRC
int32_t tunneled;
if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
tunneled != 0) {
ALOGI("Configuring TUNNELED video playback.");
mTunneled = true;
int32_t audioHwSync = 0;
if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
ALOGW("No Audio HW Sync provided for video tunnel");
}
err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
if (err != OK) {
ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
audioHwSync, nativeWindow.get());
return err;
}
int32_t maxWidth = 0, maxHeight = 0;
if (msg->findInt32("max-width", &maxWidth) &&
msg->findInt32("max-height", &maxHeight)) {
err = mOMXNode->prepareForAdaptivePlayback(
kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
if (err != OK) {
ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
mComponentName.c_str(), err);
// allow failure
err = OK;
} else {
inputFormat->setInt32("max-width", maxWidth);
inputFormat->setInt32("max-height", maxHeight);
inputFormat->setInt32("adaptive-playback", true);
}
}
} else {
ALOGV("Configuring CPU controlled video playback.");
mTunneled = false;
// Explicity reset the sideband handle of the window for
// non-tunneled video in case the window was previously used
// for a tunneled video playback.
err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
if (err != OK) {
ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
return err;
}
err = setPortMode(kPortIndexOutput, IOMX::kPortModeDynamicANWBuffer);
if (err != OK) {
// if adaptive playback has been requested, try JB fallback
// NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
// LARGE MEMORY REQUIREMENT
// we will not do adaptive playback on software accessed
// surfaces as they never had to respond to changes in the
// crop window, and we don't trust that they will be able to.
int usageBits = 0;
bool canDoAdaptivePlayback;
if (nativeWindow->query(
nativeWindow.get(),
NATIVE_WINDOW_CONSUMER_USAGE_BITS,
&usageBits) != OK) {
canDoAdaptivePlayback = false;
} else {
canDoAdaptivePlayback =
(usageBits &
(GRALLOC_USAGE_SW_READ_MASK |
GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
}
int32_t maxWidth = 0, maxHeight = 0;
if (canDoAdaptivePlayback &&
msg->findInt32("max-width", &maxWidth) &&
msg->findInt32("max-height", &maxHeight)) {
ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
mComponentName.c_str(), maxWidth, maxHeight);
err = mOMXNode->prepareForAdaptivePlayback(
kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
ALOGW_IF(err != OK,
"[%s] prepareForAdaptivePlayback failed w/ err %d",
mComponentName.c_str(), err);
if (err == OK) {
inputFormat->setInt32("max-width", maxWidth);
inputFormat->setInt32("max-height", maxHeight);
inputFormat->setInt32("adaptive-playback", true);
}
}
// allow failure
err = OK;
} else {
ALOGV("[%s] setPortMode on output to %s succeeded",
mComponentName.c_str(), asString(IOMX::kPortModeDynamicANWBuffer));
CHECK(storingMetadataInDecodedBuffers());
inputFormat->setInt32("adaptive-playback", true);
}
int32_t push;
if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
&& push != 0) {
mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
}
}
int32_t rotationDegrees;
if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
mRotationDegrees = rotationDegrees;
} else {
mRotationDegrees = 0;
}
}
AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
(void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
// invalid encodings will default to PCM-16bit in setupRawAudioFormat.
if (mIsVideo || mIsImage) {
// determine need for software renderer
bool usingSwRenderer = false;
if (haveNativeWindow) {
bool requiresSwRenderer = false;
OMX_PARAM_U32TYPE param;
InitOMXParams(&param);
param.nPortIndex = kPortIndexOutput;
status_t err = mOMXNode->getParameter(
(OMX_INDEXTYPE)OMX_IndexParamVideoAndroidRequiresSwRenderer,
&param, sizeof(param));
if (err == OK && param.nU32 == 1) {
requiresSwRenderer = true;
}
if (mComponentName.startsWith("OMX.google.") || requiresSwRenderer) {
usingSwRenderer = true;
haveNativeWindow = false;
(void)setPortMode(kPortIndexOutput, IOMX::kPortModePresetByteBuffer);
} else if (!storingMetadataInDecodedBuffers()) {
err = setPortMode(kPortIndexOutput, IOMX::kPortModePresetANWBuffer);
if (err != OK) {
return err;
}
}
}
if (encoder) {
err = setupVideoEncoder(mime, msg, outputFormat, inputFormat);
} else {
err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
}
if (err != OK) {
return err;
}
if (haveNativeWindow) {
mNativeWindow = static_cast<Surface *>(obj.get());
// fallback for devices that do not handle flex-YUV for native buffers
int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
if (msg->findInt32("color-format", &requestedColorFormat) &&
requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
status_t err = getPortFormat(kPortIndexOutput, outputFormat);
if (err != OK) {
return err;
}
int32_t colorFormat = OMX_COLOR_FormatUnused;
OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
if (!outputFormat->findInt32("color-format", &colorFormat)) {
ALOGE("output port did not have a color format (wrong domain?)");
return BAD_VALUE;
}
ALOGD("[%s] Requested output format %#x and got %#x.",
mComponentName.c_str(), requestedColorFormat, colorFormat);
if (!IsFlexibleColorFormat(
mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
|| flexibleEquivalent != (OMX_U32)requestedColorFormat) {
// device did not handle flex-YUV request for native window, fall back
// to SW renderer
ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
mNativeWindow.clear();
mNativeWindowUsageBits = 0;
haveNativeWindow = false;
usingSwRenderer = true;
// TODO: implement adaptive-playback support for bytebuffer mode.
// This is done by SW codecs, but most HW codecs don't support it.
err = setPortMode(kPortIndexOutput, IOMX::kPortModePresetByteBuffer);
inputFormat->setInt32("adaptive-playback", false);
if (mFlags & kFlagIsGrallocUsageProtected) {
// fallback is not supported for protected playback
err = PERMISSION_DENIED;
} else if (err == OK) {
err = setupVideoDecoder(
mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
}
}
}
}
if (usingSwRenderer) {
outputFormat->setInt32("using-sw-renderer", 1);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG) ||
!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)) {
int32_t numChannels, sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
// Since we did not always check for these, leave them optional
// and have the decoder figure it all out.
err = OK;
} else {
err = setupRawAudioFormat(
encoder ? kPortIndexInput : kPortIndexOutput,
sampleRate,
numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
int32_t numChannels, sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
err = INVALID_OPERATION;
} else {
int32_t isADTS, aacProfile;
int32_t sbrMode;
int32_t maxOutputChannelCount;
int32_t pcmLimiterEnable;
drcParams_t drc;
if (!msg->findInt32("is-adts", &isADTS)) {
isADTS = 0;
}
if (!msg->findInt32("aac-profile", &aacProfile)) {
aacProfile = OMX_AUDIO_AACObjectNull;
}
if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
sbrMode = -1;
}
if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
// check non AAC-specific key
if (!msg->findInt32("max-output-channel-count", &maxOutputChannelCount)) {
maxOutputChannelCount = -1;
}
}
if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
// value is unknown
pcmLimiterEnable = -1;
}
if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
// value is unknown
drc.encodedTargetLevel = -1;
}
if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
// value is unknown
drc.drcCut = -1;
}
if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
// value is unknown
drc.drcBoost = -1;
}
if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
// value is unknown
drc.heavyCompression = -1;
}
if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
// value is unknown
drc.targetRefLevel = -2;
}
if (!msg->findInt32("aac-drc-effect-type", &drc.effectType)) {
// value is unknown
drc.effectType = -2; // valid values are -1 and over
}
if (!msg->findInt32("aac-drc-album-mode", &drc.albumMode)) {
// value is unknown
drc.albumMode = -1; // valid values are 0 and 1
}
if (!msg->findInt32("aac-drc-output-loudness", &drc.outputLoudness)) {
// value is unknown
drc.outputLoudness = -1;
}
err = setupAACCodec(
encoder, numChannels, sampleRate, bitrate, aacProfile,
isADTS != 0, sbrMode, maxOutputChannelCount, drc,
pcmLimiterEnable);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
err = setupAMRCodec(encoder, false /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
err = setupAMRCodec(encoder, true /* isWAMR */, bitrate);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
// These are PCM-like formats with a fixed sample rate but
// a variable number of channels.
int32_t numChannels;
if (!msg->findInt32("channel-count", &numChannels)) {
err = INVALID_OPERATION;
} else {
int32_t sampleRate;
if (!msg->findInt32("sample-rate", &sampleRate)) {
sampleRate = 8000;
}
err = setupG711Codec(encoder, sampleRate, numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_OPUS)) {
int32_t numChannels = 1, sampleRate = 48000;
if (msg->findInt32("channel-count", &numChannels) &&
msg->findInt32("sample-rate", &sampleRate)) {
err = setupOpusCodec(encoder, sampleRate, numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
// numChannels needs to be set to properly communicate PCM values.
int32_t numChannels = 2, sampleRate = 44100, compressionLevel = -1;
if (encoder &&
(!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate))) {
ALOGE("missing channel count or sample rate for FLAC encoder");
err = INVALID_OPERATION;
} else {
if (encoder) {
if (!msg->findInt32(
"complexity", &compressionLevel) &&
!msg->findInt32(
"flac-compression-level", &compressionLevel)) {
compressionLevel = 5; // default FLAC compression level
} else if (compressionLevel < 0) {
ALOGW("compression level %d outside [0..8] range, "
"using 0",
compressionLevel);
compressionLevel = 0;
} else if (compressionLevel > 8) {
ALOGW("compression level %d outside [0..8] range, "
"using 8",
compressionLevel);
compressionLevel = 8;
}
}
err = setupFlacCodec(
encoder, numChannels, sampleRate, compressionLevel, pcmEncoding);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
int32_t numChannels, sampleRate;
if (encoder
|| !msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
err = INVALID_OPERATION;
} else {
err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
int32_t numChannels;
int32_t sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
err = INVALID_OPERATION;
} else {
err = setupAC3Codec(encoder, numChannels, sampleRate);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
int32_t numChannels;
int32_t sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
err = INVALID_OPERATION;
} else {
err = setupEAC3Codec(encoder, numChannels, sampleRate);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC4)) {
int32_t numChannels;
int32_t sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
err = INVALID_OPERATION;
} else {
err = setupAC4Codec(encoder, numChannels, sampleRate);
}
}
if (err != OK) {
return err;
}
if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
mEncoderDelay = 0;
}
if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
mEncoderPadding = 0;
}
if (msg->findInt32("channel-mask", &mChannelMask)) {
mChannelMaskPresent = true;
} else {
mChannelMaskPresent = false;
}
int32_t isCorruptFree = 0;
if (msg->findInt32("corrupt-free", &isCorruptFree)) {
mIsStreamCorruptFree = isCorruptFree == 1 ? true : false;
ALOGV("corrupt-free=[%d]", mIsStreamCorruptFree);
}
int32_t maxInputSize;
if (msg->findInt32("max-input-size", &maxInputSize)) {
err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
err = OK; // ignore error
} else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
err = setMinBufferSize(kPortIndexInput, 8192); // XXX
err = OK; // ignore error
}
int32_t priority;
if (msg->findInt32("priority", &priority)) {
err = setPriority(priority);
err = OK; // ignore error
}
int32_t rateInt = -1;
float rateFloat = -1;
if (!msg->findFloat("operating-rate", &rateFloat)) {
msg->findInt32("operating-rate", &rateInt);
rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound.
}
if (rateFloat > 0) {
err = setOperatingRate(rateFloat, mIsVideo);
err = OK; // ignore errors
}
if (err == OK) {
err = setVendorParameters(msg);
if (err != OK) {
return err;
}
}
// NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
mBaseOutputFormat = outputFormat;
mLastOutputFormat.clear();
err = getPortFormat(kPortIndexInput, inputFormat);
if (err == OK) {
err = getPortFormat(kPortIndexOutput, outputFormat);
if (err == OK) {
mInputFormat = inputFormat;
mOutputFormat = outputFormat;
}
}
// create data converters if needed
if (!mIsVideo && !mIsImage && err == OK) {
AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
if (encoder) {
(void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
if (mConverter[kPortIndexInput] != NULL) {
ALOGD("%s: encoder %s input format pcm encoding converter from %d to %d",
__func__, mComponentName.c_str(), pcmEncoding, codecPcmEncoding);
mInputFormat->setInt32("pcm-encoding", pcmEncoding);
}
} else {
(void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
if (mConverter[kPortIndexOutput] != NULL) {
ALOGD("%s: decoder %s output format pcm encoding converter from %d to %d",
__func__, mComponentName.c_str(), codecPcmEncoding, pcmEncoding);
mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
}
}
}
return err;
}
// Turns low-latency decode mode on or off via the Android vendor config
// extension. Only valid for decoders; encoders are rejected up front.
// mIsLowLatency records the mode only when the component accepted it.
status_t ACodec::setLowLatency(int32_t lowLatency) {
    if (mIsEncoder) {
        ALOGE("encoder does not support low-latency");
        return BAD_VALUE;
    }

    OMX_CONFIG_BOOLEANTYPE lowLatencyConfig;
    InitOMXParams(&lowLatencyConfig);
    lowLatencyConfig.bEnabled = (OMX_BOOL)(lowLatency != 0);

    const status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigLowLatency,
            &lowLatencyConfig, sizeof(lowLatencyConfig));
    if (err != OK) {
        ALOGE("decoder can not set low-latency to %d (err %d)", lowLatency, err);
    }
    // Track the effective state: enabled only if requested AND accepted.
    mIsLowLatency = (lowLatency && err == OK);
    return err;
}
// Pushes the requested codec latency (in frames) to the component through
// the input-port latency config. Returns the component's status verbatim.
status_t ACodec::setLatency(uint32_t latency) {
    OMX_PARAM_U32TYPE latencyParams;
    InitOMXParams(&latencyParams);
    latencyParams.nPortIndex = kPortIndexInput;
    latencyParams.nU32 = (OMX_U32)latency;
    return mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigLatency,
            &latencyParams, sizeof(latencyParams));
}
// Queries the component's current latency config on the input port.
// On success, stores the value into |latency|; on failure |latency| is
// left untouched and the component's error is returned.
status_t ACodec::getLatency(uint32_t *latency) {
    OMX_PARAM_U32TYPE latencyParams;
    InitOMXParams(&latencyParams);
    latencyParams.nPortIndex = kPortIndexInput;

    const status_t err = mOMXNode->getConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigLatency,
            &latencyParams, sizeof(latencyParams));
    if (err == OK) {
        *latency = latencyParams.nU32;
    }
    return err;
}
// Enables/disables "tunnel peek" (rendering of the first frame before
// playback start) on a tunneled decoder. Rejected for encoders and for
// non-tunneled components.
status_t ACodec::setTunnelPeek(int32_t tunnelPeek) {
    if (mIsEncoder) {
        ALOGE("encoder does not support %s", TUNNEL_PEEK_KEY);
        return BAD_VALUE;
    }
    if (!mTunneled) {
        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_KEY);
        return BAD_VALUE;
    }

    OMX_CONFIG_BOOLEANTYPE peekConfig;
    InitOMXParams(&peekConfig);
    peekConfig.bEnabled = (OMX_BOOL)(tunnelPeek != 0);

    const status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeek,
            &peekConfig, sizeof(peekConfig));
    if (err != OK) {
        ALOGE("decoder cannot set %s to %d (err %d)",
                TUNNEL_PEEK_KEY, tunnelPeek, err);
    }
    return err;
}
// Switches the tunnel-peek feature between legacy and explicit behavior on a
// tunneled decoder. Rejected for encoders and for non-tunneled components.
status_t ACodec::setTunnelPeekLegacy(int32_t isLegacy) {
    if (mIsEncoder) {
        ALOGE("encoder does not support %s", TUNNEL_PEEK_SET_LEGACY_KEY);
        return BAD_VALUE;
    }
    if (!mTunneled) {
        ALOGE("%s is only supported in tunnel mode", TUNNEL_PEEK_SET_LEGACY_KEY);
        return BAD_VALUE;
    }

    OMX_CONFIG_BOOLEANTYPE legacyModeConfig;
    InitOMXParams(&legacyModeConfig);
    legacyModeConfig.bEnabled = (OMX_BOOL)(isLegacy != 0);

    const status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAndroidTunnelPeekLegacyMode,
            &legacyModeConfig, sizeof(legacyModeConfig));
    if (err != OK) {
        ALOGE("decoder cannot set video peek legacy mode to %d (err %d)",
                isLegacy, err);
    }
    return err;
}
// Selects an audio presentation/program pair (e.g. for AC-4 streams) through
// the Android audio-presentation config extension. Returns the component's
// status verbatim.
status_t ACodec::setAudioPresentation(int32_t presentationId, int32_t programId) {
    OMX_AUDIO_CONFIG_ANDROID_AUDIOPRESENTATION presentationConfig;
    InitOMXParams(&presentationConfig);
    presentationConfig.nPresentationId = (OMX_S32)presentationId;
    presentationConfig.nProgramId = (OMX_S32)programId;
    return mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAudioPresentation,
            &presentationConfig, sizeof(presentationConfig));
}
// Communicates the codec priority (0 = realtime, 1 = best effort) to the
// component. A component that does not support the config is tolerated:
// the failure is logged and OK is returned regardless. Only a negative
// priority is treated as a caller error.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }

    OMX_PARAM_U32TYPE priorityConfig;
    InitOMXParams(&priorityConfig);
    priorityConfig.nU32 = (OMX_U32)priority;

    const status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &priorityConfig, sizeof(priorityConfig));
    if (err != OK) {
        ALOGI("codec does not support config priority (err %d)", err);
    }
    // Unsupported priority config is not fatal.
    return OK;
}
// Communicates the operating rate to the component. For video the rate is
// frames/sec expressed in Q16 fixed point (hence the 65535 fps ceiling);
// for audio it is samples/sec as a plain integer. As with setPriority,
// lack of component support is logged but not treated as an error.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    // Reject rates that cannot be represented in an OMX_U32 (Q16 for video).
    const float maxRate = isVideo ? 65535 : (float)UINT_MAX;
    if (rateFloat > maxRate) {
        return BAD_VALUE;
    }
    // Q16 conversion rounds to nearest; audio is a straight truncation.
    const OMX_U32 rate = isVideo
            ? (OMX_U32)(rateFloat * 65536.0f + 0.5f)
            : (OMX_U32)(rateFloat);

    OMX_PARAM_U32TYPE rateConfig;
    InitOMXParams(&rateConfig);
    rateConfig.nU32 = rate;

    const status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &rateConfig, sizeof(rateConfig));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}
// Reads the encoder's intra-refresh period (in frames) into
// |*intraRefreshPeriod|. Tries the Android vendor config extension first;
// if the component doesn't support it, falls back to the standard OMX
// cyclic intra-refresh parameter and converts its per-frame macroblock
// count back into a frame period using the output port dimensions.
// Reports 0 when intra refresh is unsupported or disabled.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMXNode->getConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        // Vendor extension reports the period directly.
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMXNode->getParameter(
            OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // Intra refresh unsupported or disabled: report 0 (not an error).
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    // period = total MBs per frame / MBs refreshed per frame (rounded up).
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);
    return OK;
}
// Sets the encoder's intra-refresh period (in frames; 0 disables refresh).
// Tries the Android vendor config extension first. If that fails and we are
// still in configure state (|inConfigure|), falls back to the standard OMX
// cyclic intra-refresh *parameter*, converting the period into a per-frame
// macroblock count from the output port dimensions. Outside of configure,
// setParameter is not permitted, so the fallback is unavailable.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        // MBs per frame / period (rounded up) = MBs to refresh each frame.
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}
// Parses the "ts-schema" key from |msg| ("webrtc.vp8.N-layer" or
// "android.generic.N" / "android.generic.N+M") and programs the encoder's
// temporal layering accordingly. During configure (|inConfigure|) this uses
// setParameter; afterwards it uses setConfig. On success, writes the schema
// that was actually applied back into |outputFormat| (duplicating
// mOutputFormat first if needed so an output-format-change event fires).
// Returns OK if no schema key is present; only valid for video encoders.
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        // No temporal layering requested: nothing to do.
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char tmp;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    // "webrtc.vp8.N-layer": sscanf must consume exactly the number (the
    // trailing %c is a guard against extra digits/characters; == 1 means
    // only the count matched cleanly... actually tmp absorbs the '-').
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &tmp) == 1
            && numLayers > 0) {
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    // "android.generic.N" (tags == 1) or "android.generic.N+M" (tags == 3
    // with the separator captured in tmp). numLayers becomes the total
    // layer count (P + B); overflow of the sum is guarded against.
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                    &numLayers, &tmp, &numBLayers, &tmp))
            && (tags == 1 || (tags == 3 && tmp == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        numLayers += numBLayers;
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    // Query the component's layering capabilities and verify the requested
    // pattern is supported.
    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        return BAD_VALUE;
    }

    // Clamp the request to what the component supports.
    // NOTE(review): if nLayerCountMax < numBLayers, numLayers - numBLayers
    // below would underflow — presumably components report consistent maxima;
    // confirm against the OMX extension contract.
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        // Running state: only setConfig is allowed.
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMXNode->setConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        // Configure state: program the full parameter structure.
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;
        layerParams.nLayerCountMax = numLayers;
        layerParams.nBLayerCountMax = numBLayers;

        err = mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    // Build the schema string describing what we actually configured
    // (post-clamping), for logging and for the output format.
    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    // Read back what the component ended up with, for the log.
    err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
status_t err = mOMXNode->getParameter(
OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
if (def.nBufferSize >= size) {
return OK;
}
def.nBufferSize = size;
err = mOMXNode->setParameter(
OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err != OK) {
return err;
}
err = mOMXNode->