/*
**
** Copyright 2007, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
#define LOG_TAG "AudioFlinger"
//#define LOG_NDEBUG 0
#include <math.h>
#include <signal.h>
#include <sys/time.h>
#include <sys/resource.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <binder/Parcel.h>
#include <utils/String16.h>
#include <utils/threads.h>
#include <utils/Atomic.h>
#include <cutils/bitops.h>
#include <cutils/properties.h>
#include <cutils/compiler.h>
#undef ADD_BATTERY_DATA
#ifdef ADD_BATTERY_DATA
#include <media/IMediaPlayerService.h>
#include <media/IMediaDeathNotifier.h>
#endif
#include <private/media/AudioTrackShared.h>
#include <private/media/AudioEffectShared.h>
#include <system/audio.h>
#include <hardware/audio.h>
#include "AudioMixer.h"
#include "AudioFlinger.h"
#include "ServiceUtilities.h"
#include <media/EffectsFactoryApi.h>
#include <audio_effects/effect_visualizer.h>
#include <audio_effects/effect_ns.h>
#include <audio_effects/effect_aec.h>
#include <audio_utils/primitives.h>
#include <powermanager/PowerManager.h>
// #define DEBUG_CPU_USAGE 10 // log statistics every n wall clock seconds
#ifdef DEBUG_CPU_USAGE
#include <cpustats/CentralTendencyStatistics.h>
#include <cpustats/ThreadCpuUsage.h>
#endif
#include <common_time/cc_helper.h>
#include <common_time/local_clock.h>
#include "FastMixer.h"
// NBAIO implementations
#include "AudioStreamOutSink.h"
#include "MonoPipe.h"
#include "MonoPipeReader.h"
#include "Pipe.h"
#include "PipeReader.h"
#include "SourceAudioBufferProvider.h"
#ifdef HAVE_REQUEST_PRIORITY
#include "SchedulingPolicyService.h"
#endif
#ifdef SOAKER
#include "Soaker.h"
#endif
// ----------------------------------------------------------------------------
// Note: the following macro is used for extremely verbose logging messages. In
// order to run with ALOG_ASSERT turned on, we need to have LOG_NDEBUG set to
// 0; but one side effect of this is to turn on all ALOGVs as well. Some messages
// are so verbose that we want to suppress them even when we have ALOG_ASSERT
// turned on. Do not uncomment the #define below unless you really know what you
// are doing and want to see all of the extremely verbose messages.
//#define VERY_VERY_VERBOSE_LOGGING
#ifdef VERY_VERY_VERBOSE_LOGGING
#define ALOGVV ALOGV
#else
#define ALOGVV(a...) do { } while(0)
#endif
namespace android {
static const char kDeadlockedString[] = "AudioFlinger may be deadlocked\n";
static const char kHardwareLockedString[] = "Hardware lock is taken\n";
static const float MAX_GAIN = 4096.0f;
static const uint32_t MAX_GAIN_INT = 0x1000;
// retry counts for buffer fill timeout
// 50 * ~20msecs = 1 second
static const int8_t kMaxTrackRetries = 50;
static const int8_t kMaxTrackStartupRetries = 50;
// allow fewer retry attempts on direct output threads.
// direct outputs can be a scarce resource in audio hardware and should
// be released as quickly as possible.
static const int8_t kMaxTrackRetriesDirect = 2;
static const int kDumpLockRetries = 50;
static const int kDumpLockSleepUs = 20000;
// don't warn about blocked writes or record buffer overflows more often than this
static const nsecs_t kWarningThrottleNs = seconds(5);
// RecordThread loop sleep time upon application overrun or audio HAL read error
static const int kRecordThreadSleepUs = 5000;
// maximum time to wait for setParameters to complete
static const nsecs_t kSetParametersTimeoutNs = seconds(2);
// minimum sleep time for the mixer thread loop when tracks are active but in underrun
static const uint32_t kMinThreadSleepTimeUs = 5000;
// maximum divider applied to the active sleep time in the mixer thread loop
static const uint32_t kMaxThreadSleepTimeShift = 2;
// minimum normal mix buffer size, expressed in milliseconds rather than frames
static const uint32_t kMinNormalMixBufferSizeMs = 20;
// maximum normal mix buffer size
static const uint32_t kMaxNormalMixBufferSizeMs = 24;
nsecs_t AudioFlinger::mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
// Whether to use fast mixer
static const enum {
FastMixer_Never, // never initialize or use: for debugging only
FastMixer_Always, // always initialize and use, even if not needed: for debugging only
// normal mixer multiplier is 1
FastMixer_Static, // initialize if needed, then use all the time if initialized,
// multiplier is calculated based on min & max normal mixer buffer size
FastMixer_Dynamic, // initialize if needed, then use dynamically depending on track load,
// multiplier is calculated based on min & max normal mixer buffer size
// FIXME for FastMixer_Dynamic:
// Supporting this option will require fixing HALs that can't handle large writes.
// For example, one HAL implementation returns an error from a large write,
// and another HAL implementation corrupts memory, possibly in the sample rate converter.
// We could either fix the HAL implementations, or provide a wrapper that breaks
// up large writes into smaller ones, and the wrapper would need to deal with the scheduler.
} kUseFastMixer = FastMixer_Static;
static uint32_t gScreenState; // incremented by 2 when screen state changes, bit 0 == 1 means "off"
// AudioFlinger::setParameters() updates, other threads read w/o lock
// ----------------------------------------------------------------------------
#ifdef ADD_BATTERY_DATA
// To collect the amplifier usage
static void addBatteryData(uint32_t params) {
sp<IMediaPlayerService> service = IMediaDeathNotifier::getMediaPlayerService();
if (service == NULL) {
// getMediaPlayerService() has already logged the error
return;
}
service->addBatteryData(params);
}
#endif
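// load_audio_interface() loads the audio HAL module of the given interface name, opens the
// hardware device, and checks that it implements the current audio device API version.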
static int load_audio_interface(const char *if_name, audio_hw_device_t **dev)
{
const hw_module_t *mod;
int rc;
rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
ALOGE_IF(rc, "%s couldn't load audio hw module %s.%s (%s)", __func__,
AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
if (rc) {
goto out;
}
rc = audio_hw_device_open(mod, dev);
ALOGE_IF(rc, "%s couldn't open audio hw device in %s.%s (%s)", __func__,
AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
if (rc) {
goto out;
}
if ((*dev)->common.version != AUDIO_DEVICE_API_VERSION_CURRENT) {
ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
rc = BAD_VALUE;
goto out;
}
return 0;
out:
*dev = NULL;
return rc;
}
// ----------------------------------------------------------------------------
AudioFlinger::AudioFlinger()
: BnAudioFlinger(),
mPrimaryHardwareDev(NULL),
mHardwareStatus(AUDIO_HW_IDLE), // see also onFirstRef()
mMasterVolume(1.0f),
mMasterVolumeSupportLvl(MVS_NONE),
mMasterMute(false),
mNextUniqueId(1),
mMode(AUDIO_MODE_INVALID),
mBtNrecIsOff(false)
{
}
void AudioFlinger::onFirstRef()
{
int rc = 0;
Mutex::Autolock _l(mLock);
/* TODO: move all this work into an Init() function */
char val_str[PROPERTY_VALUE_MAX] = { 0 };
if (property_get("ro.audio.flinger_standbytime_ms", val_str, NULL) >= 0) {
uint32_t int_val;
if (1 == sscanf(val_str, "%u", &int_val)) {
mStandbyTimeInNsecs = milliseconds(int_val);
ALOGI("Using %u mSec as standby time.", int_val);
} else {
mStandbyTimeInNsecs = kDefaultStandbyTimeInNsecs;
ALOGI("Using default %u mSec as standby time.",
(uint32_t)(mStandbyTimeInNsecs / 1000000));
}
}
mMode = AUDIO_MODE_NORMAL;
mMasterVolumeSW = 1.0;
mMasterVolume = 1.0;
mHardwareStatus = AUDIO_HW_IDLE;
}
AudioFlinger::~AudioFlinger()
{
while (!mRecordThreads.isEmpty()) {
// closeInput() will remove first entry from mRecordThreads
closeInput(mRecordThreads.keyAt(0));
}
while (!mPlaybackThreads.isEmpty()) {
// closeOutput() will remove first entry from mPlaybackThreads
closeOutput(mPlaybackThreads.keyAt(0));
}
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
// no mHardwareLock needed, as there are no other references to this
audio_hw_device_close(mAudioHwDevs.valueAt(i)->hwDevice());
delete mAudioHwDevs.valueAt(i);
}
}
static const char * const audio_interfaces[] = {
AUDIO_HARDWARE_MODULE_ID_PRIMARY,
AUDIO_HARDWARE_MODULE_ID_A2DP,
AUDIO_HARDWARE_MODULE_ID_USB,
};
#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
audio_hw_device_t* AudioFlinger::findSuitableHwDev_l(audio_module_handle_t module, uint32_t devices)
{
// if module is 0, the request comes from an old policy manager and we should load
// well known modules
if (module == 0) {
ALOGW("findSuitableHwDev_l() loading well know audio hw modules");
for (size_t i = 0; i < ARRAY_SIZE(audio_interfaces); i++) {
loadHwModule_l(audio_interfaces[i]);
}
} else {
// check a match for the requested module handle
AudioHwDevice *audioHwdevice = mAudioHwDevs.valueFor(module);
if (audioHwdevice != NULL) {
return audioHwdevice->hwDevice();
}
}
// then try to find a module supporting the requested device.
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
if ((dev->get_supported_devices(dev) & devices) == devices)
return dev;
}
return NULL;
}
status_t AudioFlinger::dumpClients(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
result.append("Clients:\n");
for (size_t i = 0; i < mClients.size(); ++i) {
sp<Client> client = mClients.valueAt(i).promote();
if (client != 0) {
snprintf(buffer, SIZE, " pid: %d\n", client->pid());
result.append(buffer);
}
}
result.append("Global session refs:\n");
result.append(" session pid count\n");
for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
AudioSessionRef *r = mAudioSessionRefs[i];
snprintf(buffer, SIZE, " %7d %3d %3d\n", r->mSessionid, r->mPid, r->mCnt);
result.append(buffer);
}
write(fd, result.string(), result.size());
return NO_ERROR;
}
status_t AudioFlinger::dumpInternals(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
hardware_call_state hardwareStatus = mHardwareStatus;
snprintf(buffer, SIZE, "Hardware status: %d\n"
"Standby Time mSec: %u\n",
hardwareStatus,
(uint32_t)(mStandbyTimeInNsecs / 1000000));
result.append(buffer);
write(fd, result.string(), result.size());
return NO_ERROR;
}
status_t AudioFlinger::dumpPermissionDenial(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
snprintf(buffer, SIZE, "Permission Denial: "
"can't dump AudioFlinger from pid=%d, uid=%d\n",
IPCThreadState::self()->getCallingPid(),
IPCThreadState::self()->getCallingUid());
result.append(buffer);
write(fd, result.string(), result.size());
return NO_ERROR;
}
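// Attempt to lock the given mutex for dump() purposes, retrying for up to
// kDumpLockRetries * kDumpLockSleepUs (about 1 second) before giving up, so that
// dump() can still produce output when another thread holds the lock.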
static bool tryLock(Mutex& mutex)
{
bool locked = false;
for (int i = 0; i < kDumpLockRetries; ++i) {
if (mutex.tryLock() == NO_ERROR) {
locked = true;
break;
}
usleep(kDumpLockSleepUs);
}
return locked;
}
status_t AudioFlinger::dump(int fd, const Vector<String16>& args)
{
if (!dumpAllowed()) {
dumpPermissionDenial(fd, args);
} else {
// get state of hardware lock
bool hardwareLocked = tryLock(mHardwareLock);
if (!hardwareLocked) {
String8 result(kHardwareLockedString);
write(fd, result.string(), result.size());
} else {
mHardwareLock.unlock();
}
bool locked = tryLock(mLock);
// failed to lock - AudioFlinger is probably deadlocked
if (!locked) {
String8 result(kDeadlockedString);
write(fd, result.string(), result.size());
}
dumpClients(fd, args);
dumpInternals(fd, args);
// dump playback threads
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
mPlaybackThreads.valueAt(i)->dump(fd, args);
}
// dump record threads
for (size_t i = 0; i < mRecordThreads.size(); i++) {
mRecordThreads.valueAt(i)->dump(fd, args);
}
// dump all hardware devs
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
dev->dump(dev, fd);
}
if (locked) mLock.unlock();
}
return NO_ERROR;
}
sp<AudioFlinger::Client> AudioFlinger::registerPid_l(pid_t pid)
{
// If pid is already in the mClients wp<> map, then use that entry
// (for which promote() is always != 0), otherwise create a new entry and Client.
sp<Client> client = mClients.valueFor(pid).promote();
if (client == 0) {
client = new Client(this, pid);
mClients.add(pid, client);
}
return client;
}
// IAudioFlinger interface
sp<IAudioTrack> AudioFlinger::createTrack(
pid_t pid,
audio_stream_type_t streamType,
uint32_t sampleRate,
audio_format_t format,
uint32_t channelMask,
int frameCount,
IAudioFlinger::track_flags_t flags,
const sp<IMemory>& sharedBuffer,
audio_io_handle_t output,
pid_t tid,
int *sessionId,
status_t *status)
{
sp<PlaybackThread::Track> track;
sp<TrackHandle> trackHandle;
sp<Client> client;
status_t lStatus;
int lSessionId;
// client AudioTrack::set already implements AUDIO_STREAM_DEFAULT => AUDIO_STREAM_MUSIC,
// but if someone uses binder directly they could bypass that and cause us to crash
if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
ALOGE("createTrack() invalid stream type %d", streamType);
lStatus = BAD_VALUE;
goto Exit;
}
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
PlaybackThread *effectThread = NULL;
if (thread == NULL) {
ALOGE("unknown output thread");
lStatus = BAD_VALUE;
goto Exit;
}
client = registerPid_l(pid);
ALOGV("createTrack() sessionId: %d", (sessionId == NULL) ? -2 : *sessionId);
if (sessionId != NULL && *sessionId != AUDIO_SESSION_OUTPUT_MIX) {
// check if an effect chain with the same session ID is present on another
// output thread and move it here.
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
sp<PlaybackThread> t = mPlaybackThreads.valueAt(i);
if (mPlaybackThreads.keyAt(i) != output) {
uint32_t sessions = t->hasAudioSession(*sessionId);
if (sessions & PlaybackThread::EFFECT_SESSION) {
effectThread = t.get();
break;
}
}
}
lSessionId = *sessionId;
} else {
// if no audio session id is provided, create one here
lSessionId = nextUniqueId();
if (sessionId != NULL) {
*sessionId = lSessionId;
}
}
ALOGV("createTrack() lSessionId: %d", lSessionId);
track = thread->createTrack_l(client, streamType, sampleRate, format,
channelMask, frameCount, sharedBuffer, lSessionId, flags, tid, &lStatus);
// move effect chain to this output thread if an effect on same session was waiting
// for a track to be created
if (lStatus == NO_ERROR && effectThread != NULL) {
Mutex::Autolock _dl(thread->mLock);
Mutex::Autolock _sl(effectThread->mLock);
moveEffectChain_l(lSessionId, effectThread, thread, true);
}
// Look for sync events awaiting a session to be used.
for (int i = 0; i < (int)mPendingSyncEvents.size(); i++) {
if (mPendingSyncEvents[i]->triggerSession() == lSessionId) {
if (thread->isValidSyncEvent(mPendingSyncEvents[i])) {
if (lStatus == NO_ERROR) {
track->setSyncEvent(mPendingSyncEvents[i]);
} else {
mPendingSyncEvents[i]->cancel();
}
mPendingSyncEvents.removeAt(i);
i--;
}
}
}
}
if (lStatus == NO_ERROR) {
trackHandle = new TrackHandle(track);
} else {
// remove local strong reference to Client before deleting the Track so that the Client
// destructor is called by the TrackBase destructor with mLock held
client.clear();
track.clear();
}
Exit:
if (status != NULL) {
*status = lStatus;
}
return trackHandle;
}
uint32_t AudioFlinger::sampleRate(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
ALOGW("sampleRate() unknown thread %d", output);
return 0;
}
return thread->sampleRate();
}
int AudioFlinger::channelCount(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
ALOGW("channelCount() unknown thread %d", output);
return 0;
}
return thread->channelCount();
}
audio_format_t AudioFlinger::format(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
ALOGW("format() unknown thread %d", output);
return AUDIO_FORMAT_INVALID;
}
return thread->format();
}
size_t AudioFlinger::frameCount(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
ALOGW("frameCount() unknown thread %d", output);
return 0;
}
// FIXME currently returns the normal mixer's frame count to avoid confusing legacy callers;
// should examine all callers and fix them to handle smaller counts
return thread->frameCount();
}
uint32_t AudioFlinger::latency(audio_io_handle_t output) const
{
Mutex::Autolock _l(mLock);
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
ALOGW("latency() unknown thread %d", output);
return 0;
}
return thread->latency();
}
status_t AudioFlinger::setMasterVolume(float value)
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return ret;
}
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
float swmv = value;
Mutex::Autolock _l(mLock);
// when hw supports master volume, don't scale in sw mixer
if (MVS_NONE != mMasterVolumeSupportLvl) {
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
AutoMutex lock(mHardwareLock);
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME;
if (NULL != dev->set_master_volume) {
dev->set_master_volume(dev, value);
}
mHardwareStatus = AUDIO_HW_IDLE;
}
swmv = 1.0;
}
mMasterVolume = value;
mMasterVolumeSW = swmv;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
mPlaybackThreads.valueAt(i)->setMasterVolume(swmv);
return NO_ERROR;
}
status_t AudioFlinger::setMode(audio_mode_t mode)
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return ret;
}
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
if (uint32_t(mode) >= AUDIO_MODE_CNT) {
ALOGW("Illegal value: setMode(%d)", mode);
return BAD_VALUE;
}
{ // scope for the lock
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_MODE;
ret = mPrimaryHardwareDev->set_mode(mPrimaryHardwareDev, mode);
mHardwareStatus = AUDIO_HW_IDLE;
}
if (NO_ERROR == ret) {
Mutex::Autolock _l(mLock);
mMode = mode;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
mPlaybackThreads.valueAt(i)->setMode(mode);
}
return ret;
}
status_t AudioFlinger::setMicMute(bool state)
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return ret;
}
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_MIC_MUTE;
ret = mPrimaryHardwareDev->set_mic_mute(mPrimaryHardwareDev, state);
mHardwareStatus = AUDIO_HW_IDLE;
return ret;
}
bool AudioFlinger::getMicMute() const
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return false;
}
bool state = false;
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_MIC_MUTE;
mPrimaryHardwareDev->get_mic_mute(mPrimaryHardwareDev, &state);
mHardwareStatus = AUDIO_HW_IDLE;
return state;
}
status_t AudioFlinger::setMasterMute(bool muted)
{
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
Mutex::Autolock _l(mLock);
// This is an optimization, so PlaybackThread doesn't have to look at the one from AudioFlinger
mMasterMute = muted;
for (size_t i = 0; i < mPlaybackThreads.size(); i++)
mPlaybackThreads.valueAt(i)->setMasterMute(muted);
return NO_ERROR;
}
float AudioFlinger::masterVolume() const
{
Mutex::Autolock _l(mLock);
return masterVolume_l();
}
float AudioFlinger::masterVolumeSW() const
{
Mutex::Autolock _l(mLock);
return masterVolumeSW_l();
}
bool AudioFlinger::masterMute() const
{
Mutex::Autolock _l(mLock);
return masterMute_l();
}
float AudioFlinger::masterVolume_l() const
{
if (MVS_FULL == mMasterVolumeSupportLvl) {
float ret_val;
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_MASTER_VOLUME;
ALOG_ASSERT((NULL != mPrimaryHardwareDev) &&
(NULL != mPrimaryHardwareDev->get_master_volume),
"can't get master volume");
mPrimaryHardwareDev->get_master_volume(mPrimaryHardwareDev, &ret_val);
mHardwareStatus = AUDIO_HW_IDLE;
return ret_val;
}
return mMasterVolume;
}
status_t AudioFlinger::setStreamVolume(audio_stream_type_t stream, float value,
audio_io_handle_t output)
{
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
ALOGE("setStreamVolume() invalid stream %d", stream);
return BAD_VALUE;
}
AutoMutex lock(mLock);
PlaybackThread *thread = NULL;
if (output) {
thread = checkPlaybackThread_l(output);
if (thread == NULL) {
return BAD_VALUE;
}
}
mStreamTypes[stream].volume = value;
if (thread == NULL) {
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
mPlaybackThreads.valueAt(i)->setStreamVolume(stream, value);
}
} else {
thread->setStreamVolume(stream, value);
}
return NO_ERROR;
}
status_t AudioFlinger::setStreamMute(audio_stream_type_t stream, bool muted)
{
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
if (uint32_t(stream) >= AUDIO_STREAM_CNT ||
uint32_t(stream) == AUDIO_STREAM_ENFORCED_AUDIBLE) {
ALOGE("setStreamMute() invalid stream %d", stream);
return BAD_VALUE;
}
AutoMutex lock(mLock);
mStreamTypes[stream].mute = muted;
for (uint32_t i = 0; i < mPlaybackThreads.size(); i++)
mPlaybackThreads.valueAt(i)->setStreamMute(stream, muted);
return NO_ERROR;
}
float AudioFlinger::streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
{
if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
return 0.0f;
}
AutoMutex lock(mLock);
float volume;
if (output) {
PlaybackThread *thread = checkPlaybackThread_l(output);
if (thread == NULL) {
return 0.0f;
}
volume = thread->streamVolume(stream);
} else {
volume = streamVolume_l(stream);
}
return volume;
}
bool AudioFlinger::streamMute(audio_stream_type_t stream) const
{
if (uint32_t(stream) >= AUDIO_STREAM_CNT) {
return true;
}
AutoMutex lock(mLock);
return streamMute_l(stream);
}
status_t AudioFlinger::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
{
ALOGV("setParameters(): io %d, keyvalue %s, tid %d, calling pid %d",
ioHandle, keyValuePairs.string(), gettid(), IPCThreadState::self()->getCallingPid());
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
// ioHandle == 0 means the parameters are global to the audio hardware interface
if (ioHandle == 0) {
Mutex::Autolock _l(mLock);
status_t final_result = NO_ERROR;
{
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_PARAMETER;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
status_t result = dev->set_parameters(dev, keyValuePairs.string());
final_result = result ?: final_result;
}
mHardwareStatus = AUDIO_HW_IDLE;
}
// disable AEC and NS if the device is a BT SCO headset supporting those pre-processing effects
AudioParameter param = AudioParameter(keyValuePairs);
String8 value;
if (param.get(String8(AUDIO_PARAMETER_KEY_BT_NREC), value) == NO_ERROR) {
bool btNrecIsOff = (value == AUDIO_PARAMETER_VALUE_OFF);
if (mBtNrecIsOff != btNrecIsOff) {
for (size_t i = 0; i < mRecordThreads.size(); i++) {
sp<RecordThread> thread = mRecordThreads.valueAt(i);
RecordThread::RecordTrack *track = thread->track();
if (track != NULL) {
audio_devices_t device = (audio_devices_t)(
thread->device() & AUDIO_DEVICE_IN_ALL);
bool suspend = audio_is_bluetooth_sco_device(device) && btNrecIsOff;
thread->setEffectSuspended(FX_IID_AEC,
suspend,
track->sessionId());
thread->setEffectSuspended(FX_IID_NS,
suspend,
track->sessionId());
}
}
mBtNrecIsOff = btNrecIsOff;
}
}
String8 screenState;
if (param.get(String8(AudioParameter::keyScreenState), screenState) == NO_ERROR) {
bool isOff = screenState == "off";
if (isOff != (gScreenState & 1)) {
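// clear bit 0, increment the change count by 2, then set bit 0 if the screen is now off
// (see the gScreenState declaration for the encoding)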
gScreenState = ((gScreenState & ~1) + 2) | isOff;
}
}
return final_result;
}
// hold a strong ref on thread in case closeOutput() or closeInput() is called
// and the thread is exited once the lock is released
sp<ThreadBase> thread;
{
Mutex::Autolock _l(mLock);
thread = checkPlaybackThread_l(ioHandle);
if (thread == NULL) {
thread = checkRecordThread_l(ioHandle);
} else if (thread == primaryPlaybackThread_l()) {
// indicate output device change to all input threads for pre processing
AudioParameter param = AudioParameter(keyValuePairs);
int value;
if ((param.getInt(String8(AudioParameter::keyRouting), value) == NO_ERROR) &&
(value != 0)) {
for (size_t i = 0; i < mRecordThreads.size(); i++) {
mRecordThreads.valueAt(i)->setParameters(keyValuePairs);
}
}
}
}
if (thread != 0) {
return thread->setParameters(keyValuePairs);
}
return BAD_VALUE;
}
String8 AudioFlinger::getParameters(audio_io_handle_t ioHandle, const String8& keys) const
{
// ALOGV("getParameters() io %d, keys %s, tid %d, calling pid %d",
// ioHandle, keys.string(), gettid(), IPCThreadState::self()->getCallingPid());
Mutex::Autolock _l(mLock);
if (ioHandle == 0) {
String8 out_s8;
for (size_t i = 0; i < mAudioHwDevs.size(); i++) {
char *s;
{
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_PARAMETER;
audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice();
s = dev->get_parameters(dev, keys.string());
mHardwareStatus = AUDIO_HW_IDLE;
}
out_s8 += String8(s ? s : "");
free(s);
}
return out_s8;
}
PlaybackThread *playbackThread = checkPlaybackThread_l(ioHandle);
if (playbackThread != NULL) {
return playbackThread->getParameters(keys);
}
RecordThread *recordThread = checkRecordThread_l(ioHandle);
if (recordThread != NULL) {
return recordThread->getParameters(keys);
}
return String8("");
}
size_t AudioFlinger::getInputBufferSize(uint32_t sampleRate, audio_format_t format, int channelCount) const
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return 0;
}
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
struct audio_config config = {
sample_rate: sampleRate,
channel_mask: audio_channel_in_mask_from_count(channelCount),
format: format,
};
size_t size = mPrimaryHardwareDev->get_input_buffer_size(mPrimaryHardwareDev, &config);
mHardwareStatus = AUDIO_HW_IDLE;
return size;
}
unsigned int AudioFlinger::getInputFramesLost(audio_io_handle_t ioHandle) const
{
if (ioHandle == 0) {
return 0;
}
Mutex::Autolock _l(mLock);
RecordThread *recordThread = checkRecordThread_l(ioHandle);
if (recordThread != NULL) {
return recordThread->getInputFramesLost();
}
return 0;
}
status_t AudioFlinger::setVoiceVolume(float value)
{
status_t ret = initCheck();
if (ret != NO_ERROR) {
return ret;
}
// check calling permissions
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
AutoMutex lock(mHardwareLock);
mHardwareStatus = AUDIO_HW_SET_VOICE_VOLUME;
ret = mPrimaryHardwareDev->set_voice_volume(mPrimaryHardwareDev, value);
mHardwareStatus = AUDIO_HW_IDLE;
return ret;
}
status_t AudioFlinger::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames,
audio_io_handle_t output) const
{
status_t status;
Mutex::Autolock _l(mLock);
PlaybackThread *playbackThread = checkPlaybackThread_l(output);
if (playbackThread != NULL) {
return playbackThread->getRenderPosition(halFrames, dspFrames);
}
return BAD_VALUE;
}
void AudioFlinger::registerClient(const sp<IAudioFlingerClient>& client)
{
Mutex::Autolock _l(mLock);
pid_t pid = IPCThreadState::self()->getCallingPid();
if (mNotificationClients.indexOfKey(pid) < 0) {
sp<NotificationClient> notificationClient = new NotificationClient(this,
client,
pid);
ALOGV("registerClient() client %p, pid %d", notificationClient.get(), pid);
mNotificationClients.add(pid, notificationClient);
sp<IBinder> binder = client->asBinder();
binder->linkToDeath(notificationClient);
// the config change is always sent from playback or record threads to avoid deadlock
// with AudioSystem::gLock
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
mPlaybackThreads.valueAt(i)->sendConfigEvent(AudioSystem::OUTPUT_OPENED);
}
for (size_t i = 0; i < mRecordThreads.size(); i++) {
mRecordThreads.valueAt(i)->sendConfigEvent(AudioSystem::INPUT_OPENED);
}
}
}
void AudioFlinger::removeNotificationClient(pid_t pid)
{
Mutex::Autolock _l(mLock);
mNotificationClients.removeItem(pid);
ALOGV("%d died, releasing its sessions", pid);
size_t num = mAudioSessionRefs.size();
bool removed = false;
for (size_t i = 0; i< num; ) {
AudioSessionRef *ref = mAudioSessionRefs.itemAt(i);
ALOGV(" pid %d @ %d", ref->mPid, i);
if (ref->mPid == pid) {
ALOGV(" removing entry for pid %d session %d", pid, ref->mSessionid);
mAudioSessionRefs.removeAt(i);
delete ref;
removed = true;
num--;
} else {
i++;
}
}
if (removed) {
purgeStaleEffects_l();
}
}
// audioConfigChanged_l() must be called with AudioFlinger::mLock held
void AudioFlinger::audioConfigChanged_l(int event, audio_io_handle_t ioHandle, const void *param2)
{
size_t size = mNotificationClients.size();
for (size_t i = 0; i < size; i++) {
mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(event, ioHandle,
param2);
}
}
// removeClient_l() must be called with AudioFlinger::mLock held
void AudioFlinger::removeClient_l(pid_t pid)
{
ALOGV("removeClient_l() pid %d, tid %d, calling tid %d", pid, gettid(), IPCThreadState::self()->getCallingPid());
mClients.removeItem(pid);
}
// ----------------------------------------------------------------------------
AudioFlinger::ThreadBase::ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id,
uint32_t device, type_t type)
: Thread(false),
mType(type),
mAudioFlinger(audioFlinger), mSampleRate(0), mFrameCount(0), mNormalFrameCount(0),
// mChannelMask
mChannelCount(0),
mFrameSize(1), mFormat(AUDIO_FORMAT_INVALID),
mParamStatus(NO_ERROR),
mStandby(false), mId(id),
mDevice(device),
mDeathRecipient(new PMDeathRecipient(this))
{
}
AudioFlinger::ThreadBase::~ThreadBase()
{
mParamCond.broadcast();
// do not lock the mutex in destructor
releaseWakeLock_l();
if (mPowerManager != 0) {
sp<IBinder> binder = mPowerManager->asBinder();
binder->unlinkToDeath(mDeathRecipient);
}
}
void AudioFlinger::ThreadBase::exit()
{
ALOGV("ThreadBase::exit");
{
// This lock prevents the following race in thread (uniprocessor for illustration):
// if (!exitPending()) {
// // context switch from here to exit()
// // exit() calls requestExit(), what exitPending() observes
// // exit() calls signal(), which is dropped since no waiters
// // context switch back from exit() to here
// mWaitWorkCV.wait(...);
// // now thread is hung
// }
AutoMutex lock(mLock);
requestExit();
mWaitWorkCV.signal();
}
// When Thread::requestExitAndWait is made virtual and this method is renamed to
// "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
requestExitAndWait();
}
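// ThreadBase::setParameters() queues the key/value pairs for the thread loop to apply,
// then waits on mParamCond (with a timeout) for the thread to report the result in mParamStatus.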
status_t AudioFlinger::ThreadBase::setParameters(const String8& keyValuePairs)
{
status_t status;
ALOGV("ThreadBase::setParameters() %s", keyValuePairs.string());
Mutex::Autolock _l(mLock);
mNewParameters.add(keyValuePairs);
mWaitWorkCV.signal();
// wait condition with timeout in case the thread loop has exited
// before the request could be processed
if (mParamCond.waitRelative(mLock, kSetParametersTimeoutNs) == NO_ERROR) {
status = mParamStatus;
mWaitWorkCV.signal();
} else {
status = TIMED_OUT;
}
return status;
}
void AudioFlinger::ThreadBase::sendConfigEvent(int event, int param)
{
Mutex::Autolock _l(mLock);
sendConfigEvent_l(event, param);
}
// sendConfigEvent_l() must be called with ThreadBase::mLock held
void AudioFlinger::ThreadBase::sendConfigEvent_l(int event, int param)
{
ConfigEvent configEvent;
configEvent.mEvent = event;
configEvent.mParam = param;
mConfigEvents.add(configEvent);
ALOGV("sendConfigEvent() num events %d event %d, param %d", mConfigEvents.size(), event, param);
mWaitWorkCV.signal();
}
void AudioFlinger::ThreadBase::processConfigEvents()
{
mLock.lock();
while (!mConfigEvents.isEmpty()) {
ALOGV("processConfigEvents() remaining events %d", mConfigEvents.size());
ConfigEvent configEvent = mConfigEvents[0];
mConfigEvents.removeAt(0);
// release mLock before locking AudioFlinger mLock: lock order is always
// AudioFlinger then ThreadBase to avoid cross deadlock
mLock.unlock();
mAudioFlinger->mLock.lock();
audioConfigChanged_l(configEvent.mEvent, configEvent.mParam);
mAudioFlinger->mLock.unlock();
mLock.lock();
}
mLock.unlock();
}
status_t AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
bool locked = tryLock(mLock);
if (!locked) {
snprintf(buffer, SIZE, "thread %p maybe dead locked\n", this);
write(fd, buffer, strlen(buffer));
}
snprintf(buffer, SIZE, "io handle: %d\n", mId);
result.append(buffer);
snprintf(buffer, SIZE, "TID: %d\n", getTid());
result.append(buffer);
snprintf(buffer, SIZE, "standby: %d\n", mStandby);
result.append(buffer);
snprintf(buffer, SIZE, "Sample rate: %d\n", mSampleRate);
result.append(buffer);
snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount);
result.append(buffer);
snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount);
result.append(buffer);
snprintf(buffer, SIZE, "Channel Count: %d\n", mChannelCount);
result.append(buffer);
snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask);
result.append(buffer);
snprintf(buffer, SIZE, "Format: %d\n", mFormat);
result.append(buffer);
snprintf(buffer, SIZE, "Frame size: %u\n", mFrameSize);
result.append(buffer);
snprintf(buffer, SIZE, "\nPending setParameters commands: \n");
result.append(buffer);
result.append(" Index Command");
for (size_t i = 0; i < mNewParameters.size(); ++i) {
snprintf(buffer, SIZE, "\n %02d ", i);
result.append(buffer);
result.append(mNewParameters[i]);
}
snprintf(buffer, SIZE, "\n\nPending config events: \n");
result.append(buffer);
snprintf(buffer, SIZE, " Index event param\n");
result.append(buffer);
for (size_t i = 0; i < mConfigEvents.size(); i++) {
snprintf(buffer, SIZE, " %02d %02d %d\n", i, mConfigEvents[i].mEvent, mConfigEvents[i].mParam);
result.append(buffer);
}
result.append("\n");
write(fd, result.string(), result.size());
if (locked) {
mLock.unlock();
}
return NO_ERROR;
}
status_t AudioFlinger::ThreadBase::dumpEffectChains(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
snprintf(buffer, SIZE, "\n- %d Effect Chains:\n", mEffectChains.size());
write(fd, buffer, strlen(buffer));
for (size_t i = 0; i < mEffectChains.size(); ++i) {
sp<EffectChain> chain = mEffectChains[i];
if (chain != 0) {
chain->dump(fd, args);
}
}
return NO_ERROR;
}
void AudioFlinger::ThreadBase::acquireWakeLock()
{
Mutex::Autolock _l(mLock);
acquireWakeLock_l();
}
void AudioFlinger::ThreadBase::acquireWakeLock_l()
{
if (mPowerManager == 0) {
// use checkService() to avoid blocking if power service is not up yet
sp<IBinder> binder =
defaultServiceManager()->checkService(String16("power"));
if (binder == 0) {
ALOGW("Thread %s cannot connect to the power manager service", mName);
} else {
mPowerManager = interface_cast<IPowerManager>(binder);
binder->linkToDeath(mDeathRecipient);
}
}
if (mPowerManager != 0) {
sp<IBinder> binder = new BBinder();
status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
binder,
String16(mName));
if (status == NO_ERROR) {
mWakeLockToken = binder;
}
ALOGV("acquireWakeLock_l() %s status %d", mName, status);
}
}
void AudioFlinger::ThreadBase::releaseWakeLock()
{
Mutex::Autolock _l(mLock);
releaseWakeLock_l();
}
void AudioFlinger::ThreadBase::releaseWakeLock_l()
{
if (mWakeLockToken != 0) {
ALOGV("releaseWakeLock_l() %s", mName);
if (mPowerManager != 0) {
mPowerManager->releaseWakeLock(mWakeLockToken, 0);
}
mWakeLockToken.clear();
}
}
void AudioFlinger::ThreadBase::clearPowerManager()
{
Mutex::Autolock _l(mLock);
releaseWakeLock_l();
mPowerManager.clear();
}
void AudioFlinger::ThreadBase::PMDeathRecipient::binderDied(const wp<IBinder>& who)
{
sp<ThreadBase> thread = mThread.promote();
if (thread != 0) {
thread->clearPowerManager();
}
ALOGW("power manager service died !!!");
}
void AudioFlinger::ThreadBase::setEffectSuspended(
const effect_uuid_t *type, bool suspend, int sessionId)
{
Mutex::Autolock _l(mLock);
setEffectSuspended_l(type, suspend, sessionId);
}
void AudioFlinger::ThreadBase::setEffectSuspended_l(
const effect_uuid_t *type, bool suspend, int sessionId)
{
sp<EffectChain> chain = getEffectChain_l(sessionId);
if (chain != 0) {
if (type != NULL) {
chain->setEffectSuspended_l(type, suspend);
} else {
chain->setEffectSuspendedAll_l(suspend);
}
}
updateSuspendedSessions_l(type, suspend, sessionId);
}
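// checkSuspendOnAddEffectChain_l() re-applies any suspend requests recorded in
// mSuspendedSessions to an effect chain that is being added to this thread.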
void AudioFlinger::ThreadBase::checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain)
{
ssize_t index = mSuspendedSessions.indexOfKey(chain->sessionId());
if (index < 0) {
return;
}
KeyedVector <int, sp<SuspendedSessionDesc> > sessionEffects =
mSuspendedSessions.editValueAt(index);
for (size_t i = 0; i < sessionEffects.size(); i++) {
sp<SuspendedSessionDesc> desc = sessionEffects.valueAt(i);
for (int j = 0; j < desc->mRefCount; j++) {
if (sessionEffects.keyAt(i) == EffectChain::kKeyForSuspendAll) {
chain->setEffectSuspendedAll_l(true);
} else {
ALOGV("checkSuspendOnAddEffectChain_l() suspending effects %08x",
desc->mType.timeLow);
chain->setEffectSuspended_l(&desc->mType, true);
}
}
}
}
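// updateSuspendedSessions_l() maintains a reference-counted record in mSuspendedSessions of
// suspend requests per session and per effect type (or for all effects, keyed by
// kKeyForSuspendAll), so that effect chains created later can be suspended accordingly.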
void AudioFlinger::ThreadBase::updateSuspendedSessions_l(const effect_uuid_t *type,
bool suspend,
int sessionId)
{
ssize_t index = mSuspendedSessions.indexOfKey(sessionId);
KeyedVector <int, sp<SuspendedSessionDesc> > sessionEffects;
if (suspend) {
if (index >= 0) {
sessionEffects = mSuspendedSessions.editValueAt(index);
} else {
mSuspendedSessions.add(sessionId, sessionEffects);
}
} else {
if (index < 0) {
return;
}
sessionEffects = mSuspendedSessions.editValueAt(index);
}
int key = EffectChain::kKeyForSuspendAll;
if (type != NULL) {
key = type->timeLow;
}
index = sessionEffects.indexOfKey(key);
sp<SuspendedSessionDesc> desc;
if (suspend) {
if (index >= 0) {
desc = sessionEffects.valueAt(index);
} else {
desc = new SuspendedSessionDesc();
if (type != NULL) {
memcpy(&desc->mType, type, sizeof(effect_uuid_t));
}
sessionEffects.add(key, desc);
ALOGV("updateSuspendedSessions_l() suspend adding effect %08x", key);
}
desc->mRefCount++;
} else {
if (index < 0) {
return;
}
desc = sessionEffects.valueAt(index);
if (--desc->mRefCount == 0) {
ALOGV("updateSuspendedSessions_l() restore removing effect %08x", key);
sessionEffects.removeItemsAt(index);
if (sessionEffects.isEmpty()) {
ALOGV("updateSuspendedSessions_l() restore removing session %d",
sessionId);
mSuspendedSessions.removeItem(sessionId);
}
}
}
if (!sessionEffects.isEmpty()) {
mSuspendedSessions.replaceValueFor(sessionId, sessionEffects);
}
}
void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled(const sp<EffectModule>& effect,
bool enabled,
int sessionId)
{
Mutex::Autolock _l(mLock);
checkSuspendOnEffectEnabled_l(effect, enabled, sessionId);
}
void AudioFlinger::ThreadBase::checkSuspendOnEffectEnabled_l(const sp<EffectModule>& effect,
bool enabled,
int sessionId)
{
if (mType != RECORD) {
// suspend all effects in AUDIO_SESSION_OUTPUT_MIX when enabling any effect on
// another session. This gives the priority to well behaved effect control panels
// and applications not using global effects.
// Enabling post processing in AUDIO_SESSION_OUTPUT_STAGE session does not affect
// global effects
if ((sessionId != AUDIO_SESSION_OUTPUT_MIX) && (sessionId != AUDIO_SESSION_OUTPUT_STAGE)) {
setEffectSuspended_l(NULL, enabled, AUDIO_SESSION_OUTPUT_MIX);
}
}
sp<EffectChain> chain = getEffectChain_l(sessionId);
if (chain != 0) {
chain->checkSuspendOnEffectEnabled(effect, enabled);
}
}
// ----------------------------------------------------------------------------
AudioFlinger::PlaybackThread::PlaybackThread(const sp<AudioFlinger>& audioFlinger,
AudioStreamOut* output,
audio_io_handle_t id,
uint32_t device,
type_t type)
: ThreadBase(audioFlinger, id, device, type),
mMixBuffer(NULL), mSuspended(0), mBytesWritten(0),
// Assumes constructor is called by AudioFlinger with its mLock held,
// but it would be safer to explicitly pass initial masterMute as parameter
mMasterMute(audioFlinger->masterMute_l()),
// mStreamTypes[] initialized in constructor body
mOutput(output),
// Assumes constructor is called by AudioFlinger with its mLock held,
// but it would be safer to explicitly pass initial masterVolume as parameter
mMasterVolume(audioFlinger->masterVolumeSW_l()),
mLastWriteTime(0), mNumWrites(0), mNumDelayedWrites(0), mInWrite(false),
mMixerStatus(MIXER_IDLE),
mMixerStatusIgnoringFastTracks(MIXER_IDLE),
standbyDelay(AudioFlinger::mStandbyTimeInNsecs),
mScreenState(gScreenState),
// index 0 is reserved for normal mixer's submix
mFastTrackAvailMask(((1 << FastMixerState::kMaxFastTracks) - 1) & ~1)
{
snprintf(mName, kNameLength, "AudioOut_%X", id);
readOutputParameters();
// mStreamTypes[AUDIO_STREAM_CNT] is initialized by stream_type_t default constructor
// There is no AUDIO_STREAM_MIN, and ++ operator does not compile
for (audio_stream_type_t stream = (audio_stream_type_t) 0; stream < AUDIO_STREAM_CNT;
stream = (audio_stream_type_t) (stream + 1)) {
mStreamTypes[stream].volume = mAudioFlinger->streamVolume_l(stream);
mStreamTypes[stream].mute = mAudioFlinger->streamMute_l(stream);
}
// mStreamTypes[AUDIO_STREAM_CNT] exists but isn't explicitly initialized here,
// because mAudioFlinger doesn't have one to copy from
}
AudioFlinger::PlaybackThread::~PlaybackThread()
{
delete [] mMixBuffer;
}
status_t AudioFlinger::PlaybackThread::dump(int fd, const Vector<String16>& args)
{
dumpInternals(fd, args);
dumpTracks(fd, args);
dumpEffectChains(fd, args);
return NO_ERROR;
}
status_t AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
result.appendFormat("Output thread %p stream volumes in dB:\n ", this);
for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
const stream_type_t *st = &mStreamTypes[i];
if (i > 0) {
result.appendFormat(", ");
}
result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume));
if (st->mute) {
result.append("M");
}
}
result.append("\n");
write(fd, result.string(), result.length());
result.clear();
snprintf(buffer, SIZE, "Output thread %p tracks\n", this);
result.append(buffer);
Track::appendDumpHeader(result);
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
if (track != 0) {
track->dump(buffer, SIZE);
result.append(buffer);
}
}
snprintf(buffer, SIZE, "Output thread %p active tracks\n", this);
result.append(buffer);
Track::appendDumpHeader(result);
for (size_t i = 0; i < mActiveTracks.size(); ++i) {
sp<Track> track = mActiveTracks[i].promote();
if (track != 0) {
track->dump(buffer, SIZE);
result.append(buffer);
}
}
write(fd, result.string(), result.size());
// These values are "raw"; they will wrap around. See prepareTracks_l() for a better way.
FastTrackUnderruns underruns = getFastTrackUnderruns(0);
fdprintf(fd, "Normal mixer raw underrun counters: partial=%u empty=%u\n",
underruns.mBitFields.mPartial, underruns.mBitFields.mEmpty);
return NO_ERROR;
}
status_t AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this);
result.append(buffer);
snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n", ns2ms(systemTime() - mLastWriteTime));
result.append(buffer);
snprintf(buffer, SIZE, "total writes: %d\n", mNumWrites);
result.append(buffer);
snprintf(buffer, SIZE, "delayed writes: %d\n", mNumDelayedWrites);
result.append(buffer);
snprintf(buffer, SIZE, "blocked in write: %d\n", mInWrite);
result.append(buffer);
snprintf(buffer, SIZE, "suspend count: %d\n", mSuspended);
result.append(buffer);
snprintf(buffer, SIZE, "mix buffer : %p\n", mMixBuffer);
result.append(buffer);
write(fd, result.string(), result.size());
fdprintf(fd, "Fast track availMask=%#x\n", mFastTrackAvailMask);
dumpBase(fd, args);
return NO_ERROR;
}
// Thread virtuals
status_t AudioFlinger::PlaybackThread::readyToRun()
{
status_t status = initCheck();
if (status == NO_ERROR) {
ALOGI("AudioFlinger's thread %p ready to run", this);
} else {
ALOGE("No working audio driver found.");
}
return status;
}
void AudioFlinger::PlaybackThread::onFirstRef()
{
run(mName, ANDROID_PRIORITY_URGENT_AUDIO);
}
// PlaybackThread::createTrack_l() must be called with AudioFlinger::mLock held
sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrack_l(
const sp<AudioFlinger::Client>& client,
audio_stream_type_t streamType,
uint32_t sampleRate,
audio_format_t format,
uint32_t channelMask,
int frameCount,
const sp<IMemory>& sharedBuffer,
int sessionId,
IAudioFlinger::track_flags_t flags,
pid_t tid,
status_t *status)
{
sp<Track> track;
status_t lStatus;
bool isTimed = (flags & IAudioFlinger::TRACK_TIMED) != 0;
// client expresses a preference for FAST, but we get the final say
if (flags & IAudioFlinger::TRACK_FAST) {
if (
// not timed
(!isTimed) &&
// either of these use cases:
(
// use case 1: shared buffer with any frame count
(
(sharedBuffer != 0)
) ||
// use case 2: callback handler and frame count is default or at least as large as HAL
(
(tid != -1) &&
((frameCount == 0) ||
(frameCount >= (int) (mFrameCount * 2))) // * 2 is due to SRC jitter, see below
)
) &&
// PCM data
audio_is_linear_pcm(format) &&
// mono or stereo
( (channelMask == AUDIO_CHANNEL_OUT_MONO) ||
(channelMask == AUDIO_CHANNEL_OUT_STEREO) ) &&
#ifndef FAST_TRACKS_AT_NON_NATIVE_SAMPLE_RATE
// hardware sample rate
(sampleRate == mSampleRate) &&
#endif
// normal mixer has an associated fast mixer
hasFastMixer() &&
// there are sufficient fast track slots available
(mFastTrackAvailMask != 0)
// FIXME test that MixerThread for this fast track has a capable output HAL
// FIXME add a permission test also?
) {
// if frameCount not specified, then it defaults to fast mixer (HAL) frame count
if (frameCount == 0) {
frameCount = mFrameCount * 2; // FIXME * 2 is due to SRC jitter, should be computed
}
ALOGV("AUDIO_OUTPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d",
frameCount, mFrameCount);
} else {
ALOGW("AUDIO_OUTPUT_FLAG_FAST denied: isTimed=%d sharedBuffer=%p frameCount=%d "
"mFrameCount=%d format=%d isLinear=%d channelMask=%d sampleRate=%d mSampleRate=%d "
"hasFastMixer=%d tid=%d fastTrackAvailMask=%#x",
isTimed, sharedBuffer.get(), frameCount, mFrameCount, format,
audio_is_linear_pcm(format),
channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask);
flags &= ~IAudioFlinger::TRACK_FAST;
// For compatibility with AudioTrack calculation, buffer depth is forced
// to be at least 2 x the normal mixer frame count and cover audio hardware latency.
// This is probably too conservative, but legacy application code may depend on it.
// If you change this calculation, also review the start threshold which is related.
uint32_t latencyMs = mOutput->stream->get_latency(mOutput->stream);
uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
if (minBufCount < 2) {
minBufCount = 2;
}
int minFrameCount = mNormalFrameCount * minBufCount;
if (frameCount < minFrameCount) {
frameCount = minFrameCount;
}
}
}
if (mType == DIRECT) {
if ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM) {
if (sampleRate != mSampleRate || format != mFormat || channelMask != mChannelMask) {
ALOGE("createTrack_l() Bad parameter: sampleRate %d format %d, channelMask 0x%08x \""
"for output %p with format %d",
sampleRate, format, channelMask, mOutput, mFormat);
lStatus = BAD_VALUE;
goto Exit;
}
}
} else {
// Resampler implementation limits input sampling rate to 2 x output sampling rate.
if (sampleRate > mSampleRate*2) {
ALOGE("Sample rate out of range: %d mSampleRate %d", sampleRate, mSampleRate);
lStatus = BAD_VALUE;
goto Exit;
}
}
lStatus = initCheck();
if (lStatus != NO_ERROR) {
ALOGE("Audio driver not initialized.");
goto Exit;
}
{ // scope for mLock
Mutex::Autolock _l(mLock);
// all tracks in same audio session must share the same routing strategy otherwise
// conflicts will happen when tracks are moved from one output to another by audio policy
// manager
uint32_t strategy = AudioSystem::getStrategyForStream(streamType);
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> t = mTracks[i];
if (t != 0 && !t->isOutputTrack()) {
uint32_t actual = AudioSystem::getStrategyForStream(t->streamType());
if (sessionId == t->sessionId() && strategy != actual) {
ALOGE("createTrack_l() mismatched strategy; expected %u but found %u",
strategy, actual);
lStatus = BAD_VALUE;
goto Exit;
}
}
}
if (!isTimed) {
track = new Track(this, client, streamType, sampleRate, format,
channelMask, frameCount, sharedBuffer, sessionId, flags);
} else {
track = TimedTrack::create(this, client, streamType, sampleRate, format,
channelMask, frameCount, sharedBuffer, sessionId);
}
if (track == NULL || track->getCblk() == NULL || track->name() < 0) {
lStatus = NO_MEMORY;
goto Exit;
}
mTracks.add(track);
sp<EffectChain> chain = getEffectChain_l(sessionId);
if (chain != 0) {
ALOGV("createTrack_l() setting main buffer %p", chain->inBuffer());
track->setMainBuffer(chain->inBuffer());
chain->setStrategy(AudioSystem::getStrategyForStream(track->streamType()));
chain->incTrackCnt();
}
}
#ifdef HAVE_REQUEST_PRIORITY
if ((flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) {
pid_t callingPid = IPCThreadState::self()->getCallingPid();
// we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful,
// so ask activity manager to do this on our behalf
int err = requestPriority(callingPid, tid, 1);
if (err != 0) {
ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
1, callingPid, tid, err);
}
}
#endif
lStatus = NO_ERROR;
Exit:
if (status) {
*status = lStatus;
}
return track;
}
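// When a fast mixer is present, account for the additional buffering in the MonoPipe
// between the normal mixer and the fast mixer by adding the pipe's average depth
// (converted from frames to milliseconds) to the latency reported by the HAL.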
uint32_t AudioFlinger::MixerThread::correctLatency(uint32_t latency) const
{
if (mFastMixer != NULL) {
MonoPipe *pipe = (MonoPipe *)mPipeSink.get();
latency += (pipe->getAvgFrames() * 1000) / mSampleRate;
}
return latency;
}
uint32_t AudioFlinger::PlaybackThread::correctLatency(uint32_t latency) const
{
return latency;
}
uint32_t AudioFlinger::PlaybackThread::latency() const
{
Mutex::Autolock _l(mLock);
return latency_l();
}
uint32_t AudioFlinger::PlaybackThread::latency_l() const
{
if (initCheck() == NO_ERROR) {
return correctLatency(mOutput->stream->get_latency(mOutput->stream));
} else {
return 0;
}
}
void AudioFlinger::PlaybackThread::setMasterVolume(float value)
{
Mutex::Autolock _l(mLock);
mMasterVolume = value;
}
void AudioFlinger::PlaybackThread::setMasterMute(bool muted)
{
Mutex::Autolock _l(mLock);
setMasterMute_l(muted);
}
void AudioFlinger::PlaybackThread::setStreamVolume(audio_stream_type_t stream, float value)
{
Mutex::Autolock _l(mLock);
mStreamTypes[stream].volume = value;
}
void AudioFlinger::PlaybackThread::setStreamMute(audio_stream_type_t stream, bool muted)
{
Mutex::Autolock _l(mLock);
mStreamTypes[stream].mute = muted;
}
float AudioFlinger::PlaybackThread::streamVolume(audio_stream_type_t stream) const
{
Mutex::Autolock _l(mLock);
return mStreamTypes[stream].volume;
}
// addTrack_l() must be called with ThreadBase::mLock held
status_t AudioFlinger::PlaybackThread::addTrack_l(const sp<Track>& track)
{
status_t status = ALREADY_EXISTS;
// set retry count for buffer fill
track->mRetryCount = kMaxTrackStartupRetries;
if (mActiveTracks.indexOf(track) < 0) {
// the track is newly added, make sure it fills up all its
// buffers before playing. This is to ensure the client will
// effectively get the latency it requested.
track->mFillingUpStatus = Track::FS_FILLING;
track->mResetDone = false;
track->mPresentationCompleteFrames = 0;
mActiveTracks.add(track);
if (track->mainBuffer() != mMixBuffer) {
sp<EffectChain> chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
ALOGV("addTrack_l() starting track on chain %p for session %d", chain.get(), track->sessionId());
chain->incActiveTrackCnt();
}
}
status = NO_ERROR;
}
ALOGV("mWaitWorkCV.broadcast");
mWaitWorkCV.broadcast();
return status;
}
// destroyTrack_l() must be called with ThreadBase::mLock held
void AudioFlinger::PlaybackThread::destroyTrack_l(const sp<Track>& track)
{
track->mState = TrackBase::TERMINATED;
// active tracks are removed by threadLoop()
if (mActiveTracks.indexOf(track) < 0) {
removeTrack_l(track);
}
}
void AudioFlinger::PlaybackThread::removeTrack_l(const sp<Track>& track)
{
track->triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE);
mTracks.remove(track);
deleteTrackName_l(track->name());
// redundant as track is about to be destroyed, for dumpsys only
track->mName = -1;
if (track->isFastTrack()) {
int index = track->mFastIndex;
ALOG_ASSERT(0 < index && index < (int)FastMixerState::kMaxFastTracks);
ALOG_ASSERT(!(mFastTrackAvailMask & (1 << index)));
mFastTrackAvailMask |= 1 << index;
// redundant as track is about to be destroyed, for dumpsys only
track->mFastIndex = -1;
}
sp<EffectChain> chain = getEffectChain_l(track->sessionId());
if (chain != 0) {
chain->decTrackCnt();
}
}
String8 AudioFlinger::PlaybackThread::getParameters(const String8& keys)
{
String8 out_s8 = String8("");
char *s;
Mutex::Autolock _l(mLock);
if (initCheck() != NO_ERROR) {
return out_s8;
}
s = mOutput->stream->common.get_parameters(&mOutput->stream->common, keys.string());
out_s8 = String8(s);
free(s);
return out_s8;
}
// audioConfigChanged_l() must be called with AudioFlinger::mLock held
void AudioFlinger::PlaybackThread::audioConfigChanged_l(int event, int param) {
AudioSystem::OutputDescriptor desc;
void *param2 = NULL;
ALOGV("PlaybackThread::audioConfigChanged_l, thread %p, event %d, param %d", this, event, param);
switch (event) {
case AudioSystem::OUTPUT_OPENED:
case AudioSystem::OUTPUT_CONFIG_CHANGED:
desc.channels = mChannelMask;
desc.samplingRate = mSampleRate;
desc.format = mFormat;
desc.frameCount = mNormalFrameCount; // FIXME see AudioFlinger::frameCount(audio_io_handle_t)
desc.latency = latency();
param2 = &desc;
break;
case AudioSystem::STREAM_CONFIG_CHANGED:
param2 = &param;
case AudioSystem::OUTPUT_CLOSED:
default:
break;
}
mAudioFlinger->audioConfigChanged_l(event, mId, param2);
}
void AudioFlinger::PlaybackThread::readOutputParameters()
{
mSampleRate = mOutput->stream->common.get_sample_rate(&mOutput->stream->common);
mChannelMask = mOutput->stream->common.get_channels(&mOutput->stream->common);
mChannelCount = (uint16_t)popcount(mChannelMask);
mFormat = mOutput->stream->common.get_format(&mOutput->stream->common);
mFrameSize = audio_stream_frame_size(&mOutput->stream->common);
mFrameCount = mOutput->stream->common.get_buffer_size(&mOutput->stream->common) / mFrameSize;
if (mFrameCount & 15) {
ALOGW("HAL output buffer size is %u frames but AudioMixer requires multiples of 16 frames",
mFrameCount);
}
// Calculate size of normal mix buffer relative to the HAL output buffer size
double multiplier = 1.0;
if (mType == MIXER && (kUseFastMixer == FastMixer_Static || kUseFastMixer == FastMixer_Dynamic)) {
size_t minNormalFrameCount = (kMinNormalMixBufferSizeMs * mSampleRate) / 1000;
size_t maxNormalFrameCount = (kMaxNormalMixBufferSizeMs * mSampleRate) / 1000;
// round up minimum and round down maximum to nearest 16 frames to satisfy AudioMixer
minNormalFrameCount = (minNormalFrameCount + 15) & ~15;
maxNormalFrameCount = maxNormalFrameCount & ~15;
if (maxNormalFrameCount < minNormalFrameCount) {
maxNormalFrameCount = minNormalFrameCount;
}
multiplier = (double) minNormalFrameCount / (double) mFrameCount;
if (multiplier <= 1.0) {
multiplier = 1.0;
} else if (multiplier <= 2.0) {
if (2 * mFrameCount <= maxNormalFrameCount) {
multiplier = 2.0;
} else {
multiplier = (double) maxNormalFrameCount / (double) mFrameCount;
}
} else {
// prefer an even multiplier, for compatibility with doubling of fast tracks due to HAL SRC
// (it would be unusual for the normal mix buffer size to not be a multiple of the fast
// track frame count, but we sometimes have to do this to satisfy the maximum frame count
// constraint)
// FIXME this rounding up should not be done if no HAL SRC
uint32_t truncMult = (uint32_t) multiplier;
if ((truncMult & 1)) {
if ((truncMult + 1) * mFrameCount <= maxNormalFrameCount) {
++truncMult;
}
}
multiplier = (double) truncMult;
}
}
mNormalFrameCount = multiplier * mFrameCount;
// round up to nearest 16 frames to satisfy AudioMixer
mNormalFrameCount = (mNormalFrameCount + 15) & ~15;
ALOGI("HAL output buffer size %u frames, normal mix buffer size %u frames", mFrameCount, mNormalFrameCount);
delete[] mMixBuffer;
mMixBuffer = new int16_t[mNormalFrameCount * mChannelCount];
memset(mMixBuffer, 0, mNormalFrameCount * mChannelCount * sizeof(int16_t));
// force reconfiguration of effect chains and engines to take new buffer size and audio
// parameters into account
// Note that mLock is not held when readOutputParameters() is called from the constructor,
// but in that case nothing is done below, as no audio sessions have effects yet, so it
// doesn't matter.
// create a copy of mEffectChains as calling moveEffectChain_l() can reorder some effect chains
Vector< sp<EffectChain> > effectChains = mEffectChains;
for (size_t i = 0; i < effectChains.size(); i ++) {
mAudioFlinger->moveEffectChain_l(effectChains[i]->sessionId(), this, this, false);
}
}
status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames)
{
if (halFrames == NULL || dspFrames == NULL) {
return BAD_VALUE;
}
Mutex::Autolock _l(mLock);
if (initCheck() != NO_ERROR) {
return INVALID_OPERATION;
}
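// halFrames is an estimate of frames delivered to the HAL, derived from the bytes this thread
// has written; dspFrames is the render position actually reported back by the HAL.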
*halFrames = mBytesWritten / audio_stream_frame_size(&mOutput->stream->common);
return mOutput->stream->get_render_position(mOutput->stream, dspFrames);
}
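// Returns a bitmask: EFFECT_SESSION if this thread has an effect chain for sessionId,
// or'ed with TRACK_SESSION if at least one non-invalidated track uses that session.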
uint32_t AudioFlinger::PlaybackThread::hasAudioSession(int sessionId)
{
Mutex::Autolock _l(mLock);
uint32_t result = 0;
if (getEffectChain_l(sessionId) != 0) {
result = EFFECT_SESSION;
}
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
if (sessionId == track->sessionId() &&
!(track->mCblk->flags & CBLK_INVALID_MSK)) {
result |= TRACK_SESSION;
break;
}
}
return result;
}
uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId)
{
// session AUDIO_SESSION_OUTPUT_MIX is placed in the same strategy as the MUSIC stream so that
// it is moved to the correct output by the audio policy manager when A2DP is connected or disconnected
if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
}
for (size_t i = 0; i < mTracks.size(); i++) {
sp<Track> track = mTracks[i];
if (sessionId == track->sessionId() &&
!(track->mCblk->flags & CBLK_INVALID_MSK)) {
return AudioSystem::getStrategyForStream(track->streamType());
}
}
return AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
}
AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const
{
Mutex::Autolock _l(mLock);
return mOutput;
}
AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput()
{
Mutex::Autolock _l(mLock);
AudioStreamOut *output = mOutput;
mOutput = NULL;
// FIXME FastMixer might also have a raw ptr to mOutputSink;
// must push a NULL and wait for ack
mOutputSink.clear();
mPipeSink.clear();
mNormalSink.clear();
return output;
}
// this method must always be called either with ThreadBase mLock held or inside the thread loop
audio_stream_t* AudioFlinger::PlaybackThread::stream() const
{
if (mOutput == NULL) {
return NULL;
}
return &mOutput->stream->common;
}
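// Duration of one normal mix buffer in microseconds: frames * 1000 / sampleRate gives ms,
// then * 1000 converts to us.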
uint32_t AudioFlinger::PlaybackThread::activeSleepTimeUs() const
{
return (uint32_t)((uint32_t)((mNormalFrameCount * 1000) / mSampleRate) * 1000);
}
status_t AudioFlinger::PlaybackThread::setSyncEvent(const sp<SyncEvent>& event)
{
if (!isValidSyncEvent(event)) {
return BAD_VALUE;
}
Mutex::Autolock _l(mLock);
for (size_t i = 0; i < mTracks.size(); ++i) {
sp<Track> track = mTracks[i];
if (event->triggerSession() == track->sessionId()) {
track->setSyncEvent(event);
return NO_ERROR;
}
}
return NAME_NOT_FOUND;
}
bool AudioFlinger::PlaybackThread::isValidSyncEvent(const sp<SyncEvent>& event)
{
switch (event->type()) {
case AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE:
return true;
default:
break;
}
return false;
}
void AudioFlinger::PlaybackThread::threadLoop_removeTracks(const Vector< sp<Track> >& tracksToRemove)
{
size_t count = tracksToRemove.size();
if (CC_UNLIKELY(count)) {
for (size_t i = 0 ; i < count ; i++) {
const sp<Track>& track = tracksToRemove.itemAt(i);
if ((track->sharedBuffer() != 0) &&
(track->mState == TrackBase::ACTIVE || track->mState == TrackBase::RESUMING)) {
AudioSystem::stopOutput(mId, track->streamType(), track->sessionId());
}
}
}
}
// ----------------------------------------------------------------------------
AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, AudioStreamOut* output,
audio_io_handle_t id, uint32_t device, type_t type)
: PlaybackThread(audioFlinger, output, id, device, type),
// mAudioMixer below
#ifdef SOAKER
mSoaker(NULL),
#endif
// mFastMixer below
mFastMixerFutex(0)
// mOutputSink below
// mPipeSink below
// mNormalSink below
{
ALOGV("MixerThread() id=%d device=%d type=%d", id, device, type);
ALOGV("mSampleRate=%d, mChannelMask=%d, mChannelCount=%d, mFormat=%d, mFrameSize=%d, "
"mFrameCount=%d, mNormalFrameCount=%d",
mSampleRate, mChannelMask, mChannelCount, mFormat, mFrameSize, mFrameCount,
mNormalFrameCount);
mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate);
// FIXME - Current mixer implementation only supports stereo output
if (mChannelCount == 1) {
ALOGE("Invalid audio hardware channel count");
}
// create an NBAIO sink for the HAL output stream, and negotiate
mOutputSink = new AudioStreamOutSink(output->stream);
size_t numCounterOffers = 0;
const NBAIO_Format offers[1] = {Format_from_SR_C(mSampleRate, mChannelCount)};
ssize_t index = mOutputSink->negotiate(offers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
// initialize fast mixer depending on configuration
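// FastMixer_Never/Always force the choice outright; Static and Dynamic both start from whether
// the HAL buffer period is shorter than the normal mix buffer (Dynamic presumably allowing the
// decision to be revisited at run time).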
bool initFastMixer;
switch (kUseFastMixer) {
case FastMixer_Never:
initFastMixer = false;
break;
case FastMixer_Always:
initFastMixer = true;
break;
case FastMixer_Static:
case FastMixer_Dynamic:
initFastMixer = mFrameCount < mNormalFrameCount;
break;
}
if (initFastMixer) {
// create a MonoPipe to connect our submix to FastMixer
NBAIO_Format format = mOutputSink->format();
// This pipe depth compensates for scheduling latency of the normal mixer thread.
// When it wakes up after a maximum latency, it runs a few cycles quickly before
// finally blocking. Note the pipe implementation rounds up the request to a power of 2.
MonoPipe *monoPipe = new MonoPipe(mNormalFrameCount * 4, format, true /*writeCanBlock*/);
const NBAIO_Format offers[1] = {format};
size_t numCounterOffers = 0;
ssize_t index = monoPipe->negotiate(offers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
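// Target average pipe fill: deeper (7/8 of the pipe's maximum) when the low bit of mScreenState
// is set (assumed here to mean the screen is off, trading latency for fewer wakeups), otherwise
// about two normal mix buffers.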
monoPipe->setAvgFrames((mScreenState & 1) ?
(monoPipe->maxFrames() * 7) / 8 : mNormalFrameCount * 2);
mPipeSink = monoPipe;
#ifdef TEE_SINK_FRAMES
// create a Pipe to archive a copy of FastMixer's output for dumpsys
Pipe *teeSink = new Pipe(TEE_SINK_FRAMES, format);
numCounterOffers = 0;
index = teeSink->negotiate(offers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
mTeeSink = teeSink;
PipeReader *teeSource = new PipeReader(*teeSink);
numCounterOffers = 0;
index = teeSource->negotiate(offers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
mTeeSource = teeSource;
#endif
#ifdef SOAKER
// create a soaker as a workaround for governor issues
mSoaker = new Soaker();
// FIXME Soaker should only run when needed, i.e. when FastMixer is not in COLD_IDLE
mSoaker->run("Soaker", PRIORITY_LOWEST);
#endif
// create fast mixer and configure it initially with just one fast track for our submix
mFastMixer = new FastMixer();
FastMixerStateQueue *sq = mFastMixer->sq();
#ifdef STATE_QUEUE_DUMP
sq->setObserverDump(&mStateQueueObserverDump);
sq->setMutatorDump(&mStateQueueMutatorDump);
#endif
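// StateQueue editing protocol as used here: begin() returns the state to modify, end() completes
// the edit, and push() publishes it to the FastMixer; BLOCK_UNTIL_PUSHED makes push() wait until
// the new state has actually been queued rather than returning immediately.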
FastMixerState *state = sq->begin();
FastTrack *fastTrack = &state->mFastTracks[0];
// wrap the source side of the MonoPipe to make it an AudioBufferProvider
fastTrack->mBufferProvider = new SourceAudioBufferProvider(new MonoPipeReader(monoPipe));
fastTrack->mVolumeProvider = NULL;
fastTrack->mGeneration++;
state->mFastTracksGen++;
state->mTrackMask = 1;
// fast mixer will use the HAL output sink
state->mOutputSink = mOutputSink.get();
state->mOutputSinkGen++;
state->mFrameCount = mFrameCount;
state->mCommand = FastMixerState::COLD_IDLE;
// already done in constructor initialization list
//mFastMixerFutex = 0;
state->mColdFutexAddr = &mFastMixerFutex;
state->mColdGen++;
state->mDumpState = &mFastMixerDumpState;
state->mTeeSink = mTeeSink.get();
sq->end();
sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
// start the fast mixer
mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO);
#ifdef HAVE_REQUEST_PRIORITY
pid_t tid = mFastMixer->getTid();
int err = requestPriority(getpid_cached, tid, 2);
if (err != 0) {
ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
2, getpid_cached, tid, err);
}
#endif
#ifdef AUDIO_WATCHDOG
// create and start the watchdog
mAudioWatchdog = new AudioWatchdog();
mAudioWatchdog->setDump(&mAudioWatchdogDump);
mAudioWatchdog->run("AudioWatchdog", PRIORITY_URGENT_AUDIO);
tid = mAudioWatchdog->getTid();
err = requestPriority(getpid_cached, tid, 1);
if (err != 0) {
ALOGW("Policy SCHED_FIFO priority %d is unavailable for pid %d tid %d; error %d",
1, getpid_cached, tid, err);
}
#endif
} else {
mFastMixer = NULL;
}
switch (kUseFastMixer) {
case FastMixer_Never:
case FastMixer_Dynamic:
mNormalSink = mOutputSink;
break;
case FastMixer_Always:
mNormalSink = mPipeSink;
break;
case FastMixer_Static:
mNormalSink = initFastMixer ? mPipeSink : mOutputSink;
break;
}
}
AudioFlinger::MixerThread::~MixerThread()
{
if (mFastMixer != NULL) {
FastMixerStateQueue *sq = mFastMixer->sq();
FastMixerState *state = sq->begin();
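// When the FastMixer is in COLD_IDLE it decrements mFastMixerFutex and waits on it; incrementing
// the futex and issuing a wake (needed only if the previous value was -1, i.e. it is actually
// asleep) lets it run again and observe the EXIT command set below.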
if (state->mCommand == FastMixerState::COLD_IDLE) {
int32_t old = android_atomic_inc(&mFastMixerFutex);
if (old == -1) {
__futex_syscall3(&mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1);
}
}
state->mCommand = FastMixerState::EXIT;
sq->end();
sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
mFastMixer->join();
// Though the fast mixer thread has exited, its state queue is still valid.
// We'll use it to extract the final state, which contains one remaining fast track
// corresponding to our sub-mix.
state = sq->begin();
ALOG_ASSERT(state->mTrackMask == 1);
FastTrack *fastTrack = &state->mFastTracks[0];
ALOG_ASSERT(fastTrack->mBufferProvider != NULL);
delete fastTrack->mBufferProvider;
sq->end(false /*didModify*/);
delete mFastMixer;
#ifdef SOAKER
if (mSoaker != NULL) {
mSoaker->requestExitAndWait();
}
delete mSoaker;
#endif
if (mAudioWatchdog != 0) {
mAudioWatchdog->requestExit();
mAudioWatchdog->requestExitAndWait();
mAudioWatchdog.clear();
}
}
delete mAudioMixer;
}
class CpuStats {
public:
CpuStats();
void sample(const String8 &title);
#ifdef DEBUG_CPU_USAGE
private:
ThreadCpuUsage mCpuUsage; // instantaneous thread CPU usage in wall clock ns
CentralTendencyStatistics mWcStats; // statistics on thread CPU usage in wall clock ns
CentralTendencyStatistics mHzStats; // statistics on thread CPU usage in cycles
int mCpuNum; // thread's current CPU number
int mCpukHz; // frequency of thread's current CPU in kHz
#endif
};
CpuStats::CpuStats()
#ifdef DEBUG_CPU_USAGE
: mCpuNum(-1), mCpukHz(-1)
#endif
{
}
void CpuStats::sample(const String8 &title) {
#ifdef DEBUG_CPU_USAGE
// get current thread's delta CPU time in wall clock ns
double wcNs;
bool valid = mCpuUsage.sampleAndEnable(wcNs);
// record sample for wall clock statistics
if (valid) {
mWcStats.sample(wcNs);
}
// get the current CPU number
int cpuNum = sched_getcpu();
// get the current CPU frequency in kHz
int cpukHz = mCpuUsage.getCpukHz(cpuNum);
// check if either CPU number or frequency changed
if (cpuNum != mCpuNum || cpukHz != mCpukHz) {
mCpuNum = cpuNum;
mCpukHz = cpukHz;
// ignore sample for purposes of cycles
valid = false;
}
// if no change in CPU number or frequency, then record sample for cycle statistics
if (valid && mCpukHz > 0) {
double cycles = wcNs * cpukHz * 0.000001;
mHzStats.sample(cycles);
}
unsigned n = mWcStats.n();
// mCpuUsage.elapsed() is expensive, so don't call it every loop
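// (n & 127) == 1 is true on the first sample and then once every 128 samples thereafter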
if ((n & 127) == 1) {
long long elapsed = mCpuUsage.elapsed();
if (elapsed >= DEBUG_CPU_USAGE * 1000000000LL) {
double perLoop = elapsed / (double) n;
double perLoop100 = perLoop * 0.01;
double perLoop1k = perLoop * 0.001;
double mean = mWcStats.mean();
double stddev = mWcStats.stddev();
double minimum = mWcStats.minimum();
double maximum = mWcStats.maximum();
double meanCycles = mHzStats.mean();
double stddevCycles = mHzStats.stddev();
double minCycles = mHzStats.minimum();
double maxCycles = mHzStats.maximum();
mCpuUsage.resetElapsed();
mWcStats.reset();
mHzStats.reset();
ALOGD("CPU usage for %s over past %.1f secs\n"
" (%u mixer loops at %.1f mean ms per loop):\n"
" us per mix loop: mean=%.0f stddev=%.0f min=%.0f max=%.0f\n"
" %% of wall: mean=%.1f stddev=%.1f min=%.1f max=%.1f\n"
" MHz: mean=%.1f, stddev=%.1f, min=%.1f max=%.1f",
title.string(),
elapsed * .000000001, n, perLoop * .000001,
mean * .001,
stddev * .001,
minimum * .001,
maximum * .001,
mean / perLoop100,
stddev / perLoop100,
minimum / perLoop100,
maximum / perLoop100,
meanCycles / perLoop1k,
stddevCycles / perLoop1k,
minCycles / perLoop1k,
maxCycles / perLoop1k);
}
}
#endif
}
void AudioFlinger::PlaybackThread::checkSilentMode_l()
{
if (!mMasterMute) {
char value[PROPERTY_VALUE_MAX];
if (property_get("ro.audio.silent", value, "0") > 0) {
char *endptr;
unsigned long ul = strtoul(value, &endptr, 0);
if (*endptr == '\0' && ul != 0) {
ALOGD("Silence is golden");
// The setprop command will not allow a property to be changed after
// the first time it is set, so we don't have to worry about un-muting.
setMasterMute_l(true);
}
}
}
}
bool AudioFlinger::PlaybackThread::threadLoop()
{
Vector< sp<Track> > tracksToRemove;
standbyTime = systemTime();
// MIXER
nsecs_t lastWarning = 0;
if (mType == MIXER) {
longStandbyExit = false;
}
// DUPLICATING
// FIXME could this be made local to the while loop?
writeFrames = 0;
cacheParameters_l();
sleepTime = idleSleepTime;
if (mType == MIXER) {
sleepTimeShift = 0;
}
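// sleepTimeShift appears to be a MixerThread-only backoff that shortens sleeps after consecutive
// underruns (see threadLoop_sleepTime()); resetting it here starts from a full-length sleep.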
CpuStats cpuStats;
const String8 myName(String8::format("thread %p type %d TID %d", this, mType, gettid()));
acquireWakeLock();
while (!exitPending())
{
cpuStats.sample(myName);
Vector< sp<EffectChain> > effectChains;
processConfigEvents();
{ // scope for mLock
Mutex::Autolock _l(mLock);
if (checkForNewParameters_l()) {
cacheParameters_l();
}
saveOutputTracks();
// put audio hardware into standby after short delay
if (CC_UNLIKELY((!mActiveTracks.size() && systemTime() > standbyTime) ||
mSuspended > 0)) {
if (!mStandby) {
threadLoop_standby();
mStandby = true;
mBytesWritten = 0;
}
if (!mActiveTracks.size() && mConfigEvents.isEmpty()) {
// we're about to wait, flush the binder command buffer
IPCThreadState::self()->flushCommands();
clearOutputTracks();
if (exitPending()) break;
releaseWakeLock_l();
// wait until we have something to do...
ALOGV("%s going to sleep", myName.string());
mWaitWorkCV.wait(mLock);
ALOGV("%s waking up", myName.string());
acquireWakeLock_l();
mMixerStatus = MIXER_IDLE;
mMixerStatusIgnoringFastTracks = MIXER_IDLE;
checkSilentMode_l();
standbyTime = systemTime() + standbyDelay;
sleepTime = idleSleepTime;
if (mType == MIXER) {
sleepTimeShift = 0;
}
continue;
}
}
// mMixerStatusIgnoringFastTracks is also updated internally
mMixerStatus = prepareTracks_l(&tracksToRemove);
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
// or modified if an effect is created or deleted
lockEffectChains_l(effectChains);
}
if (CC_LIKELY(mMixerStatus == MIXER_TRACKS_READY)) {
threadLoop_mix();
} else {
threadLoop_sleepTime();
}
if (mSuspended > 0) {
sleepTime = suspendSleepTimeUs();
}
// only process effects if we're going to write
if (sleepTime == 0) {
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
}
}
// enable changes in effect chain
unlockEffectChains(effectChains);
// sleepTime == 0 means we must write to audio hardware
if (sleepTime == 0) {
threadLoop_write();
if (mType == MIXER) {
// write blocked detection
nsecs_t now = systemTime();
nsecs_t delta = now - mLastWriteTime;
if (!mStandby && delta > maxPeriod) {
mNumDelayedWrites++;
if ((now - lastWarning) > kWarningThrottleNs) {
#if defined(ATRACE_TAG) && (ATRACE_TAG != ATRACE_TAG_NEVER)
ScopedTrace st(ATRACE_TAG, "underrun");
#endif
ALOGW("write blocked for %llu msecs, %d delayed writes, thread %p",
ns2ms(delta), mNumDelayedWrites, this);
lastWarning = now;
}
// FIXME this is broken: longStandbyExit should be handled outside the if() and with
// a different threshold. Or removed completely, for what it's worth anyway...
if (mStandby) {
longStandbyExit = true;
}
}
}
mStandby = false;
} else {
usleep(sleepTime);
}
// Finally let go of removed track(s), without the lock held
// since we can't guarantee the destructors won't acquire that
// same lock. This will also mutate and push a new fast mixer state.
threadLoop_removeTracks(tracksToRemove);
tracksToRemove.clear();
// FIXME I don't understand the need for this here;
// it was in the original code but maybe the
// assignment in saveOutputTracks() makes this unnecessary?
clearOutputTracks();
// Effect chains will be actually deleted here if they were removed from
// mEffectChains list during mixing or effects processing
effectChains.clear();
// FIXME Note that the above .clear() is no longer necessary since effectChains
// is now local to this block, but we'll keep it for now (at least until the merge is done).
}
if (mType == MIXER || mType == DIRECT) {
// put output stream into standby mode
if (!mStandby) {
mOutput->stream->common.standby(&mOutput->stream->common);
}
}
if (mType == DUPLICATING) {