/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.hardware.audio@4.0;
import android.hardware.audio.common@4.0;
enum Result : int32_t {
OK,
NOT_INITIALIZED,
INVALID_ARGUMENTS,
INVALID_STATE,
/**
* Methods marked as "Optional method" must return this result value
* if the operation is not supported by the HAL.
*/
NOT_SUPPORTED
};
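/*
 * Example (non-normative): a HAL implementation typically maps its internal
 * status codes onto Result. The sketch below is only an illustration assuming
 * POSIX errno values and <errno.h>; the actual mapping is implementation
 * specific.
 *
 *   Result analyzeStatus(int status) {   // hypothetical helper
 *       switch (status) {
 *           case 0:       return Result::OK;
 *           case -EINVAL: return Result::INVALID_ARGUMENTS;
 *           case -ENODEV: return Result::NOT_INITIALIZED;
 *           case -ENOSYS: return Result::NOT_SUPPORTED;  // optional method not implemented
 *           default:      return Result::INVALID_STATE;
 *       }
 *   }
 */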
@export(name="audio_drain_type_t", value_prefix="AUDIO_DRAIN_")
enum AudioDrain : int32_t {
/** drain() returns when all data has been played. */
ALL,
/**
* drain() returns a short time before all data from the current track has
* been played to give time for a gapless track switch.
*/
EARLY_NOTIFY
};
/**
* A substitute for POSIX timespec.
*/
struct TimeSpec {
uint64_t tvSec; // seconds
uint64_t tvNSec; // nanoseconds
};
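/*
 * Example (non-normative): a TimeSpec is normally consumed by collapsing it
 * into a single nanosecond count, as in this hypothetical C++ helper:
 *
 *   uint64_t timeSpecToNs(const TimeSpec& ts) {
 *       return ts.tvSec * 1000000000ull + ts.tvNSec;
 *   }
 */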
/**
* IEEE 802 MAC address.
*/
typedef uint8_t[6] MacAddress;
struct ParameterValue {
string key;
string value;
};
/**
* Specifies a device address for cases when several devices of the same type
* can be connected (e.g. BT A2DP, USB).
*/
struct DeviceAddress {
AudioDevice device; // discriminator
union Address {
MacAddress mac; // used for BLUETOOTH_A2DP_*
uint8_t[4] ipv4; // used for IP
struct Alsa {
int32_t card;
int32_t device;
} alsa; // used for USB_*
} address;
string busAddress; // used for BUS
string rSubmixAddress; // used for REMOTE_SUBMIX
};
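/*
 * Example (non-normative): filling a DeviceAddress on the client side using
 * the generated C++ types. The ALSA card/device numbers and the MAC address
 * below are made up for illustration.
 *
 *   DeviceAddress usb{};
 *   usb.device = AudioDevice::IN_USB_DEVICE;
 *   usb.address.alsa.card = 1;
 *   usb.address.alsa.device = 0;
 *
 *   DeviceAddress a2dp{};
 *   a2dp.device = AudioDevice::OUT_BLUETOOTH_A2DP;
 *   const uint8_t mac[6] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55};
 *   memcpy(a2dp.address.mac.data(), mac, sizeof(mac));
 */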
enum MmapBufferFlag : uint32_t {
NONE = 0x0,
/**
* Set when the buffer can be securely shared with untrusted applications
* through the AAudio exclusive mode.
* Only set this flag if applications are restricted from accessing the
* memory surrounding the audio data buffer by a kernel mechanism.
* See Linux kernel's dma_buf.
*/
APPLICATION_SHAREABLE = 0x1,
};
/**
* Mmap buffer descriptor returned by IStream.createMmapBuffer().
* Used by streams opened in mmap mode.
*/
struct MmapBufferInfo {
/** Mmap memory buffer */
memory sharedMemory;
/** Total buffer size in frames */
uint32_t bufferSizeFrames;
/** Transfer size granularity in frames */
uint32_t burstSizeFrames;
/** Attributes describing the buffer. */
bitfield<MmapBufferFlag> flags;
};
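/*
 * Example (non-normative): interpreting an MmapBufferInfo on the client side.
 * The 48 kHz stereo 16-bit PCM configuration is an assumption; mapMemory()
 * and IMemory come from libhidlmemory (<hidlmemory/mapping.h>).
 *
 *   sp<IMemory> shmem = mapMemory(info.sharedMemory);
 *   const size_t frameSizeBytes = 2 * sizeof(int16_t);                  // stereo, 16-bit PCM
 *   const size_t bufferBytes = info.bufferSizeFrames * frameSizeBytes;  // total shared buffer
 *   const size_t burstBytes  = info.burstSizeFrames  * frameSizeBytes;  // preferred transfer size
 */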
/**
* Mmap buffer read/write position returned by IStream.getMmapPosition().
* Used by streams opened in mmap mode.
*/
struct MmapPosition {
int64_t timeNanoseconds; // time stamp in ns, CLOCK_MONOTONIC
int32_t positionFrames; // increasing 32 bit frame count reset when IStream.stop() is called
};
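/*
 * Example (non-normative): a client can extrapolate the current frame position
 * from the last MmapPosition reported by IStream.getMmapPosition(). Here "pos"
 * is that MmapPosition, sampleRate is assumed to be the stream's rate in Hz,
 * and systemTime() is the libutils CLOCK_MONOTONIC helper (<utils/Timers.h>).
 *
 *   int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
 *   int64_t elapsedFrames = (nowNs - pos.timeNanoseconds) * sampleRate / 1000000000;
 *   int64_t currentFrame = pos.positionFrames + elapsedFrames;
 */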
/**
* The message queue flags used to synchronize reads and writes from
* message queues used by StreamIn and StreamOut.
*/
enum MessageQueueFlagBits : uint32_t {
NOT_EMPTY = 1 << 0,
NOT_FULL = 1 << 1
};
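/*
 * Example (non-normative): these bits are used with a libfmq EventFlag attached
 * to the data message queue. A minimal sketch of the writer side, assuming a
 * synchronized MessageQueue<uint8_t> named dataMQ that was created with an
 * event flag word:
 *
 *   EventFlag* efGroup = nullptr;
 *   EventFlag::createEventFlag(dataMQ.getEventFlagWord(), &efGroup);
 *   if (dataMQ.write(buffer, numBytes)) {
 *       efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
 *   }
 *
 * The reader side blocks with efGroup->wait() on the same NOT_EMPTY bit.
 */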
/** Metadata of a playback track for a StreamOut. */
struct PlaybackTrackMetadata {
AudioUsage usage;
AudioContentType contentType;
/**
* Positive linear gain applied to the track samples: 0 means muted, 1 means
* no attenuation, 2 means double amplification, and so on.
* Must not be negative.
*/
float gain;
};
/** Metadata of the source of a StreamOut. */
struct SourceMetadata {
vec<PlaybackTrackMetadata> tracks;
};
/** Metadata of a record track for a StreamIn. */
struct RecordTrackMetadata {
AudioSource source;
/**
* Positive linear gain applied to the track samples: 0 means muted, 1 means
* no attenuation, 2 means double amplification, and so on.
* Must not be negative.
*/
float gain;
};
/** Metadata of the sink of a StreamIn. */
struct SinkMetadata {
vec<RecordTrackMetadata> tracks;
};
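/*
 * Example (non-normative): building the metadata passed when opening streams.
 * The usages, content types, and gain values below are arbitrary illustration
 * values using the generated C++ types.
 *
 *   SourceMetadata sourceMetadata;
 *   sourceMetadata.tracks = {
 *       {AudioUsage::MEDIA, AudioContentType::MUSIC, 1.0f},                 // unity gain
 *       {AudioUsage::NOTIFICATION, AudioContentType::SONIFICATION, 0.5f}    // about -6 dB
 *   };
 *
 *   SinkMetadata sinkMetadata;
 *   sinkMetadata.tracks = {{AudioSource::MIC, 1.0f}};
 */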
/*
* Microphone information
*/
/**
* A 3D point used to represent position or orientation of a microphone.
*
* Position: Coordinates of the microphone's capsule, in meters, from the
* bottom-left-back corner of the bounding box of the Android device in natural
* orientation (PORTRAIT for phones, LANDSCAPE for tablets, TVs, etc.).
* The orientation must match the one reported by the Display.getRotation() API.
*
* Orientation: Normalized vector to signal the main orientation of the
* microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1
*/
struct AudioMicrophoneCoordinate {
float x;
float y;
float z;
};
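/*
 * Example (non-normative): when used as an orientation, the coordinate must be
 * a unit vector. A hypothetical C++ helper (using std::sqrt from <cmath>) that
 * normalizes a raw direction before it is reported:
 *
 *   AudioMicrophoneCoordinate normalize(AudioMicrophoneCoordinate v) {
 *       const float mag = std::sqrt(v.x * v.x + v.y * v.y + v.z * v.z);
 *       v.x /= mag; v.y /= mag; v.z /= mag;   // now sqrt(x^2 + y^2 + z^2) == 1
 *       return v;
 *   }
 */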
/**
* Enum to identify the type of channel mapping for active microphones.
* Used channels further identify whether the microphone signal has undergone
* any significant processing (e.g. high pass filtering, dynamic compression).
* Simple processing such as constant gain adjustment must be reported as DIRECT.
*/
enum AudioMicrophoneChannelMapping : uint32_t {
UNUSED = 0, /* Channel not used */
DIRECT = 1, /* Channel used and signal not processed */
PROCESSED = 2, /* Channel used and signal has been processed */
};
/**
* Enum to identify locations of microphones with regard to the body of the
* Android device.
*/
enum AudioMicrophoneLocation : uint32_t {
UNKNOWN = 0,
MAINBODY = 1,
MAINBODY_MOVABLE = 2,
PERIPHERAL = 3,
};
/**
* Identifier to help group related microphones together,
* e.g. the microphones of an array should belong to the same group.
*/
typedef int32_t AudioMicrophoneGroup;
/**
* Enum with standard polar patterns of microphones
*/
enum AudioMicrophoneDirectionality : uint32_t {
UNKNOWN = 0,
OMNI = 1,
BI_DIRECTIONAL = 2,
CARDIOID = 3,
HYPER_CARDIOID = 4,
SUPER_CARDIOID = 5,
};
/**
* A (frequency, level) pair. Used to represent frequency response.
*/
struct AudioFrequencyResponsePoint {
/** In Hz */
float frequency;
/** In dB */
float level;
};
/**
* Structure used by the HAL to describe a microphone's characteristics.
* Used by StreamIn and Device.
*/
struct MicrophoneInfo {
/** Unique alphanumeric id for the microphone. Guaranteed to be the same
* even after rebooting.
*/
string deviceId;
/**
* Device specific information
*/
DeviceAddress deviceAddress;
/** Each element of the vector must describe the channel with the same
* index.
*/
vec<AudioMicrophoneChannelMapping> channelMapping;
/** Location of the microphone with regard to the body of the device. */
AudioMicrophoneLocation location;
/** Identifier to help group related microphones together,
* e.g. the microphones of an array should belong to the same group.
*/
AudioMicrophoneGroup group;
/** Index of this microphone within the group.
* (group, index) must be unique within the same device.
*/
uint32_t indexInTheGroup;
/** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL */
float sensitivity;
/** Level in dB of the max SPL supported at 1000 Hz */
float maxSpl;
/** Level in dB of the min SPL supported at 1000 Hz */
float minSpl;
/** Standard polar pattern of the microphone */
AudioMicrophoneDirectionality directionality;
/** Vector with the frequency response of the microphone, with points ordered
* from low to high frequencies.
* Levels are in dB, relative to the level at 1000 Hz.
*/
vec<AudioFrequencyResponsePoint> frequencyResponse;
/** Position of the microphone's capsule in meters, from the
* bottom-left-back corner of the bounding box of the device.
*/
AudioMicrophoneCoordinate position;
/** Normalized vector to signal the main orientation of the microphone's
* capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1.
*/
AudioMicrophoneCoordinate orientation;
};
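/*
 * Example (non-normative): how a HAL implementation might populate a
 * MicrophoneInfo for a built-in bottom microphone. Every value below
 * (id, sensitivity, SPL limits, coordinates, response points) is made up
 * for illustration only.
 *
 *   MicrophoneInfo mic;
 *   mic.deviceId = "builtin_mic_bottom";
 *   mic.deviceAddress.device = AudioDevice::IN_BUILTIN_MIC;
 *   mic.channelMapping = {AudioMicrophoneChannelMapping::DIRECT,
 *                         AudioMicrophoneChannelMapping::UNUSED};  // capsule feeds channel 0
 *   mic.location = AudioMicrophoneLocation::MAINBODY;
 *   mic.group = 0;
 *   mic.indexInTheGroup = 0;
 *   mic.sensitivity = -37.0f;                                  // dBFS at 1 kHz, 94 dB SPL
 *   mic.maxSpl = 128.5f;
 *   mic.minSpl = 28.5f;
 *   mic.directionality = AudioMicrophoneDirectionality::OMNI;
 *   mic.frequencyResponse = {{100.0f, -0.5f}, {1000.0f, 0.0f}, {10000.0f, -1.0f}};
 *   mic.position = {0.01f, 0.0f, 0.005f};                      // meters from bottom-left-back corner
 *   mic.orientation = {0.0f, 0.0f, 1.0f};                      // unit vector
 */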