/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@5.0;

import android.hardware.audio.common@5.0;

/** Common result codes returned by the audio HAL methods. */
enum Result : int32_t {
    OK,
    NOT_INITIALIZED,
    INVALID_ARGUMENTS,
    INVALID_STATE,
    /**
     * Methods marked as "Optional method" must return this result value
     * if the operation is not supported by HAL.
     */
    NOT_SUPPORTED
};

@export(name="audio_drain_type_t", value_prefix="AUDIO_DRAIN_")
enum AudioDrain : int32_t {
    /** drain() returns when all data has been played. */
    ALL,
    /**
     * drain() returns a short time before all data from the current track has
     * been played to give time for gapless track switch.
     */
    EARLY_NOTIFY
};

/**
 * A substitute for POSIX timespec.
 */
struct TimeSpec {
    uint64_t tvSec;   // seconds
    uint64_t tvNSec;  // nanoseconds
};

/** A generic key-value pair used for get/set parameter style APIs. */
struct ParameterValue {
    string key;
    string value;
};

enum MmapBufferFlag : uint32_t {
    NONE    = 0x0,
    /**
     * If the buffer can be securely shared to untrusted applications
     * through the AAudio exclusive mode.
     * Only set this flag if applications are restricted from accessing the
     * memory surrounding the audio data buffer by a kernel mechanism.
     * See Linux kernel's dma_buf.
     */
    APPLICATION_SHAREABLE    = 0x1,
};

/**
 * Mmap buffer descriptor returned by IStream.createMmapBuffer().
 * Used by streams opened in mmap mode.
 */
struct MmapBufferInfo {
    /** Mmap memory buffer */
    memory  sharedMemory;
    /** Total buffer size in frames */
    uint32_t bufferSizeFrames;
    /** Transfer size granularity in frames */
    uint32_t burstSizeFrames;
    /** Attributes describing the buffer. */
    // FIX: the bitfield must carry its element type; a bare `bitfield`
    // does not compile in HIDL.
    bitfield<MmapBufferFlag> flags;
};

/**
 * Mmap buffer read/write position returned by IStream.getMmapPosition().
 * Used by streams opened in mmap mode.
 */
struct MmapPosition {
    int64_t  timeNanoseconds; // time stamp in ns, CLOCK_MONOTONIC
    int32_t  positionFrames;  // increasing 32 bit frame count reset when IStream.stop() is called
};

/**
 * The message queue flags used to synchronize reads and writes from
 * message queues used by StreamIn and StreamOut.
 */
enum MessageQueueFlagBits : uint32_t {
    NOT_EMPTY = 1 << 0,
    NOT_FULL = 1 << 1
};

/*
 * Microphone information
 *
 */

/**
 * A 3D point used to represent position or orientation of a microphone.
 *
 * Position: Coordinates of the microphone's capsule, in meters, from the
 * bottom-left-back corner of the bounding box of android device in natural
 * orientation (PORTRAIT for phones, LANDSCAPE for tablets, tvs, etc).
 * The orientation must match that reported by the API Display.getRotation().
 *
 * Orientation: Normalized vector to signal the main orientation of the
 * microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1
 */
struct AudioMicrophoneCoordinate {
    float x;
    float y;
    float z;
};

/**
 * Enum to identify the type of channel mapping for active microphones.
 * Used channels further identify if the microphone has any significative
 * process (e.g. High Pass Filtering, dynamic compression)
 * Simple processing as constant gain adjustment must be DIRECT.
 */
enum AudioMicrophoneChannelMapping : uint32_t {
    UNUSED      = 0, /* Channel not used */
    DIRECT      = 1, /* Channel used and signal not processed */
    PROCESSED   = 2, /* Channel used and signal has some process */
};

/**
 * Enum to identify locations of microphones in regards to the body of the
 * android device.
 */
enum AudioMicrophoneLocation : uint32_t {
    UNKNOWN             = 0,
    MAINBODY            = 1,
    MAINBODY_MOVABLE    = 2,
    PERIPHERAL          = 3,
};

/**
 * Identifier to help group related microphones together
 * e.g. microphone arrays should belong to the same group
 */
typedef int32_t AudioMicrophoneGroup;

/**
 * Enum with standard polar patterns of microphones
 */
enum AudioMicrophoneDirectionality : uint32_t {
    UNKNOWN         = 0,
    OMNI            = 1,
    BI_DIRECTIONAL  = 2,
    CARDIOID        = 3,
    HYPER_CARDIOID  = 4,
    SUPER_CARDIOID  = 5,
};

/**
 * A (frequency, level) pair. Used to represent frequency response.
 */
struct AudioFrequencyResponsePoint {
    /** In Hz */
    float frequency;
    /** In dB */
    float level;
};

/**
 * Structure used by the HAL to describe microphone's characteristics
 * Used by StreamIn and Device
 */
struct MicrophoneInfo {
    /** Unique alphanumeric id for microphone. Guaranteed to be the same
     * even after rebooting.
     */
    string deviceId;
    /**
     * Device specific information
     */
    DeviceAddress deviceAddress;
    /** Each element of the vector must describe the channel with the same
     * index.
     */
    // FIX: vec requires an element type parameter; a bare `vec` is invalid HIDL.
    vec<AudioMicrophoneChannelMapping> channelMapping;
    /** Location of the microphone in regard to the body of the device */
    AudioMicrophoneLocation location;
    /** Identifier to help group related microphones together
     * e.g. microphone arrays should belong to the same group
     */
    AudioMicrophoneGroup group;
    /** Index of this microphone within the group.
     * (group, index) must be unique within the same device.
     */
    uint32_t indexInTheGroup;
    /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL */
    float sensitivity;
    /** Level in dB of the max SPL supported at 1000 Hz */
    float maxSpl;
    /** Level in dB of the min SPL supported at 1000 Hz */
    float minSpl;
    /** Standard polar pattern of the microphone */
    AudioMicrophoneDirectionality directionality;
    /** Vector with ordered frequency responses (from low to high frequencies)
     * with the frequency response of the microphone.
     * Levels are in dB, relative to level at 1000 Hz
     */
    // FIX: vec requires an element type parameter; a bare `vec` is invalid HIDL.
    vec<AudioFrequencyResponsePoint> frequencyResponse;
    /** Position of the microphone's capsule in meters, from the
     * bottom-left-back corner of the bounding box of device.
     */
    AudioMicrophoneCoordinate position;
    /** Normalized point to signal the main orientation of the microphone's
     * capsule. sqrt(x^2 + y^2 + z^2) = 1
     */
    AudioMicrophoneCoordinate orientation;
};

/**
 * Constants used by the HAL to determine how to select microphones and process those inputs in
 * order to optimize for capture in the specified direction.
 *
 * MicrophoneDirection Constants are defined in MicrophoneDirection.java.
 */
@export(name="audio_microphone_direction_t", value_prefix="MIC_DIRECTION_")
enum MicrophoneDirection : int32_t {
    /**
     * Don't do any directionality processing of the activated microphone(s).
     */
    UNSPECIFIED = 0,
    /**
     * Optimize capture for audio coming from the screen-side of the device.
     */
    FRONT = 1,
    /**
     * Optimize capture for audio coming from the side of the device opposite the screen.
     */
    BACK = 2,
    /**
     * Optimize capture for audio coming from an off-device microphone.
     */
    EXTERNAL = 3,
};