/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "StreamHalAidl"
//#define LOG_NDEBUG 0

#include <algorithm>
#include <cstdint>

#include <audio_utils/clock.h>
#include <media/AidlConversion.h>
#include <media/AidlConversionCore.h>
#include <media/AidlConversionNdk.h>
#include <media/AidlConversionUtil.h>
#include <media/AudioParameter.h>
#include <mediautils/TimeCheck.h>
#include <system/audio.h>
#include <utils/Log.h>

#include "DeviceHalAidl.h"
#include "EffectHalAidl.h"
#include "StreamHalAidl.h"

using ::aidl::android::aidl_utils::statusTFromBinderStatus;
using ::aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
using ::aidl::android::hardware::audio::common::PlaybackTrackMetadata;
using ::aidl::android::hardware::audio::common::RecordTrackMetadata;
using ::aidl::android::hardware::audio::core::IStreamCommon;
using ::aidl::android::hardware::audio::core::IStreamIn;
using ::aidl::android::hardware::audio::core::IStreamOut;
using ::aidl::android::hardware::audio::core::MmapBufferDescriptor;
using ::aidl::android::hardware::audio::core::StreamDescriptor;
using ::aidl::android::media::audio::common::MicrophoneDynamicInfo;
using ::aidl::android::media::audio::IHalAdapterVendorExtension;

namespace android {

using HalCommand = StreamDescriptor::Command;

namespace {

template<HalCommand::Tag cmd> HalCommand makeHalCommand() {
    return HalCommand::make<cmd>(::aidl::android::media::audio::common::Void{});
}
template<HalCommand::Tag cmd, typename T> HalCommand makeHalCommand(T data) {
    return HalCommand::make<cmd>(data);
}

}  // namespace

// static
template<class T>
std::shared_ptr<IStreamCommon> StreamHalAidl::getStreamCommon(const std::shared_ptr<T>& stream) {
    std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> streamCommon;
    if (stream != nullptr) {
        if (ndk::ScopedAStatus status = stream->getStreamCommon(&streamCommon);
                !status.isOk()) {
            ALOGE("%s: failed to retrieve IStreamCommon instance: %s", __func__,
                    status.getDescription().c_str());
        }
    }
    return streamCommon;
}
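
// The constructor does not perform any I/O on the stream. It seeds the cached reply with the
// nominal latency supplied by the caller, and derives mLastReplyLifeTimeNs (the lifetime of a
// cached reply) as the smaller of 100 ms and twice the buffer duration; this paces 'getStatus'
// traffic generated by position queries.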
StreamHalAidl::StreamHalAidl(
        std::string_view className, bool isInput, const audio_config& config,
        int32_t nominalLatency, StreamContextAidl&& context,
        const std::shared_ptr<IStreamCommon>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext)
        : ConversionHelperAidl(className),
          mIsInput(isInput),
          mConfig(configToBase(config)),
          mContext(std::move(context)),
          mStream(stream),
          mVendorExt(vext),
          mLastReplyLifeTimeNs(
                  std::min(static_cast<int32_t>(100),
                          2 * mContext.getBufferDurationMs(mConfig.sample_rate))
                  * NANOS_PER_MILLISECOND) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    {
        std::lock_guard l(mLock);
        mLastReply.latencyMs = nominalLatency;
    }
    // Instrument audio signal power logging.
    // Note: This assumes channel mask, format, and sample rate do not change after creation.
    if (audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
            /* mStreamPowerLog.isUserDebugOrEngBuild() && */
            StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
        mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
    }
}

StreamHalAidl::~StreamHalAidl() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    if (mStream != nullptr) {
        ndk::ScopedAStatus status = mStream->close();
        ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
    }
}

status_t StreamHalAidl::getBufferSize(size_t *size) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    if (size == nullptr) {
        return BAD_VALUE;
    }
    if (mContext.getFrameSizeBytes() == 0 || mContext.getBufferSizeFrames() == 0 || !mStream) {
        return NO_INIT;
    }
    *size = mContext.getBufferSizeBytes();
    return OK;
}

status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    if (configBase == nullptr) {
        return BAD_VALUE;
    }
    if (!mStream) return NO_INIT;
    *configBase = mConfig;
    return OK;
}

status_t StreamHalAidl::setParameters(const String8& kvPairs) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    AudioParameter parameters(kvPairs);
    ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());

    (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyStreamHwAvSync),
                    [&](int hwAvSyncId) {
                        return statusTFromBinderStatus(mStream->updateHwAvSyncId(hwAvSyncId));
                    }));
    return parseAndSetVendorParameters(mVendorExt, mStream, parameters);
}

status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (values == nullptr) {
        return BAD_VALUE;
    }
    AudioParameter parameterKeys(keys), result;
    *values = result.toString();
    return parseAndGetVendorParameters(mVendorExt, mStream, parameterKeys, values);
}

status_t StreamHalAidl::getFrameSize(size_t *size) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    if (size == nullptr) {
        return BAD_VALUE;
    }
    if (mContext.getFrameSizeBytes() == 0 || !mStream) {
        return NO_INIT;
    }
    *size = mContext.getFrameSizeBytes();
    return OK;
}

status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (effect == nullptr) {
        return BAD_VALUE;
    }
    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
    return statusTFromBinderStatus(mStream->addEffect(aidlEffect->getIEffect()));
}

status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (effect == nullptr) {
        return BAD_VALUE;
    }
    auto aidlEffect = sp<effect::EffectHalAidl>::cast(effect);
    return statusTFromBinderStatus(mStream->removeEffect(aidlEffect->getIEffect()));
}
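
// Moves the stream into the STANDBY state by walking the StreamDescriptor state machine:
// an active, draining, or transferring stream is paused first, a paused output is flushed
// back to IDLE (input streams only need a flush), and from IDLE the 'standby' command is
// issued. Each step validates the state reported in the HAL reply.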
status_t StreamHalAidl::standby() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    const auto state = getState();
    StreamDescriptor::Reply reply;
    switch (state) {
        case StreamDescriptor::State::ACTIVE:
        case StreamDescriptor::State::DRAINING:
        case StreamDescriptor::State::TRANSFERRING:
            RETURN_STATUS_IF_ERROR(pause(&reply));
            if (reply.state != StreamDescriptor::State::PAUSED &&
                    reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
                ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            FALLTHROUGH_INTENDED;
        case StreamDescriptor::State::PAUSED:
        case StreamDescriptor::State::DRAIN_PAUSED:
        case StreamDescriptor::State::TRANSFER_PAUSED:
            if (mIsInput) return flush();
            RETURN_STATUS_IF_ERROR(flush(&reply));
            if (reply.state != StreamDescriptor::State::IDLE) {
                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            FALLTHROUGH_INTENDED;
        case StreamDescriptor::State::IDLE:
            RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
                            &reply, true /*safeFromNonWorkerThread*/));
            if (reply.state != StreamDescriptor::State::STANDBY) {
                ALOGE("%s: unexpected stream state: %s (expected STANDBY)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            FALLTHROUGH_INTENDED;
        case StreamDescriptor::State::STANDBY:
            return OK;
        default:
            ALOGE("%s: not supported from %s stream state %s",
                    __func__, mIsInput ? "input" : "output", toString(state).c_str());
            return INVALID_OPERATION;
    }
}

status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    Vector<String16> newArgs = args;
    newArgs.push(String16(kDumpFromAudioServerArgument));
    status_t status = mStream->dump(fd, Args(newArgs).args(), newArgs.size());
    mStreamPowerLog.dump(fd);
    return status;
}

status_t StreamHalAidl::start() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (!mContext.isMmapped()) {
        return BAD_VALUE;
    }
    StreamDescriptor::Reply reply;
    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
    switch (reply.state) {
        case StreamDescriptor::State::STANDBY:
            RETURN_STATUS_IF_ERROR(
                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
            if (reply.state != StreamDescriptor::State::IDLE) {
                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            FALLTHROUGH_INTENDED;
        case StreamDescriptor::State::IDLE:
            RETURN_STATUS_IF_ERROR(
                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
            if (reply.state != StreamDescriptor::State::ACTIVE) {
                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            FALLTHROUGH_INTENDED;
        case StreamDescriptor::State::ACTIVE:
            return OK;
        case StreamDescriptor::State::DRAINING:
            RETURN_STATUS_IF_ERROR(
                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
            if (reply.state != StreamDescriptor::State::ACTIVE) {
                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
                        __func__, toString(reply.state).c_str());
                return INVALID_OPERATION;
            }
            return OK;
        default:
            ALOGE("%s: not supported from %s stream state %s",
                    __func__, mIsInput ? "input" : "output", toString(reply.state).c_str());
            return INVALID_OPERATION;
    }
}

status_t StreamHalAidl::stop() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (!mContext.isMmapped()) {
        return BAD_VALUE;
    }
    StreamDescriptor::Reply reply;
    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
        return drain(false /*earlyNotify*/, nullptr);
    } else if (state == StreamDescriptor::State::DRAINING) {
        RETURN_STATUS_IF_ERROR(pause());
        return flush();
    } else if (state == StreamDescriptor::State::PAUSED) {
        return flush();
    } else if (state != StreamDescriptor::State::IDLE &&
            state != StreamDescriptor::State::STANDBY) {
        ALOGE("%s: not supported from %s stream state %s",
                __func__, mIsInput ? "input" : "output", toString(state).c_str());
        return INVALID_OPERATION;
    }
    return OK;
}
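
// The getters below rely on the state and position counters cached from the most recent HAL
// reply; updateCountersIfNeeded() only issues a 'getStatus' command when the cached reply
// has expired.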
"input" : "output", toString(state).c_str()); return INVALID_OPERATION; } return OK; } status_t StreamHalAidl::getLatency(uint32_t *latency) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); if (!mStream) return NO_INIT; StreamDescriptor::Reply reply; RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply)); *latency = std::clamp(std::max(0, reply.latencyMs), 1, 3000); ALOGW_IF(reply.latencyMs != static_cast(*latency), "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs, *latency); return OK; } status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp, StatePositions* statePositions) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); if (!mStream) return NO_INIT; StreamDescriptor::Reply reply; RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions)); *frames = std::max(0, reply.observable.frames); *timestamp = std::max(0, reply.observable.timeNs); return OK; } status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); if (!mStream) return NO_INIT; StreamDescriptor::Reply reply; RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply)); *frames = std::max(0, reply.hardware.frames); *timestamp = std::max(0, reply.hardware.timeNs); return OK; } status_t StreamHalAidl::getXruns(int32_t *frames) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); if (!mStream) return NO_INIT; StreamDescriptor::Reply reply; RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply)); *frames = std::max(0, reply.xrunFrames); return OK; } status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) { ALOGV("%p %s::%s", this, getClassName().c_str(), __func__); // TIME_CHECK(); // TODO(b/243839867) reenable only when optimized. if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT; mWorkerTid.store(gettid(), std::memory_order_release); // Switch the stream into an active state if needed. // Note: in future we may add support for priming the audio pipeline // with data prior to enabling output (thus we can issue a "burst" command in the "standby" // stream state), however this scenario wasn't supported by the HIDL HAL. 
status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
    if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
    mWorkerTid.store(gettid(), std::memory_order_release);
    // Switch the stream into an active state if needed.
    // Note: in the future we may add support for priming the audio pipeline
    // with data prior to enabling output (thus we can issue a "burst" command in the "standby"
    // stream state), however this scenario wasn't supported by the HIDL HAL.
    if (getState() == StreamDescriptor::State::STANDBY) {
        StreamDescriptor::Reply reply;
        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply));
        if (reply.state != StreamDescriptor::State::IDLE) {
            ALOGE("%s: failed to get the stream out of standby, actual state: %s",
                    __func__, toString(reply.state).c_str());
            return INVALID_OPERATION;
        }
    }
    StreamContextAidl::DataMQ::Error fmqError = StreamContextAidl::DataMQ::Error::NONE;
    std::string fmqErrorMsg;
    if (!mIsInput) {
        bytes = std::min(bytes,
                mContext.getDataMQ()->availableToWrite(&fmqError, &fmqErrorMsg));
    }
    StreamDescriptor::Command burst =
            StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
    if (!mIsInput) {
        if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
            ALOGE("%s: failed to write %zu bytes to data MQ", __func__, bytes);
            return NOT_ENOUGH_DATA;
        }
    }
    StreamDescriptor::Reply reply;
    RETURN_STATUS_IF_ERROR(sendCommand(burst, &reply));
    *transferred = reply.fmqByteCount;
    if (mIsInput) {
        LOG_ALWAYS_FATAL_IF(*transferred > bytes,
                "%s: HAL module read %zu bytes, which exceeds requested count %zu",
                __func__, *transferred, bytes);
        if (auto toRead = mContext.getDataMQ()->availableToRead(&fmqError, &fmqErrorMsg);
                toRead != 0 &&
                !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
            ALOGE("%s: failed to read %zu bytes from data MQ", __func__, toRead);
            return NOT_ENOUGH_DATA;
        }
    }
    LOG_ALWAYS_FATAL_IF(fmqError != StreamContextAidl::DataMQ::Error::NONE,
            "%s", fmqErrorMsg.c_str());
    mStreamPowerLog.log(buffer, *transferred);
    return OK;
}

status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
            true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
}

status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (mIsInput) {
        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), reply);
    } else {
        if (const auto state = getState(); state == StreamDescriptor::State::IDLE) {
            // Handle pause-flush-resume sequence. 'flush' from PAUSED goes to
            // IDLE. We move here from IDLE to ACTIVE (same as 'start' from PAUSED).
            StreamDescriptor::Reply localReply{};
            StreamDescriptor::Reply* innerReply = reply ?: &localReply;
            RETURN_STATUS_IF_ERROR(
                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
            if (innerReply->state != StreamDescriptor::State::ACTIVE) {
                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
                        __func__, toString(innerReply->state).c_str());
                return INVALID_OPERATION;
            }
            return OK;
        } else if (state == StreamDescriptor::State::PAUSED ||
                state == StreamDescriptor::State::TRANSFER_PAUSED ||
                state == StreamDescriptor::State::DRAIN_PAUSED) {
            return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
        } else {
            ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
                    __func__, toString(state).c_str());
            return INVALID_OPERATION;
        }
    }
}

status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
                    mIsInput ? StreamDescriptor::DrainMode::DRAIN_UNSPECIFIED :
                    earlyNotify ? StreamDescriptor::DrainMode::DRAIN_EARLY_NOTIFY :
                    StreamDescriptor::DrainMode::DRAIN_ALL),
            reply, true /*safeFromNonWorkerThread*/);
}

status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
            true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
}

status_t StreamHalAidl::exit() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return statusTFromBinderStatus(mStream->prepareToClose());
}
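
// Handlers for asynchronous notifications from the HAL module. They refresh the cached state
// and position counters by issuing a 'getStatus' command; 'onAsyncError' only marks the cached
// state as ERROR because no further reply can be expected from the HAL at that point.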
void StreamHalAidl::onAsyncTransferReady() {
    if (auto state = getState(); state == StreamDescriptor::State::TRANSFERRING) {
        // Retrieve the current state together with position counters unconditionally
        // to ensure that the state on our side gets updated.
        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
                true /*safeFromNonWorkerThread */);
    } else {
        ALOGW("%s: unexpected onTransferReady in the state %s",
                __func__, toString(state).c_str());
    }
}

void StreamHalAidl::onAsyncDrainReady() {
    if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
        // Retrieve the current state together with position counters unconditionally
        // to ensure that the state on our side gets updated.
        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
                true /*safeFromNonWorkerThread */);
        // For compatibility with HIDL behavior, apply a "soft" position reset
        // after receiving the "drain ready" callback.
        std::lock_guard l(mLock);
        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
    } else {
        ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
    }
}

void StreamHalAidl::onAsyncError() {
    std::lock_guard l(mLock);
    ALOGW("%s: received in the state %s", __func__, toString(mLastReply.state).c_str());
    mLastReply.state = StreamDescriptor::State::ERROR;
}

status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
        struct audio_mmap_buffer_info *info) {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (!mContext.isMmapped()) {
        return BAD_VALUE;
    }
    const MmapBufferDescriptor& bufferDescriptor = mContext.getMmapBufferDescriptor();
    info->shared_memory_fd = bufferDescriptor.sharedMemory.fd.get();
    info->buffer_size_frames = mContext.getBufferSizeFrames();
    info->burst_size_frames = bufferDescriptor.burstSizeFrames;
    info->flags = static_cast<audio_mmap_buffer_flag>(bufferDescriptor.flags);
    return OK;
}

status_t StreamHalAidl::getMmapPosition(struct audio_mmap_position *position) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (!mContext.isMmapped()) {
        return BAD_VALUE;
    }
    int64_t aidlPosition = 0, aidlTimestamp = 0;
    RETURN_STATUS_IF_ERROR(getHardwarePosition(&aidlPosition, &aidlTimestamp));
    position->time_nanoseconds = aidlTimestamp;
    position->position_frames = static_cast<int32_t>(aidlPosition);
    return OK;
}

status_t StreamHalAidl::setHalThreadPriority(int priority __unused) {
    // Obsolete, must be done by the HAL module.
    return OK;
}

status_t StreamHalAidl::legacyCreateAudioPatch(const struct audio_port_config& port __unused,
        std::optional<audio_source_t> source __unused,
        audio_devices_t type __unused) {
    // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
    return INVALID_OPERATION;
}

status_t StreamHalAidl::legacyReleaseAudioPatch() {
    // Obsolete since 'DeviceHalAidl.supportsAudioPatches' always returns 'true'.
    return INVALID_OPERATION;
}
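
// Sends a command to the HAL module over the command message queue and performs a blocking
// read of the reply from the reply queue. The reply is cached with an expiration time so that
// frequent position queries can be served without another round trip, and the observable frame
// counts at standby / flush / drain transitions are captured for position rebasing. Unless
// 'safeFromNonWorkerThread' is set, the call must come from the I/O worker thread.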
status_t StreamHalAidl::sendCommand(
        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
        bool safeFromNonWorkerThread, StatePositions* statePositions) {
    // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
    if (!safeFromNonWorkerThread) {
        const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
        LOG_ALWAYS_FATAL_IF(workerTid != gettid(),
                "%s %s: must be invoked from the worker thread (%d)",
                __func__, command.toString().c_str(), workerTid);
    }
    StreamDescriptor::Reply localReply{};
    {
        std::lock_guard l(mCommandReplyLock);
        if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
            return NOT_ENOUGH_DATA;
        }
        if (reply == nullptr) {
            reply = &localReply;
        }
        if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
            ALOGE("%s: failed to read from reply MQ, command %s",
                    __func__, command.toString().c_str());
            return NOT_ENOUGH_DATA;
        }
        {
            std::lock_guard l(mLock);
            // Not every command replies with the 'latencyMs' field filled out; substitute
            // the last returned value in that case.
            if (reply->latencyMs <= 0) {
                reply->latencyMs = mLastReply.latencyMs;
            }
            mLastReply = *reply;
            mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
            if (!mIsInput && reply->status == STATUS_OK) {
                if (command.getTag() == StreamDescriptor::Command::standby &&
                        reply->state == StreamDescriptor::State::STANDBY) {
                    mStatePositions.framesAtStandby = reply->observable.frames;
                } else if (command.getTag() == StreamDescriptor::Command::flush &&
                        reply->state == StreamDescriptor::State::IDLE) {
                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
                } else if (!mContext.isAsynchronous() &&
                        command.getTag() == StreamDescriptor::Command::drain &&
                        (reply->state == StreamDescriptor::State::IDLE ||
                                reply->state == StreamDescriptor::State::DRAINING)) {
                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
                }  // For asynchronous drain, the frame count is saved in 'onAsyncDrainReady'.
            }
            if (statePositions != nullptr) {
                *statePositions = mStatePositions;
            }
        }
    }
    switch (reply->status) {
        case STATUS_OK: return OK;
        case STATUS_BAD_VALUE: return BAD_VALUE;
        case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
        case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
        default:
            ALOGE("%s: unexpected status %d returned for command %s",
                    __func__, reply->status, command.toString().c_str());
            return INVALID_OPERATION;
    }
}
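
// Refreshes the cached reply by issuing a 'getStatus' command when the previously cached
// reply has expired; otherwise returns the cached values. Because updates are paced by
// mLastReplyLifeTimeNs, they can be requested from any thread without disturbing the
// worker's I/O.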
status_t StreamHalAidl::updateCountersIfNeeded(
        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
        StatePositions* statePositions) {
    bool doUpdate = false;
    {
        std::lock_guard l(mLock);
        doUpdate = uptimeNanos() > mLastReplyExpirationNs;
    }
    if (doUpdate) {
        // Since updates are paced, it is OK to perform them from any thread; they should
        // not interfere with I/O operations of the worker.
        return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
                reply, true /*safeFromNonWorkerThread */, statePositions);
    } else if (reply != nullptr) {  // provide the cached reply
        std::lock_guard l(mLock);
        *reply = mLastReply;
        if (statePositions != nullptr) {
            *statePositions = mStatePositions;
        }
    }
    return OK;
}

// static
ConversionResult<::aidl::android::hardware::audio::common::SourceMetadata>
StreamOutHalAidl::legacy2aidl_SourceMetadata(const StreamOutHalInterface::SourceMetadata& legacy) {
    ::aidl::android::hardware::audio::common::SourceMetadata aidl;
    aidl.tracks = VALUE_OR_RETURN(
            ::aidl::android::convertContainer<std::vector<PlaybackTrackMetadata>>(
                    legacy.tracks,
                    ::aidl::android::legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
    return aidl;
}

StreamOutHalAidl::StreamOutHalAidl(
        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
        const std::shared_ptr<IStreamOut>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
        const sp<CallbackBroker>& callbackBroker)
        : StreamHalAidl("StreamOutHalAidl", false /*isInput*/, config, nominalLatency,
                std::move(context), getStreamCommon(stream), vext),
          mStream(stream), mCallbackBroker(callbackBroker) {
    // Initialize the offload metadata.
    mOffloadMetadata.sampleRate = static_cast<int32_t>(config.sample_rate);
    mOffloadMetadata.channelMask = VALUE_OR_FATAL(
            ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                    config.channel_mask, false));
    mOffloadMetadata.averageBitRatePerSecond = static_cast<int32_t>(config.offload_info.bit_rate);
}

StreamOutHalAidl::~StreamOutHalAidl() {
    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
        broker->clearCallbacks(static_cast<StreamOutHalInterface*>(this));
    }
}

status_t StreamOutHalAidl::setParameters(const String8& kvPairs) {
    if (!mStream) return NO_INIT;

    AudioParameter parameters(kvPairs);
    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());

    if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
        ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
    }

    return StreamHalAidl::setParameters(parameters.toString());
}

status_t StreamOutHalAidl::getLatency(uint32_t *latency) {
    return StreamHalAidl::getLatency(latency);
}

status_t StreamOutHalAidl::setVolume(float left, float right) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
    if (channelCount == 0) channelCount = 2;
    std::vector<float> volumes(channelCount);
    if (channelCount == 1) {
        volumes[0] = (left + right) / 2;
    } else {
        volumes[0] = left;
        volumes[1] = right;
        for (size_t i = 2; i < channelCount; ++i) {
            volumes[i] = (left + right) / 2;
        }
    }
    return statusTFromBinderStatus(mStream->setHwVolume(volumes));
}

status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return statusTFromBinderStatus(mStream->selectPresentation(presentationId, programId));
}

status_t StreamOutHalAidl::write(const void *buffer, size_t bytes, size_t *written) {
    if (buffer == nullptr || written == nullptr) {
        return BAD_VALUE;
    }
    // For the output scenario, 'transfer' does not modify the buffer.
    return transfer(const_cast<void*>(buffer), bytes, written);
}
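
// The render position is rebased so that it restarts from zero at the most recent reset
// point: for non-asynchronous streams with proportional frames this is the last exit from
// standby, otherwise the flush/drain positions are taken into account as well (see the table
// at the start of 'StreamHalInterface').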
status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
    if (dspFrames == nullptr) {
        return BAD_VALUE;
    }
    int64_t aidlFrames = 0, aidlTimestamp = 0;
    StatePositions statePositions{};
    RETURN_STATUS_IF_ERROR(
            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
    // Number of audio frames since the stream has exited standby.
    // See the table at the start of 'StreamHalInterface' on when it needs to reset.
    int64_t mostRecentResetPoint;
    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
        mostRecentResetPoint = statePositions.framesAtStandby;
    } else {
        mostRecentResetPoint = std::max(statePositions.framesAtStandby,
                statePositions.framesAtFlushOrDrain);
    }
    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
    return OK;
}

status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
    ALOGD("%p %s", this, __func__);
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (!mContext.isAsynchronous()) {
        ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
        return INVALID_OPERATION;
    }
    mClientCallback = callback;
    return OK;
}

status_t StreamOutHalAidl::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
    if (supportsPause == nullptr || supportsResume == nullptr) {
        return BAD_VALUE;
    }
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    *supportsPause = *supportsResume = true;
    return OK;
}

status_t StreamOutHalAidl::pause() {
    return StreamHalAidl::pause();
}

status_t StreamOutHalAidl::resume() {
    return StreamHalAidl::resume();
}

status_t StreamOutHalAidl::supportsDrain(bool *supportsDrain) {
    if (supportsDrain == nullptr) {
        return BAD_VALUE;
    }
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    *supportsDrain = true;
    return OK;
}

status_t StreamOutHalAidl::drain(bool earlyNotify) {
    return StreamHalAidl::drain(earlyNotify);
}

status_t StreamOutHalAidl::flush() {
    return StreamHalAidl::flush();
}

status_t StreamOutHalAidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
    if (frames == nullptr || timestamp == nullptr) {
        return BAD_VALUE;
    }
    int64_t aidlFrames = 0, aidlTimestamp = 0;
    StatePositions statePositions{};
    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
    // See the table at the start of 'StreamHalInterface'.
    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
        *frames = aidlFrames;
    } else {
        const int64_t mostRecentResetPoint = std::max(statePositions.framesAtStandby,
                statePositions.framesAtFlushOrDrain);
        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
    }
    timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
    timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
    return OK;
}
status_t StreamOutHalAidl::presentationComplete() {
    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
    return OK;
}

status_t StreamOutHalAidl::updateSourceMetadata(
        const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::hardware::audio::common::SourceMetadata aidlMetadata =
            VALUE_OR_RETURN_STATUS(legacy2aidl_SourceMetadata(sourceMetadata));
    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
}

status_t StreamOutHalAidl::getDualMonoMode(audio_dual_mono_mode_t* mode) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (mode == nullptr) {
        return BAD_VALUE;
    }
    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode;
    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mStream->getDualMonoMode(&aidlMode)));
    *mode = VALUE_OR_RETURN_STATUS(
            ::aidl::android::aidl2legacy_AudioDualMonoMode_audio_dual_mono_mode_t(aidlMode));
    return OK;
}

status_t StreamOutHalAidl::setDualMonoMode(audio_dual_mono_mode_t mode) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::media::audio::common::AudioDualMonoMode aidlMode = VALUE_OR_RETURN_STATUS(
            ::aidl::android::legacy2aidl_audio_dual_mono_mode_t_AudioDualMonoMode(mode));
    return statusTFromBinderStatus(mStream->setDualMonoMode(aidlMode));
}

status_t StreamOutHalAidl::getAudioDescriptionMixLevel(float* leveldB) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (leveldB == nullptr) {
        return BAD_VALUE;
    }
    return statusTFromBinderStatus(mStream->getAudioDescriptionMixLevel(leveldB));
}

status_t StreamOutHalAidl::setAudioDescriptionMixLevel(float leveldB) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return statusTFromBinderStatus(mStream->setAudioDescriptionMixLevel(leveldB));
}

status_t StreamOutHalAidl::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (playbackRate == nullptr) {
        return BAD_VALUE;
    }
    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate;
    RETURN_STATUS_IF_ERROR(
            statusTFromBinderStatus(mStream->getPlaybackRateParameters(&aidlRate)));
    *playbackRate = VALUE_OR_RETURN_STATUS(
            ::aidl::android::aidl2legacy_AudioPlaybackRate_audio_playback_rate_t(aidlRate));
    return OK;
}

status_t StreamOutHalAidl::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::media::audio::common::AudioPlaybackRate aidlRate = VALUE_OR_RETURN_STATUS(
            ::aidl::android::legacy2aidl_audio_playback_rate_t_AudioPlaybackRate(playbackRate));
    return statusTFromBinderStatus(mStream->setPlaybackRateParameters(aidlRate));
}
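
// Event and latency mode callbacks are not delivered by the stream object itself; they are
// registered with the callback broker (presumably the owning DeviceHalAidl), keyed by this
// stream's StreamOutHalInterface pointer, and cleared in the destructor.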
status_t StreamOutHalAidl::setEventCallback(
        const sp<StreamOutHalInterfaceEventCallback>& callback) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
        broker->setStreamOutEventCallback(static_cast<StreamOutHalInterface*>(this), callback);
    }
    return OK;
}

status_t StreamOutHalAidl::setLatencyMode(audio_latency_mode_t mode) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::media::audio::common::AudioLatencyMode aidlMode = VALUE_OR_RETURN_STATUS(
            ::aidl::android::legacy2aidl_audio_latency_mode_t_AudioLatencyMode(mode));
    return statusTFromBinderStatus(mStream->setLatencyMode(aidlMode));
};

status_t StreamOutHalAidl::getRecommendedLatencyModes(std::vector<audio_latency_mode_t> *modes) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (modes == nullptr) {
        return BAD_VALUE;
    }
    std::vector<::aidl::android::media::audio::common::AudioLatencyMode> aidlModes;
    RETURN_STATUS_IF_ERROR(
            statusTFromBinderStatus(mStream->getRecommendedLatencyModes(&aidlModes)));
    *modes = VALUE_OR_RETURN_STATUS(
            ::aidl::android::convertContainer<std::vector<audio_latency_mode_t>>(
                    aidlModes,
                    ::aidl::android::aidl2legacy_AudioLatencyMode_audio_latency_mode_t));
    return OK;
};

status_t StreamOutHalAidl::setLatencyModeCallback(
        const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    if (auto broker = mCallbackBroker.promote(); broker != nullptr) {
        broker->setStreamOutLatencyModeCallback(
                static_cast<StreamOutHalInterface*>(this), callback);
    }
    return OK;
};

status_t StreamOutHalAidl::exit() {
    return StreamHalAidl::exit();
}

void StreamOutHalAidl::onWriteReady() {
    onAsyncTransferReady();
    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
        clientCb->onWriteReady();
    }
}

void StreamOutHalAidl::onDrainReady() {
    onAsyncDrainReady();
    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
        clientCb->onDrainReady();
    }
}

void StreamOutHalAidl::onError(bool isHardError) {
    onAsyncError();
    if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
        clientCb->onError(isHardError);
    }
}
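
// Strips the offload codec parameters (average bit rate, sample rate, channel count, delay
// and padding frames) out of the key/value set and, if any of them were present, pushes the
// refreshed offload metadata to the HAL via 'updateOffloadMetadata'. The remaining parameters
// are left in place for the generic setParameters path.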
status_t StreamOutHalAidl::filterAndUpdateOffloadMetadata(AudioParameter &parameters) {
    TIME_CHECK();
    bool updateMetadata = false;
    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyOffloadCodecAverageBitRate),
                    [&](int value) {
                        return value >= 0 ?
                                mOffloadMetadata.averageBitRatePerSecond = value, OK : BAD_VALUE;
                    }))) {
        updateMetadata = true;
    }
    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyOffloadCodecSampleRate),
                    [&](int value) {
                        return value > 0 ? mOffloadMetadata.sampleRate = value, OK : BAD_VALUE;
                    }))) {
        updateMetadata = true;
    }
    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyOffloadCodecChannels),
                    [&](int value) -> status_t {
                        if (value > 0) {
                            audio_channel_mask_t channel_mask = audio_channel_out_mask_from_count(
                                    static_cast<uint32_t>(value));
                            if (channel_mask == AUDIO_CHANNEL_INVALID) return BAD_VALUE;
                            mOffloadMetadata.channelMask = VALUE_OR_RETURN_STATUS(
                                    ::aidl::android::legacy2aidl_audio_channel_mask_t_AudioChannelLayout(
                                            channel_mask, false /*isInput*/));
                            return OK;
                        }
                        return BAD_VALUE;
                    }))) {
        updateMetadata = true;
    }
    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyOffloadCodecDelaySamples),
                    [&](int value) {
                        // The legacy keys are misnamed, the value is in frames.
                        return value >= 0 ? mOffloadMetadata.delayFrames = value, OK : BAD_VALUE;
                    }))) {
        updateMetadata = true;
    }
    if (VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int32_t>(
                    parameters, String8(AudioParameter::keyOffloadCodecPaddingSamples),
                    [&](int value) {
                        // The legacy keys are misnamed, the value is in frames.
                        return value >= 0 ? mOffloadMetadata.paddingFrames = value, OK : BAD_VALUE;
                    }))) {
        updateMetadata = true;
    }
    if (updateMetadata) {
        ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
        if (status_t status = statusTFromBinderStatus(
                        mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
            ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
            return status;
        }
    }
    return OK;
}

// static
ConversionResult<::aidl::android::hardware::audio::common::SinkMetadata>
StreamInHalAidl::legacy2aidl_SinkMetadata(const StreamInHalInterface::SinkMetadata& legacy) {
    ::aidl::android::hardware::audio::common::SinkMetadata aidl;
    aidl.tracks = VALUE_OR_RETURN(
            ::aidl::android::convertContainer<std::vector<RecordTrackMetadata>>(
                    legacy.tracks,
                    ::aidl::android::legacy2aidl_record_track_metadata_v7_RecordTrackMetadata));
    return aidl;
}

StreamInHalAidl::StreamInHalAidl(
        const audio_config& config, StreamContextAidl&& context, int32_t nominalLatency,
        const std::shared_ptr<IStreamIn>& stream,
        const std::shared_ptr<IHalAdapterVendorExtension>& vext,
        const sp<MicrophoneInfoProvider>& micInfoProvider)
        : StreamHalAidl("StreamInHalAidl", true /*isInput*/, config, nominalLatency,
                std::move(context), getStreamCommon(stream), vext),
          mStream(stream), mMicInfoProvider(micInfoProvider) {}

status_t StreamInHalAidl::setGain(float gain) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
    std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
    return statusTFromBinderStatus(mStream->setHwGain(gains));
}

status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
    if (buffer == nullptr || read == nullptr) {
        return BAD_VALUE;
    }
    return transfer(buffer, bytes, read);
}

status_t StreamInHalAidl::getInputFramesLost(uint32_t *framesLost) {
    if (framesLost == nullptr) {
        return BAD_VALUE;
    }
    int32_t aidlXruns = 0;
    RETURN_STATUS_IF_ERROR(getXruns(&aidlXruns));
    *framesLost = std::max(0, aidlXruns);
    return OK;
}

status_t StreamInHalAidl::getCapturePosition(int64_t *frames, int64_t *time) {
    if (frames == nullptr || time == nullptr) {
        return BAD_VALUE;
    }
    return getObservablePosition(frames, time);
}
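
// Active microphone info is assembled from two sources: the static per-microphone
// characteristics cached by the device HAL (exposed via the MicrophoneInfoProvider) and
// the dynamic info reported by the stream, matched by microphone id.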
status_t StreamInHalAidl::getActiveMicrophones(std::vector<media::MicrophoneInfoFw> *microphones) {
    if (!microphones) {
        return BAD_VALUE;
    }
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    sp<MicrophoneInfoProvider> micInfoProvider = mMicInfoProvider.promote();
    if (!micInfoProvider) return NO_INIT;
    auto staticInfo = micInfoProvider->getMicrophoneInfo();
    if (!staticInfo) return INVALID_OPERATION;
    std::vector<MicrophoneDynamicInfo> dynamicInfo;
    RETURN_STATUS_IF_ERROR(
            statusTFromBinderStatus(mStream->getActiveMicrophones(&dynamicInfo)));
    std::vector<media::MicrophoneInfoFw> result;
    result.reserve(dynamicInfo.size());
    for (const auto& d : dynamicInfo) {
        const auto staticInfoIt = std::find_if(staticInfo->begin(), staticInfo->end(),
                [&](const auto& s) { return s.id == d.id; });
        if (staticInfoIt != staticInfo->end()) {
            // Convert into the c++ backend type from the ndk backend type via the legacy structure.
            audio_microphone_characteristic_t legacy = VALUE_OR_RETURN_STATUS(
                    ::aidl::android::aidl2legacy_MicrophoneInfos_audio_microphone_characteristic_t(
                            *staticInfoIt, d));
            media::MicrophoneInfoFw info = VALUE_OR_RETURN_STATUS(
                    ::android::legacy2aidl_audio_microphone_characteristic_t_MicrophoneInfoFw(
                            legacy));
            // Note: info.portId is not filled because it's a bit of framework info.
            result.push_back(std::move(info));
        } else {
            ALOGE("%s: no static info for active microphone with id '%s'",
                    __func__, d.id.c_str());
        }
    }
    *microphones = std::move(result);
    return OK;
}

status_t StreamInHalAidl::updateSinkMetadata(
        const StreamInHalInterface::SinkMetadata& sinkMetadata) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::hardware::audio::common::SinkMetadata aidlMetadata =
            VALUE_OR_RETURN_STATUS(legacy2aidl_SinkMetadata(sinkMetadata));
    return statusTFromBinderStatus(mStream->updateMetadata(aidlMetadata));
}

status_t StreamInHalAidl::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    ::aidl::android::hardware::audio::core::IStreamIn::MicrophoneDirection aidlDirection =
            VALUE_OR_RETURN_STATUS(
                    ::aidl::android::legacy2aidl_audio_microphone_direction_t_MicrophoneDirection(
                            direction));
    return statusTFromBinderStatus(mStream->setMicrophoneDirection(aidlDirection));
}

status_t StreamInHalAidl::setPreferredMicrophoneFieldDimension(float zoom) {
    TIME_CHECK();
    if (!mStream) return NO_INIT;
    return statusTFromBinderStatus(mStream->setMicrophoneFieldDimension(zoom));
}

}  // namespace android