/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "StreamOutHAL"

#include "core/default/StreamOut.h"
#include "core/default/Util.h"

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_AUDIO

#include <string.h>

#include <memory>

#include <HidlUtils.h>
#include <android/log.h>
#include <audio_utils/Metadata.h>
#include <hardware/audio.h>
#include <util/CoreUtils.h>
#include <utils/Trace.h>

namespace android {
namespace hardware {
namespace audio {
namespace CPP_VERSION {
namespace implementation {

using ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::implementation::HidlUtils;
using ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::CoreUtils;
namespace util {
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION::implementation::util;
}

namespace {

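// Worker thread that services write requests arriving from the client over the message queues:
// commands are read from the command queue, audio data from the data queue, and the outcome of
// each command is reported back through the status queue.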
class WriteThread : public Thread {
  public:
    // WriteThread's lifespan never exceeds StreamOut's lifespan.
    WriteThread(std::atomic<bool>* stop, audio_stream_out_t* stream,
                StreamOut::CommandMQ* commandMQ, StreamOut::DataMQ* dataMQ,
                StreamOut::StatusMQ* statusMQ, EventFlag* efGroup)
        : Thread(false /*canCallJava*/),
          mStop(stop),
          mStream(stream),
          mCommandMQ(commandMQ),
          mDataMQ(dataMQ),
          mStatusMQ(statusMQ),
          mEfGroup(efGroup),
          mBuffer(nullptr) {}
    bool init() {
        mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
        return mBuffer != nullptr;
    }
    virtual ~WriteThread() {}

  private:
    std::atomic<bool>* mStop;
    audio_stream_out_t* mStream;
    StreamOut::CommandMQ* mCommandMQ;
    StreamOut::DataMQ* mDataMQ;
    StreamOut::StatusMQ* mStatusMQ;
    EventFlag* mEfGroup;
    std::unique_ptr<uint8_t[]> mBuffer;
    IStreamOut::WriteStatus mStatus;

    bool threadLoop() override;

    void doGetLatency();
    void doGetPresentationPosition();
    void doWrite();
};

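// Reads everything currently available in the data queue into the local buffer and hands it to
// the legacy HAL's write(); the number of bytes written, or the converted error code, is
// recorded in the pending status reply.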
void WriteThread::doWrite() {
    const size_t availToRead = mDataMQ->availableToRead();
    mStatus.retval = Result::OK;
    mStatus.reply.written = 0;
    if (mDataMQ->read(&mBuffer[0], availToRead)) {
        ssize_t writeResult = mStream->write(mStream, &mBuffer[0], availToRead);
        if (writeResult >= 0) {
            mStatus.reply.written = writeResult;
        } else {
            mStatus.retval = Stream::analyzeStatus("write", writeResult);
        }
    }
}

void WriteThread::doGetPresentationPosition() {
    mStatus.retval =
        StreamOut::getPresentationPositionImpl(mStream, &mStatus.reply.presentationPosition.frames,
                                               &mStatus.reply.presentationPosition.timeStamp);
}

void WriteThread::doGetLatency() {
    mStatus.retval = Result::OK;
    mStatus.reply.latencyMs = mStream->get_latency(mStream);
}

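// Each iteration: block on the NOT_EMPTY event flag, read the next command from the command
// queue, execute it, publish the result on the status queue, and wake the client with NOT_FULL.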
bool WriteThread::threadLoop() {
    // This implementation doesn't return control back to the Thread until it decides to stop,
    // as the Thread uses mutexes, and this can lead to priority inversion.
    while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
        uint32_t efState = 0;
        mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);
        if (!(efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY))) {
            continue;  // Nothing to do.
        }
        if (!mCommandMQ->read(&mStatus.replyTo)) {
            continue;  // Nothing to do.
        }
        switch (mStatus.replyTo) {
            case IStreamOut::WriteCommand::WRITE:
                doWrite();
                break;
            case IStreamOut::WriteCommand::GET_PRESENTATION_POSITION:
                doGetPresentationPosition();
                break;
            case IStreamOut::WriteCommand::GET_LATENCY:
                doGetLatency();
                break;
            default:
                ALOGE("Unknown write thread command code %d", mStatus.replyTo);
                mStatus.retval = Result::NOT_SUPPORTED;
                break;
        }
        if (!mStatusMQ->write(&mStatus)) {
            ALOGE("status message queue write failed");
        }
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
    }

    return false;
}

}  // namespace

StreamOut::StreamOut(const sp<Device>& device, audio_stream_out_t* stream)
    : mDevice(device),
      mStream(stream),
      mStreamCommon(new Stream(false /*isInput*/, &stream->common)),
      mStreamMmap(new StreamMmap<audio_stream_out_t>(stream)),
      mEfGroup(nullptr),
      mStopWriteThread(false) {}

StreamOut::~StreamOut() {
    ATRACE_CALL();
    (void)close();
    if (mWriteThread.get()) {
        ATRACE_NAME("mWriteThread->join");
        status_t status = mWriteThread->join();
        ALOGE_IF(status, "write thread exit error: %s", strerror(-status));
    }
    if (mEfGroup) {
        status_t status = EventFlag::deleteEventFlag(&mEfGroup);
        ALOGE_IF(status, "write MQ event flag deletion error: %s", strerror(-status));
    }
    mCallback = nullptr;
#if MAJOR_VERSION <= 5
    mDevice->closeOutputStream(mStream);
    // Closing the output stream in the HAL waits for the callback to finish,
    // and joins the callback thread. Thus it is guaranteed that the callback
    // thread will not be accessing our object anymore.
#endif
    mStream = nullptr;
}

// Methods from ::android::hardware::audio::CPP_VERSION::IStream follow.
Return<uint64_t> StreamOut::getFrameSize() {
    return audio_stream_out_frame_size(mStream);
}

Return<uint64_t> StreamOut::getFrameCount() {
    return mStreamCommon->getFrameCount();
}

Return<uint64_t> StreamOut::getBufferSize() {
    return mStreamCommon->getBufferSize();
}

#if MAJOR_VERSION <= 6
Return<uint32_t> StreamOut::getSampleRate() {
    return mStreamCommon->getSampleRate();
}

#if MAJOR_VERSION == 2
Return<void> StreamOut::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(_hidl_cb);
}
#endif

Return<void> StreamOut::getSupportedChannelMasks(AudioFormat format,
                                                 getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(format, _hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(AudioFormat format,
                                                getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(format, _hidl_cb);
}

Return<Result> StreamOut::setSampleRate(uint32_t sampleRateHz) {
    return mStreamCommon->setSampleRate(sampleRateHz);
}

Return<AudioChannelBitfield> StreamOut::getChannelMask() {
    return mStreamCommon->getChannelMask();
}

Return<Result> StreamOut::setChannelMask(AudioChannelBitfield mask) {
    return mStreamCommon->setChannelMask(mask);
}

Return<AudioFormat> StreamOut::getFormat() {
    return mStreamCommon->getFormat();
}

Return<void> StreamOut::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
    return mStreamCommon->getSupportedFormats(_hidl_cb);
}

Return<Result> StreamOut::setFormat(AudioFormat format) {
    return mStreamCommon->setFormat(format);
}

#else

Return<void> StreamOut::getSupportedProfiles(getSupportedProfiles_cb _hidl_cb) {
    return mStreamCommon->getSupportedProfiles(_hidl_cb);
}

Return<Result> StreamOut::setAudioProperties(const AudioConfigBaseOptional& config) {
    return mStreamCommon->setAudioProperties(config);
}

#endif  // MAJOR_VERSION <= 6

Return<void> StreamOut::getAudioProperties(getAudioProperties_cb _hidl_cb) {
    return mStreamCommon->getAudioProperties(_hidl_cb);
}

Return<Result> StreamOut::addEffect(uint64_t effectId) {
    return mStreamCommon->addEffect(effectId);
}

Return<Result> StreamOut::removeEffect(uint64_t effectId) {
    return mStreamCommon->removeEffect(effectId);
}

Return<Result> StreamOut::standby() {
    return mStreamCommon->standby();
}

Return<Result> StreamOut::setHwAvSync(uint32_t hwAvSync) {
    return mStreamCommon->setHwAvSync(hwAvSync);
}

#if MAJOR_VERSION == 2
Return<Result> StreamOut::setConnectedState(const DeviceAddress& address, bool connected) {
    return mStreamCommon->setConnectedState(address, connected);
}

Return<AudioDevice> StreamOut::getDevice() {
    return mStreamCommon->getDevice();
}

Return<Result> StreamOut::setDevice(const DeviceAddress& address) {
    return mStreamCommon->setDevice(address);
}

Return<void> StreamOut::getParameters(const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(parameters);
}

Return<void> StreamOut::debugDump(const hidl_handle& fd) {
    return mStreamCommon->debugDump(fd);
}
#elif MAJOR_VERSION >= 4
Return<void> StreamOut::getDevices(getDevices_cb _hidl_cb) {
    return mStreamCommon->getDevices(_hidl_cb);
}

Return<Result> StreamOut::setDevices(const hidl_vec<DeviceAddress>& devices) {
    return mStreamCommon->setDevices(devices);
}
Return<void> StreamOut::getParameters(const hidl_vec<ParameterValue>& context,
                                      const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(context, keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& context,
                                        const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(context, parameters);
}
#endif

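// Marks the stream as stopped and wakes the write thread so it can exit its loop; the thread
// itself is joined in the destructor. Returns INVALID_STATE if the stream was already closed.
// Starting with V6 the legacy output stream is also closed here rather than in the destructor.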
Return<Result> StreamOut::close() {
    if (mStopWriteThread.load(std::memory_order_relaxed)) {  // only this thread writes
        return Result::INVALID_STATE;
    }
    mStopWriteThread.store(true, std::memory_order_release);
    if (mEfGroup) {
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
    }
#if MAJOR_VERSION >= 6
    mDevice->closeOutputStream(mStream);
#endif
    return Result::OK;
}

// Methods from ::android::hardware::audio::CPP_VERSION::IStreamOut follow.
Return<uint32_t> StreamOut::getLatency() {
    return mStream->get_latency(mStream);
}

Return<Result> StreamOut::setVolume(float left, float right) {
    if (mStream->set_volume == NULL) {
        return Result::NOT_SUPPORTED;
    }
    if (!util::isGainNormalized(left) || !util::isGainNormalized(right)) {
        ALOGW("Can not set a stream output volume {%f, %f} outside [0,1]", left, right);
        return Result::INVALID_ARGUMENTS;
    }
    return Stream::analyzeStatus("set_volume", mStream->set_volume(mStream, left, right),
                                 {ENOSYS} /*ignore*/);
}

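// Sets up the command, data, and status message queues plus the event flag used to signal
// between the client and the write thread, then launches the thread and returns the queue
// descriptors to the client. May only be called once per stream.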
Return<void> StreamOut::prepareForWriting(uint32_t frameSize, uint32_t framesCount,
                                          prepareForWriting_cb _hidl_cb) {
    status_t status;
#if MAJOR_VERSION <= 6
    ThreadInfo threadInfo = {0, 0};
#else
    int32_t threadInfo = 0;
#endif

    // Wrap the _hidl_cb to return an error
    auto sendError = [&threadInfo, &_hidl_cb](Result result) {
        _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), StatusMQ::Descriptor(),
                 threadInfo);
    };

    // Create message queues.
    if (mDataMQ) {
        ALOGE("the client attempts to call prepareForWriting twice");
        sendError(Result::INVALID_STATE);
        return Void();
    }
    std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));

    // Check frameSize and framesCount
    if (frameSize == 0 || framesCount == 0) {
        ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, framesCount);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
        ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
              Stream::MAX_BUFFER_SIZE);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    std::unique_ptr<DataMQ> tempDataMQ(new DataMQ(frameSize * framesCount, true /* EventFlag */));

    std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
    if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || !tempStatusMQ->isValid()) {
        ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
        ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
        ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    EventFlag* tempRawEfGroup{};
    status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &tempRawEfGroup);
    std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
        tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
    if (status != OK || !tempElfGroup) {
        ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    // Create and launch the thread.
    auto tempWriteThread =
        sp<WriteThread>::make(&mStopWriteThread, mStream, tempCommandMQ.get(), tempDataMQ.get(),
                              tempStatusMQ.get(), tempElfGroup.get());
    if (!tempWriteThread->init()) {
        ALOGW("failed to initialize writer thread: buffer allocation failed");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    status = tempWriteThread->run("writer", PRIORITY_URGENT_AUDIO);
    if (status != OK) {
        ALOGW("failed to start writer thread: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    mCommandMQ = std::move(tempCommandMQ);
    mDataMQ = std::move(tempDataMQ);
    mStatusMQ = std::move(tempStatusMQ);
    mWriteThread = tempWriteThread;
    mEfGroup = tempElfGroup.release();
#if MAJOR_VERSION <= 6
    threadInfo.pid = getpid();
    threadInfo.tid = mWriteThread->getTid();
#else
    threadInfo = mWriteThread->getTid();
#endif
    _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), *mStatusMQ->getDesc(),
             threadInfo);
    return Void();
}

Return<void> StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) {
    uint32_t halDspFrames;
    Result retval = Stream::analyzeStatus("get_render_position",
                                          mStream->get_render_position(mStream, &halDspFrames),
                                          {ENOSYS} /*ignore*/);
    _hidl_cb(retval, halDspFrames);
    return Void();
}

Return<void> StreamOut::getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) {
    Result retval(Result::NOT_SUPPORTED);
    int64_t timestampUs = 0;
    if (mStream->get_next_write_timestamp != NULL) {
        retval = Stream::analyzeStatus("get_next_write_timestamp",
                                       mStream->get_next_write_timestamp(mStream, &timestampUs),
                                       {ENOSYS} /*ignore*/);
    }
    _hidl_cb(retval, timestampUs);
    return Void();
}

Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    // Safe to pass 'this' because it is guaranteed that the callback thread
    // is joined prior to exit from StreamOut's destructor.
    int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
    if (result == 0) {
        mCallback = callback;
    }
    return Stream::analyzeStatus("set_callback", result, {ENOSYS} /*ignore*/);
}

Return<Result> StreamOut::clearCallback() {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    mCallback = nullptr;
    return Result::OK;
}

// static
int StreamOut::asyncCallback(stream_callback_event_t event, void*, void* cookie) {
    // It is guaranteed that the callback thread is joined prior
    // to exiting from StreamOut's destructor. Must *not* use sp<StreamOut>
    // here because it can make this code the last owner of StreamOut,
    // and an attempt to run the destructor on the callback thread
    // will cause a deadlock in the legacy HAL code.
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    // It's correct to hold an sp<> to callback because the reference
    // in the StreamOut instance can be cleared in the meantime. There is
    // no difference on which thread to run IStreamOutCallback's destructor.
    sp<IStreamOutCallback> callback = self->mCallback.load();
    if (callback.get() == nullptr) return 0;
    ALOGV("asyncCallback() event %d", event);
    Return<void> result;
    switch (event) {
        case STREAM_CBK_EVENT_WRITE_READY:
            result = callback->onWriteReady();
            break;
        case STREAM_CBK_EVENT_DRAIN_READY:
            result = callback->onDrainReady();
            break;
        case STREAM_CBK_EVENT_ERROR:
            result = callback->onError();
            break;
        default:
            ALOGW("asyncCallback() unknown event %d", event);
            break;
    }
    ALOGW_IF(!result.isOk(), "Client callback failed: %s", result.description().c_str());
    return 0;
}

Return<void> StreamOut::supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) {
    _hidl_cb(mStream->pause != NULL, mStream->resume != NULL);
    return Void();
}

Return<Result> StreamOut::pause() {
    return mStream->pause != NULL
               ? Stream::analyzeStatus("pause", mStream->pause(mStream), {ENOSYS} /*ignore*/)
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::resume() {
    return mStream->resume != NULL
               ? Stream::analyzeStatus("resume", mStream->resume(mStream), {ENOSYS} /*ignore*/)
               : Result::NOT_SUPPORTED;
}

Return<bool> StreamOut::supportsDrain() {
    return mStream->drain != NULL;
}

Return<Result> StreamOut::drain(AudioDrain type) {
    audio_drain_type_t halDrainType =
        type == AudioDrain::EARLY_NOTIFY ? AUDIO_DRAIN_EARLY_NOTIFY : AUDIO_DRAIN_ALL;
    return mStream->drain != NULL
               ? Stream::analyzeStatus("drain", mStream->drain(mStream, halDrainType),
                                       {ENOSYS} /*ignore*/)
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::flush() {
    return mStream->flush != NULL
               ? Stream::analyzeStatus("flush", mStream->flush(mStream), {ENOSYS} /*ignore*/)
               : Result::NOT_SUPPORTED;
}

// static
Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, uint64_t* frames,
                                              TimeSpec* timeStamp) {
    // Don't logspam on EINVAL--it's normal for get_presentation_position
    // to return it sometimes. EAGAIN may be returned by A2DP audio HAL
    // implementation. ENODATA can also be reported while the writer is
    // continuously querying it, but the stream has been stopped.
    static const std::vector<int> ignoredErrors{EINVAL, EAGAIN, ENODATA, ENOSYS};
    Result retval(Result::NOT_SUPPORTED);
    if (stream->get_presentation_position == NULL) return retval;
    struct timespec halTimeStamp;
    retval = Stream::analyzeStatus("get_presentation_position",
                                   stream->get_presentation_position(stream, frames, &halTimeStamp),
                                   ignoredErrors);
    if (retval == Result::OK) {
        timeStamp->tvSec = halTimeStamp.tv_sec;
        timeStamp->tvNSec = halTimeStamp.tv_nsec;
    }
    return retval;
}

Return<void> StreamOut::getPresentationPosition(getPresentationPosition_cb _hidl_cb) {
    uint64_t frames = 0;
    TimeSpec timeStamp = {0, 0};
    Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp);
    _hidl_cb(retval, frames, timeStamp);
    return Void();
}

Return<Result> StreamOut::start() {
    return mStreamMmap->start();
}

Return<Result> StreamOut::stop() {
    return mStreamMmap->stop();
}

Return<void> StreamOut::createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) {
    return mStreamMmap->createMmapBuffer(minSizeFrames, audio_stream_out_frame_size(mStream),
                                         _hidl_cb);
}

Return<void> StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) {
    return mStreamMmap->getMmapPosition(_hidl_cb);
}

Return<void> StreamOut::debug(const hidl_handle& fd, const hidl_vec<hidl_string>& options) {
    return mStreamCommon->debug(fd, options);
}

#if MAJOR_VERSION >= 4
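// Converts the HIDL source metadata into the legacy HAL representation and passes it to the
// stream's update_source_metadata entry point. On V7 the conversion is first validated against
// the V7 track metadata format so behavior stays consistent with openOutputStream.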
Result StreamOut::doUpdateSourceMetadata(const SourceMetadata& sourceMetadata) {
    std::vector<playback_track_metadata_t> halTracks;
#if MAJOR_VERSION <= 6
    (void)CoreUtils::sourceMetadataToHal(sourceMetadata, &halTracks);
#else
    // Validate whether a conversion to V7 is possible. This is needed
    // to have a consistent behavior of the HAL regardless of the API
    // version of the legacy HAL (and also to be consistent with openOutputStream).
    std::vector<playback_track_metadata_v7> halTracksV7;
    if (status_t status = CoreUtils::sourceMetadataToHalV7(
            sourceMetadata, false /*ignoreNonVendorTags*/, &halTracksV7);
        status == NO_ERROR) {
        halTracks.reserve(halTracksV7.size());
        for (auto metadata_v7 : halTracksV7) {
            halTracks.push_back(std::move(metadata_v7.base));
        }
    } else {
        return Stream::analyzeStatus("sourceMetadataToHal", status);
    }
#endif  // MAJOR_VERSION <= 6
    const source_metadata_t halMetadata = {
        .track_count = halTracks.size(),
        .tracks = halTracks.data(),
    };
    mStream->update_source_metadata(mStream, &halMetadata);
    return Result::OK;
}

#if MAJOR_VERSION >= 7
Result StreamOut::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
    std::vector<playback_track_metadata_v7> halTracks;
    if (status_t status = CoreUtils::sourceMetadataToHalV7(
            sourceMetadata, false /*ignoreNonVendorTags*/, &halTracks);
        status != NO_ERROR) {
        return Stream::analyzeStatus("sourceMetadataToHal", status);
    }
    const source_metadata_v7_t halMetadata = {
        .track_count = halTracks.size(),
        .tracks = halTracks.data(),
    };
    mStream->update_source_metadata_v7(mStream, &halMetadata);
    return Result::OK;
}
#endif  // MAJOR_VERSION >= 7

#if MAJOR_VERSION <= 6
Return<void> StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
    if (mStream->update_source_metadata == nullptr) {
        return Void();  // not supported by the HAL
    }
    (void)doUpdateSourceMetadata(sourceMetadata);
    return Void();
}
#elif MAJOR_VERSION >= 7
Return<Result> StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
    if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
        if (mStream->update_source_metadata == nullptr) {
            return Result::NOT_SUPPORTED;
        }
        return doUpdateSourceMetadata(sourceMetadata);
    } else {
        if (mStream->update_source_metadata_v7 == nullptr) {
            return Result::NOT_SUPPORTED;
        }
        return doUpdateSourceMetadataV7(sourceMetadata);
    }
}
#endif

Return<Result> StreamOut::selectPresentation(int32_t /*presentationId*/, int32_t /*programId*/) {
    return Result::NOT_SUPPORTED;  // TODO: propagate to legacy
}
#endif

#if MAJOR_VERSION >= 6
Return<void> StreamOut::getDualMonoMode(getDualMonoMode_cb _hidl_cb) {
    audio_dual_mono_mode_t mode = AUDIO_DUAL_MONO_MODE_OFF;
    Result retval = mStream->get_dual_mono_mode != nullptr
                        ? Stream::analyzeStatus("get_dual_mono_mode",
                                                mStream->get_dual_mono_mode(mStream, &mode))
                        : Result::NOT_SUPPORTED;
    _hidl_cb(retval, DualMonoMode(mode));
    return Void();
}

Return<Result> StreamOut::setDualMonoMode(DualMonoMode mode) {
    return mStream->set_dual_mono_mode != nullptr
               ? Stream::analyzeStatus(
                     "set_dual_mono_mode",
                     mStream->set_dual_mono_mode(mStream,
                                                 static_cast<audio_dual_mono_mode_t>(mode)))
               : Result::NOT_SUPPORTED;
}

Return<void> StreamOut::getAudioDescriptionMixLevel(getAudioDescriptionMixLevel_cb _hidl_cb) {
    float leveldB = -std::numeric_limits<float>::infinity();
    Result retval = mStream->get_audio_description_mix_level != nullptr
                        ? Stream::analyzeStatus(
                              "get_audio_description_mix_level",
                              mStream->get_audio_description_mix_level(mStream, &leveldB))
                        : Result::NOT_SUPPORTED;
    _hidl_cb(retval, leveldB);
    return Void();
}

Return<Result> StreamOut::setAudioDescriptionMixLevel(float leveldB) {
    return mStream->set_audio_description_mix_level != nullptr
               ? Stream::analyzeStatus(
                     "set_audio_description_mix_level",
                     mStream->set_audio_description_mix_level(mStream, leveldB))
               : Result::NOT_SUPPORTED;
}

Return<void> StreamOut::getPlaybackRateParameters(getPlaybackRateParameters_cb _hidl_cb) {
    audio_playback_rate_t rate = AUDIO_PLAYBACK_RATE_INITIALIZER;
    Result retval =
        mStream->get_playback_rate_parameters != nullptr
            ? Stream::analyzeStatus("get_playback_rate_parameters",
                                    mStream->get_playback_rate_parameters(mStream, &rate))
            : Result::NOT_SUPPORTED;
    _hidl_cb(retval,
             PlaybackRate{rate.mSpeed, rate.mPitch, static_cast<TimestretchMode>(rate.mStretchMode),
                          static_cast<TimestretchFallbackMode>(rate.mFallbackMode)});
    return Void();
}

Return<Result> StreamOut::setPlaybackRateParameters(const PlaybackRate& playbackRate) {
    audio_playback_rate_t rate = {
        playbackRate.speed, playbackRate.pitch,
        static_cast<audio_timestretch_stretch_mode_t>(playbackRate.timestretchMode),
        static_cast<audio_timestretch_fallback_mode_t>(playbackRate.fallbackMode)};
    return mStream->set_playback_rate_parameters != nullptr
               ? Stream::analyzeStatus("set_playback_rate_parameters",
                                       mStream->set_playback_rate_parameters(mStream, &rate))
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::setEventCallback(const sp<IStreamOutEventCallback>& callback) {
    if (mStream->set_event_callback == nullptr) return Result::NOT_SUPPORTED;
    int result = mStream->set_event_callback(mStream, StreamOut::asyncEventCallback, this);
    if (result == 0) {
        mEventCallback = callback;
    }
    return Stream::analyzeStatus("set_stream_out_callback", result, {ENOSYS} /*ignore*/);
}

// static
int StreamOut::asyncEventCallback(stream_event_callback_type_t event, void* param, void* cookie) {
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    sp<IStreamOutEventCallback> eventCallback = self->mEventCallback.load();
    if (eventCallback.get() == nullptr) return 0;
    ALOGV("%s event %d", __func__, event);
    Return<void> result;
    switch (event) {
        case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED: {
            hidl_vec<uint8_t> audioMetadata;
            // void* param is the byte string buffer from byte_string_from_audio_metadata().
            // As the byte string buffer may have embedded zeroes, we cannot use strlen()
            // but instead use audio_utils::metadata::dataByteStringLen().
            audioMetadata.setToExternal((uint8_t*)param, audio_utils::metadata::dataByteStringLen(
                                                             (const uint8_t*)param));
            result = eventCallback->onCodecFormatChanged(audioMetadata);
        } break;
        default:
            ALOGW("%s unknown event %d", __func__, event);
            break;
    }
    ALOGW_IF(!result.isOk(), "Client callback failed: %s", result.description().c_str());
    return 0;
}

#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
Return<Result> StreamOut::setLatencyMode(LatencyMode mode) {
    return mStream->set_latency_mode != nullptr
               ? Stream::analyzeStatus(
                     "set_latency_mode",
                     mStream->set_latency_mode(mStream, static_cast<audio_latency_mode_t>(mode)))
               : Result::NOT_SUPPORTED;
};

Return<void> StreamOut::getRecommendedLatencyModes(getRecommendedLatencyModes_cb _hidl_cb) {
    Result retval = Result::NOT_SUPPORTED;
    hidl_vec<LatencyMode> hidlModes;
    size_t num_modes = AUDIO_LATENCY_MODE_CNT;
    audio_latency_mode_t modes[AUDIO_LATENCY_MODE_CNT];

    if (mStream->get_recommended_latency_modes != nullptr &&
        mStream->get_recommended_latency_modes(mStream, &modes[0], &num_modes) == 0) {
        if (num_modes == 0 || num_modes > AUDIO_LATENCY_MODE_CNT) {
            ALOGW("%s invalid number of modes returned: %zu", __func__, num_modes);
            retval = Result::INVALID_STATE;
        } else {
            hidlModes.resize(num_modes);
            for (size_t i = 0; i < num_modes; ++i) {
                hidlModes[i] = static_cast<LatencyMode>(modes[i]);
            }
            retval = Result::OK;
        }
    }
    _hidl_cb(retval, hidlModes);
    return Void();
};

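// Invoked by the legacy HAL on its own thread when its recommended latency modes change;
// converts the modes array into a HIDL vector and forwards it to the registered callback.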
// static
void StreamOut::latencyModeCallback(audio_latency_mode_t* modes, size_t num_modes, void* cookie) {
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    sp<IStreamOutLatencyModeCallback> callback = self->mLatencyModeCallback.load();
    if (callback.get() == nullptr) return;

    ALOGV("%s", __func__);

    if (num_modes == 0 || num_modes > AUDIO_LATENCY_MODE_CNT) {
        ALOGW("%s invalid number of modes returned: %zu", __func__, num_modes);
        return;
    }

    hidl_vec<LatencyMode> hidlModes(num_modes);
    for (size_t i = 0; i < num_modes; ++i) {
        hidlModes[i] = static_cast<LatencyMode>(modes[i]);
    }
    Return<void> result = callback->onRecommendedLatencyModeChanged(hidlModes);
    ALOGW_IF(!result.isOk(), "Client callback failed: %s", result.description().c_str());
}

Return<Result> StreamOut::setLatencyModeCallback(
    const sp<IStreamOutLatencyModeCallback>& callback) {
    if (mStream->set_latency_mode_callback == nullptr) return Result::NOT_SUPPORTED;
    int result = mStream->set_latency_mode_callback(mStream, StreamOut::latencyModeCallback, this);
    if (result == 0) {
        mLatencyModeCallback = callback;
    }
    return Stream::analyzeStatus("set_latency_mode_callback", result, {ENOSYS} /*ignore*/);
};

#endif

#endif

}  // namespace implementation
}  // namespace CPP_VERSION
}  // namespace audio
}  // namespace hardware
}  // namespace android