1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "AHAL_StreamRemoteSubmix"
18 #include <android-base/logging.h>
19 #include <audio_utils/clock.h>
20 #include <error/Result.h>
21 #include <error/expected_utils.h>
22 
23 #include "core-impl/StreamRemoteSubmix.h"
24 
25 using aidl::android::hardware::audio::common::SinkMetadata;
26 using aidl::android::hardware::audio::common::SourceMetadata;
27 using aidl::android::hardware::audio::core::r_submix::SubmixRoute;
28 using aidl::android::media::audio::common::AudioDeviceAddress;
29 using aidl::android::media::audio::common::AudioOffloadInfo;
30 using aidl::android::media::audio::common::MicrophoneDynamicInfo;
31 using aidl::android::media::audio::common::MicrophoneInfo;
32 
33 namespace aidl::android::hardware::audio::core {
34 
StreamRemoteSubmix(StreamContext * context,const Metadata & metadata,const AudioDeviceAddress & deviceAddress)35 StreamRemoteSubmix::StreamRemoteSubmix(StreamContext* context, const Metadata& metadata,
36                                        const AudioDeviceAddress& deviceAddress)
37     : StreamCommonImpl(context, metadata),
38       mDeviceAddress(deviceAddress),
39       mIsInput(isInput(metadata)) {
40     mStreamConfig.frameSize = context->getFrameSize();
41     mStreamConfig.format = context->getFormat();
42     mStreamConfig.channelLayout = context->getChannelLayout();
43     mStreamConfig.sampleRate = context->getSampleRate();
44 }
45 
::android::status_t StreamRemoteSubmix::init() {
    // Locate the route for this submix address, creating it (and its pipe) on
    // first use. The route is shared between the input and output halves.
    mCurrentRoute = SubmixRoute::findOrCreateRoute(mDeviceAddress, mStreamConfig);
    if (mCurrentRoute == nullptr) {
        return ::android::NO_INIT;
    }
    // A pre-existing route may have been created with a different config; both
    // halves must agree on the pipe parameters.
    if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) {
        LOG(ERROR) << __func__ << ": invalid stream config";
        return ::android::NO_INIT;
    }
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink == nullptr) {
        LOG(ERROR) << __func__ << ": nullptr sink when opening stream";
        return ::android::NO_INIT;
    }
    // Recreate a shut-down pipe when opening an output stream, or an input
    // stream while another input is already registered on the route.
    if ((!mIsInput || mCurrentRoute->isStreamInOpen()) && sink->isShutdown()) {
        LOG(DEBUG) << __func__ << ": Shut down sink when opening stream";
        if (::android::OK != mCurrentRoute->resetPipe()) {
            LOG(ERROR) << __func__ << ": reset pipe failed";
            return ::android::NO_INIT;
        }
    }
    // Register this stream (by direction) with the route.
    mCurrentRoute->openStream(mIsInput);
    return ::android::OK;
}
70 
drain(StreamDescriptor::DrainMode)71 ::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
72     usleep(1000);
73     return ::android::OK;
74 }
75 
flush()76 ::android::status_t StreamRemoteSubmix::flush() {
77     usleep(1000);
78     return ::android::OK;
79 }
80 
pause()81 ::android::status_t StreamRemoteSubmix::pause() {
82     usleep(1000);
83     return ::android::OK;
84 }
85 
::android::status_t StreamRemoteSubmix::standby() {
    // Delegate to the route so it can account for this direction going idle.
    mCurrentRoute->standby(mIsInput);
    return ::android::OK;
}
90 
::android::status_t StreamRemoteSubmix::start() {
    mCurrentRoute->exitStandby(mIsInput);
    // Reset the pacing baseline used by 'transfer' to throttle the client.
    mStartTimeNs = ::android::uptimeNanos();
    mFramesSinceStart = 0;
    return ::android::OK;
}
97 
prepareToClose()98 ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
99     if (!mIsInput) {
100         std::shared_ptr<SubmixRoute> route = SubmixRoute::findRoute(mDeviceAddress);
101         if (route != nullptr) {
102             sp<MonoPipe> sink = route->getSink();
103             if (sink == nullptr) {
104                 ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
105             }
106             LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";
107 
108             sink->shutdown(true);
109             // The client already considers this stream as closed, release the output end.
110             route->closeStream(mIsInput);
111         } else {
112             LOG(DEBUG) << __func__ << ": stream already closed.";
113             ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
114         }
115     }
116     return ndk::ScopedAStatus::ok();
117 }
118 
// Remove references to the specified input and output streams.  When the device no longer
// references input and output streams destroy the associated pipe.
void StreamRemoteSubmix::shutdown() {
    mCurrentRoute->closeStream(mIsInput);
    // If all stream instances are closed, we can remove route information for this port.
    if (!mCurrentRoute->hasAtleastOneStreamOpen()) {
        mCurrentRoute->releasePipe();
        LOG(DEBUG) << __func__ << ": pipe destroyed";
        SubmixRoute::removeRoute(mDeviceAddress);
    }
    // Drop this stream's reference; the route object may be destroyed here.
    mCurrentRoute.reset();
}
131 
::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount, int32_t* latencyMs) {
    // Report the duration of a full pipe as the stream latency.
    *latencyMs = getDelayInUsForFrameCount(getStreamPipeSizeInFrames()) / 1000;
    LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms";
    mCurrentRoute->exitStandby(mIsInput);
    ::android::status_t status = mIsInput ? inRead(buffer, frameCount, actualFrameCount)
                                          : outWrite(buffer, frameCount, actualFrameCount);
    // For output, DEAD_OBJECT (shut-down sink) is not fatal: fall through to the
    // sleep-based pacing below so the client is still throttled in real time.
    if ((status != ::android::OK && mIsInput) ||
        ((status != ::android::OK && status != ::android::DEAD_OBJECT) && !mIsInput)) {
        return status;
    }
    mFramesSinceStart += *actualFrameCount;
    if (!mIsInput && status != ::android::DEAD_OBJECT) return ::android::OK;
    // Input streams always need to block, output streams need to block when there is no sink.
    // When the sink exists, more sophisticated blocking algorithm is implemented by MonoPipe.
    const long bufferDurationUs =
            (*actualFrameCount) * MICROS_PER_SECOND / mContext.getSampleRate();
    const auto totalDurationUs = (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND;
    // A positive offset means the client is ahead of real time: sleep to pace
    // it, but never longer than the duration of the buffer just transferred.
    const long totalOffsetUs =
            mFramesSinceStart * MICROS_PER_SECOND / mContext.getSampleRate() - totalDurationUs;
    LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs;
    if (totalOffsetUs > 0) {
        const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs);
        LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us";
        usleep(sleepTimeUs);
    }
    return ::android::OK;
}
160 
refinePosition(StreamDescriptor::Position * position)161 ::android::status_t StreamRemoteSubmix::refinePosition(StreamDescriptor::Position* position) {
162     sp<MonoPipeReader> source = mCurrentRoute->getSource();
163     if (source == nullptr) {
164         return ::android::NO_INIT;
165     }
166     const ssize_t framesInPipe = source->availableToRead();
167     if (framesInPipe <= 0) {
168         // No need to update the position frames
169         return ::android::OK;
170     }
171     if (mIsInput) {
172         position->frames += framesInPipe;
173     } else if (position->frames >= framesInPipe) {
174         position->frames -= framesInPipe;
175     }
176     return ::android::OK;
177 }
178 
// Returns the duration, in microseconds, of `frameCount` frames at this
// stream's sample rate.
long StreamRemoteSubmix::getDelayInUsForFrameCount(size_t frameCount) {
    return frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate;
}
182 
183 // Calculate the maximum size of the pipe buffer in frames for the specified stream.
getStreamPipeSizeInFrames()184 size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() {
185     auto pipeConfig = mCurrentRoute->getPipeConfig();
186     const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize);
187     return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize;
188 }
189 
::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount) {
    // Writes up to `frameCount` frames from `buffer` into the submix pipe and
    // reports the count actually written via `*actualFrameCount`.
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink != nullptr) {
        if (sink->isShutdown()) {
            sink.clear();
            // Rate-limit logging while the pipe stays shut down.
            if (++mWriteShutdownCount < kMaxErrorLogs) {
                LOG(DEBUG) << __func__ << ": pipe shutdown, ignoring the write. (limited logging)";
            }
            // Report the full count so the client does not retry the same data.
            *actualFrameCount = frameCount;
            return ::android::DEAD_OBJECT;  // Induce wait in `transfer`.
        }
    } else {
        LOG(FATAL) << __func__ << ": without a pipe!";
        return ::android::UNKNOWN_ERROR;
    }
    mWriteShutdownCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";

    const bool shouldBlockWrite = mCurrentRoute->shouldBlockWrite();
    size_t availableToWrite = sink->availableToWrite();
    // NOTE: sink has been checked above and sink and source life cycles are synchronized
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    // If the write to the sink should be blocked, flush enough frames from the pipe to make space
    // to write the most recent data.
    if (!shouldBlockWrite && availableToWrite < frameCount) {
        static uint8_t flushBuffer[64];
        const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize;
        size_t framesToFlushFromSource = frameCount - availableToWrite;
        LOG(DEBUG) << __func__ << ": flushing " << framesToFlushFromSource
                   << " frames from the pipe to avoid blocking";
        while (framesToFlushFromSource) {
            const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames);
            framesToFlushFromSource -= flushSize;
            // read does not block
            source->read(flushBuffer, flushSize);
        }
    }
    // Re-query free space: the flush above may have made room.
    availableToWrite = sink->availableToWrite();

    if (!shouldBlockWrite && frameCount > availableToWrite) {
        LOG(WARNING) << __func__ << ": writing " << availableToWrite << " vs. requested "
                     << frameCount;
        // Truncate the request to avoid blocking.
        frameCount = availableToWrite;
    }
    ssize_t writtenFrames = sink->write(buffer, frameCount);
    if (writtenFrames < 0) {
        if (writtenFrames == (ssize_t)::android::NEGOTIATE) {
            LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE";
            sink.clear();
            *actualFrameCount = 0;
            return ::android::UNKNOWN_ERROR;
        } else {
            // write() returned UNDERRUN or WOULD_BLOCK, retry
            LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames;
            writtenFrames = sink->write(buffer, frameCount);
        }
    }

    // A second failure (after the single retry above) is treated as fatal.
    if (writtenFrames < 0) {
        LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames;
        *actualFrameCount = 0;
        return ::android::UNKNOWN_ERROR;
    }
    if (writtenFrames > 0 && frameCount > (size_t)writtenFrames) {
        LOG(WARNING) << __func__ << ": wrote " << writtenFrames << " vs. requested " << frameCount;
    }
    *actualFrameCount = writtenFrames;
    return ::android::OK;
}
263 
::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount,
                                               size_t* actualFrameCount) {
    // in any case, it is emulated that data for the entire buffer was available
    memset(buffer, 0, mStreamConfig.frameSize * frameCount);
    *actualFrameCount = frameCount;

    // about to read from audio source
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    if (source == nullptr) {
        // No pipe yet: return the silence prepared above, with throttled logging.
        if (++mReadErrorCount < kMaxErrorLogs) {
            LOG(ERROR) << __func__
                       << ": no audio pipe yet we're trying to read! (not all errors will be "
                          "logged)";
        }
        return ::android::OK;
    }
    mReadErrorCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";
    // read the data from the pipe
    char* buff = (char*)buffer;
    size_t actuallyRead = 0;
    long remainingFrames = frameCount;
    // Give up after roughly half the buffer duration so capture never stalls.
    const int64_t deadlineTimeNs =
            ::android::uptimeNanos() +
            getDelayInUsForFrameCount(frameCount) * NANOS_PER_MICROSECOND / 2;
    while (remainingFrames > 0) {
        ssize_t framesRead = source->read(buff, remainingFrames);
        LOG(VERBOSE) << __func__ << ": frames read " << framesRead;
        if (framesRead > 0) {
            remainingFrames -= framesRead;
            buff += framesRead * mStreamConfig.frameSize;
            LOG(VERBOSE) << __func__ << ": got " << framesRead
                         << " frames, remaining =" << remainingFrames;
            actuallyRead += framesRead;
        }
        if (::android::uptimeNanos() >= deadlineTimeNs) break;
        if (framesRead <= 0) {
            LOG(VERBOSE) << __func__ << ": read returned " << framesRead
                         << ", read failure, sleeping for " << kReadAttemptSleepUs << " us";
            usleep(kReadAttemptSleepUs);
        }
    }
    if (actuallyRead < frameCount) {
        // Short read: the tail of the buffer remains zero-filled (silence).
        if (++mReadFailureCount < kMaxReadFailureAttempts) {
            LOG(WARNING) << __func__ << ": read " << actuallyRead << " vs. requested " << frameCount
                         << " (not all errors will be logged)";
        }
    } else {
        mReadFailureCount = 0;
    }
    // The route's read counter advances by the full request, matching the
    // zero-padded buffer handed back to the client.
    mCurrentRoute->updateReadCounterFrames(*actualFrameCount);
    return ::android::OK;
}
319 
// Input stream wrapper: defers creation of the real submix stream to the
// StreamSwitcher base until a supported device is connected.
StreamInRemoteSubmix::StreamInRemoteSubmix(StreamContext&& context,
                                           const SinkMetadata& sinkMetadata,
                                           const std::vector<MicrophoneInfo>& microphones)
    : StreamIn(std::move(context), microphones), StreamSwitcher(&mContextInstance, sinkMetadata) {}
324 
getActiveMicrophones(std::vector<MicrophoneDynamicInfo> * _aidl_return)325 ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones(
326         std::vector<MicrophoneDynamicInfo>* _aidl_return) {
327     LOG(DEBUG) << __func__ << ": not supported";
328     *_aidl_return = std::vector<MicrophoneDynamicInfo>();
329     return ndk::ScopedAStatus::ok();
330 }
331 
switchCurrentStream(const std::vector<::aidl::android::media::audio::common::AudioDevice> & devices)332 StreamSwitcher::DeviceSwitchBehavior StreamInRemoteSubmix::switchCurrentStream(
333         const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
334     // This implementation effectively postpones stream creation until
335     // receiving the first call to 'setConnectedDevices' with a non-empty list.
336     if (isStubStream()) {
337         if (devices.size() == 1) {
338             auto deviceDesc = devices.front().type;
339             if (deviceDesc.type ==
340                 ::aidl::android::media::audio::common::AudioDeviceType::IN_SUBMIX) {
341                 return DeviceSwitchBehavior::CREATE_NEW_STREAM;
342             }
343             LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
344                        << " not supported";
345         } else {
346             LOG(ERROR) << __func__ << ": Only single device supported.";
347         }
348         return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
349     }
350     return DeviceSwitchBehavior::USE_CURRENT_STREAM;
351 }
352 
createNewStream(const std::vector<::aidl::android::media::audio::common::AudioDevice> & devices,StreamContext * context,const Metadata & metadata)353 std::unique_ptr<StreamCommonInterfaceEx> StreamInRemoteSubmix::createNewStream(
354         const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
355         StreamContext* context, const Metadata& metadata) {
356     return std::unique_ptr<StreamCommonInterfaceEx>(
357             new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
358 }
359 
// Output stream wrapper: defers creation of the real submix stream to the
// StreamSwitcher base until a supported device is connected.
StreamOutRemoteSubmix::StreamOutRemoteSubmix(StreamContext&& context,
                                             const SourceMetadata& sourceMetadata,
                                             const std::optional<AudioOffloadInfo>& offloadInfo)
    : StreamOut(std::move(context), offloadInfo),
      StreamSwitcher(&mContextInstance, sourceMetadata) {}
365 
switchCurrentStream(const std::vector<::aidl::android::media::audio::common::AudioDevice> & devices)366 StreamSwitcher::DeviceSwitchBehavior StreamOutRemoteSubmix::switchCurrentStream(
367         const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
368     // This implementation effectively postpones stream creation until
369     // receiving the first call to 'setConnectedDevices' with a non-empty list.
370     if (isStubStream()) {
371         if (devices.size() == 1) {
372             auto deviceDesc = devices.front().type;
373             if (deviceDesc.type ==
374                 ::aidl::android::media::audio::common::AudioDeviceType::OUT_SUBMIX) {
375                 return DeviceSwitchBehavior::CREATE_NEW_STREAM;
376             }
377             LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
378                        << " not supported";
379         } else {
380             LOG(ERROR) << __func__ << ": Only single device supported.";
381         }
382         return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
383     }
384     return DeviceSwitchBehavior::USE_CURRENT_STREAM;
385 }
386 
createNewStream(const std::vector<::aidl::android::media::audio::common::AudioDevice> & devices,StreamContext * context,const Metadata & metadata)387 std::unique_ptr<StreamCommonInterfaceEx> StreamOutRemoteSubmix::createNewStream(
388         const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
389         StreamContext* context, const Metadata& metadata) {
390     return std::unique_ptr<StreamCommonInterfaceEx>(
391             new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
392 }
393 
394 }  // namespace aidl::android::hardware::audio::core
395