/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaSync"
#include <inttypes.h>

#include <gui/BufferQueue.h>
#include <gui/IGraphicBufferConsumer.h>
#include <gui/IGraphicBufferProducer.h>

#include <media/AudioTrack.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaSync.h>
#include <media/stagefright/VideoFrameScheduler.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

#include <ui/GraphicBuffer.h>

// Maximum late time allowed for a video frame to be rendered. When a video
// frame arrives later than this number, it will be discarded without rendering.
static const int64_t kMaxAllowedVideoLateTimeUs = 40000ll;

namespace android {

// static
sp<MediaSync> MediaSync::create() {
    sp<MediaSync> sync = new MediaSync();
    sync->mLooper->registerHandler(sync);
    return sync;
}
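
// Illustrative only: a rough sketch of how a client is expected to drive this
// class, inferred from the checks below (createInputSurface() requires that an
// output surface has already been set, and draining starts once a nonzero
// playback speed is applied). The variable names here are hypothetical.
//
//   sp<MediaSync> sync = MediaSync::create();
//   sync->setSurface(outputSurfaceProducer);      // must precede createInputSurface()
//   sp<IGraphicBufferProducer> inputProducer;
//   sync->createInputSurface(&inputProducer);     // decoded frames are queued here
//   sync->setAudioTrack(audioTrack);              // optional; enables audio-master sync
//   AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
//   rate.mSpeed = 1.0f;
//   sync->setPlaybackSettings(rate);              // nonzero speed starts draining video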

MediaSync::MediaSync()
      : mIsAbandoned(false),
        mMutex(),
        mReleaseCondition(),
        mNumOutstandingBuffers(0),
        mUsageFlagsFromOutput(0),
        mMaxAcquiredBufferCount(1),
        mReturnPendingInputFrame(false),
        mNativeSampleRateInHz(0),
        mNumFramesWritten(0),
        mHasAudio(false),
        mNextBufferItemMediaUs(-1),
        mPlaybackRate(0.0) {
    mMediaClock = new MediaClock;

    // initialize settings
    mPlaybackSettings = AUDIO_PLAYBACK_RATE_DEFAULT;
    mPlaybackSettings.mSpeed = mPlaybackRate;

    mLooper = new ALooper;
    mLooper->setName("MediaSync");
    mLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}

MediaSync::~MediaSync() {
    if (mInput != NULL) {
        mInput->consumerDisconnect();
    }
    if (mOutput != NULL) {
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
    }

    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->stop();
    }
}

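// Switches the output surface. A new surface may not require usage flags
// beyond those already propagated to the input (HW_TEXTURE, HW_COMPOSER and
// EXTERNAL_DISP are ignored in that check). On success, any buffers still held
// by the old output are returned to the input. Removing the surface is
// rejected while it serves as the vsync sync source.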
status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) {
    Mutex::Autolock lock(mMutex);

    if (output == mOutput) {
        return NO_ERROR;  // same output surface.
    }

    if (output == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_VSYNC) {
        ALOGE("setSurface: output surface is used as sync source and cannot be removed.");
        return INVALID_OPERATION;
    }

    if (output != NULL) {
        int newUsage = 0;
        output->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &newUsage);

        // Check usage flags only when the current output surface has been used
        // to create the input surface.
        if (mOutput != NULL && mInput != NULL) {
            int ignoredFlags = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER
                    | GRALLOC_USAGE_EXTERNAL_DISP);
            // The new output surface may not add usage flags, except for the ignored ones.
            if ((newUsage & ~(mUsageFlagsFromOutput | ignoredFlags)) != 0) {
                ALOGE("setSurface: new output surface has new usage flag not used by current one.");
                return BAD_VALUE;
            }
        }

        // Try to connect to the new output surface. If this fails, the current
        // output surface will not be changed.
        IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
        sp<OutputListener> listener(new OutputListener(this, output));
        IInterface::asBinder(output)->linkToDeath(listener);
        status_t status =
            output->connect(listener,
                            NATIVE_WINDOW_API_MEDIA,
                            true /* producerControlledByApp */,
                            &queueBufferOutput);
        if (status != NO_ERROR) {
            ALOGE("setSurface: failed to connect (%d)", status);
            return status;
        }

        if (mFrameScheduler == NULL) {
            mFrameScheduler = new VideoFrameScheduler();
            mFrameScheduler->init();
        }
    }

    if (mOutput != NULL) {
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        while (!mBuffersSentToOutput.isEmpty()) {
            returnBufferToInput_l(mBuffersSentToOutput.valueAt(0), Fence::NO_FENCE);
            mBuffersSentToOutput.removeItemsAt(0);
        }
    }

    mOutput = output;

    return NO_ERROR;
}

// |audioTrack| is used only for querying information.
status_t MediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) {
    Mutex::Autolock lock(mMutex);

    // TODO: support audio track change.
    if (mAudioTrack != NULL) {
        ALOGE("setAudioTrack: audioTrack has already been configured.");
        return INVALID_OPERATION;
    }

    if (audioTrack == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_AUDIO) {
        ALOGE("setAudioTrack: audioTrack is used as sync source and cannot be removed.");
        return INVALID_OPERATION;
    }

    if (audioTrack != NULL) {
        // check if audio track supports the playback settings
        if (mPlaybackSettings.mSpeed != 0.f
                && audioTrack->setPlaybackRate(mPlaybackSettings) != OK) {
            ALOGE("playback settings are not supported by the audio track");
            return INVALID_OPERATION;
        }
        uint32_t nativeSampleRateInHz = audioTrack->getOriginalSampleRate();
        if (nativeSampleRateInHz <= 0) {
            ALOGE("setAudioTrack: native sample rate should be positive.");
            return BAD_VALUE;
        }
        mAudioTrack = audioTrack;
        mNativeSampleRateInHz = nativeSampleRateInHz;
        (void)setPlaybackSettings_l(mPlaybackSettings);
    } else {
        mAudioTrack = NULL;
        mNativeSampleRateInHz = 0;
    }

    // potentially resync to new source
    resync_l();
    return OK;
}

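// Creates the input BufferQueue that the decoder will queue frames into. The
// output surface must already be set (NO_INIT otherwise) because its consumer
// usage bits and minimum undequeued buffer count are propagated to the new
// consumer. Only one input surface may be created (INVALID_OPERATION if it
// already exists).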
status_t MediaSync::createInputSurface(
        sp<IGraphicBufferProducer> *outBufferProducer) {
    if (outBufferProducer == NULL) {
        return BAD_VALUE;
    }

    Mutex::Autolock lock(mMutex);

    if (mOutput == NULL) {
        return NO_INIT;
    }

    if (mInput != NULL) {
        return INVALID_OPERATION;
    }

    sp<IGraphicBufferProducer> bufferProducer;
    sp<IGraphicBufferConsumer> bufferConsumer;
    BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer);

    sp<InputListener> listener(new InputListener(this));
    IInterface::asBinder(bufferConsumer)->linkToDeath(listener);
    status_t status =
        bufferConsumer->consumerConnect(listener, false /* controlledByApp */);
    if (status == NO_ERROR) {
        bufferConsumer->setConsumerName(String8("MediaSync"));
        // propagate usage bits from output surface
        mUsageFlagsFromOutput = 0;
        mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &mUsageFlagsFromOutput);
        bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput);
        *outBufferProducer = bufferProducer;
        mInput = bufferConsumer;

        // set undequeued buffer count
        int minUndequeuedBuffers;
        mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
        mMaxAcquiredBufferCount = minUndequeuedBuffers;
        bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
    }
    return status;
}

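// Re-evaluates the effective sync source: AVSYNC_SOURCE_DEFAULT falls back to
// the audio track when one is configured, otherwise to the system clock. The
// actual re-anchoring is still a TODO; for now this only re-applies the
// current playback rate.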
void MediaSync::resync_l() {
    AVSyncSource src = mSyncSettings.mSource;
    if (src == AVSYNC_SOURCE_DEFAULT) {
        if (mAudioTrack != NULL) {
            src = AVSYNC_SOURCE_AUDIO;
        } else {
            src = AVSYNC_SOURCE_SYSTEM_CLOCK;
        }
    }

    // TODO: resync ourselves to the current clock (e.g. on sync source change)
    updatePlaybackRate_l(mPlaybackRate);
}

void MediaSync::updatePlaybackRate_l(float rate) {
    if (rate > mPlaybackRate) {
        mNextBufferItemMediaUs = -1;
    }
    mPlaybackRate = rate;
    // TODO: update frame scheduler with this info
    mMediaClock->setPlaybackRate(rate);
    onDrainVideo_l();
}

sp<const MediaClock> MediaSync::getMediaClock() {
    return mMediaClock;
}

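// Returns the wall-clock time (in microseconds) it will take to play out the
// audio frames that have been written but not yet played, at the current
// playback rate:
//
//   pendingFrames = mNumFramesWritten - framesPlayed
//   playTimeUs    = pendingFrames * 1e6 / (mNativeSampleRateInHz * mPlaybackRate)
//
// A paused stream (rate 0) reports INT64_MAX, so callers should check the
// playback rate first.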
status_t MediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
    Mutex::Autolock lock(mMutex);
    // Callers that do not want to receive a huge value for the play time
    // should check the playback rate first.
    if (mPlaybackRate == 0.0f) {
        *outTimeUs = INT64_MAX;
        return OK;
    }

    uint32_t numFramesPlayed = 0;
    if (mAudioTrack != NULL) {
        status_t res = mAudioTrack->getPosition(&numFramesPlayed);
        if (res != OK) {
            return res;
        }
    }

    int64_t numPendingFrames = mNumFramesWritten - numFramesPlayed;
    if (numPendingFrames < 0) {
        numPendingFrames = 0;
        ALOGW("getPlayTimeForPendingAudioFrames: pending frame count is negative.");
    }
    double timeUs = numPendingFrames * 1000000.0
            / (mNativeSampleRateInHz * (double)mPlaybackRate);
    if (timeUs > (double)INT64_MAX) {
        // Overflow.
        *outTimeUs = INT64_MAX;
        ALOGW("getPlayTimeForPendingAudioFrames: play time for pending audio frames "
              "is too high, possibly due to super low playback rate(%f)", mPlaybackRate);
    } else {
        *outTimeUs = (int64_t)timeUs;
    }

    return OK;
}

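// Called whenever more audio data is queued to the track. Updates the media
// clock anchor so that video can follow the audio position:
//
//   maxMediaTimeUs = presentationTimeUs + duration(numFrames just queued)
//   nowMediaUs     = presentationTimeUs - duration(all frames written so far)
//                    + playedOutAudioDuration(nowUs)
//
// If the new anchor moves the next scheduled video frame earlier, the pending
// drain is rescheduled by clearing mNextBufferItemMediaUs and draining now.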
status_t MediaSync::updateQueuedAudioData(
        size_t sizeInBytes, int64_t presentationTimeUs) {
    if (sizeInBytes == 0) {
        return OK;
    }

    Mutex::Autolock lock(mMutex);

    if (mAudioTrack == NULL) {
        ALOGW("updateQueuedAudioData: audioTrack has NOT been configured.");
        return INVALID_OPERATION;
    }

    int64_t numFrames = sizeInBytes / mAudioTrack->frameSize();
    int64_t maxMediaTimeUs = presentationTimeUs
            + getDurationIfPlayedAtNativeSampleRate_l(numFrames);

    int64_t nowUs = ALooper::GetNowUs();
    int64_t nowMediaUs = presentationTimeUs
            - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten)
            + getPlayedOutAudioDurationMedia_l(nowUs);

    mNumFramesWritten += numFrames;

    int64_t oldRealTime = -1;
    if (mNextBufferItemMediaUs != -1) {
        oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
    }

    mMediaClock->updateAnchor(nowMediaUs, nowUs, maxMediaTimeUs);
    mHasAudio = true;

    if (oldRealTime != -1) {
        int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
        if (newRealTime >= oldRealTime) {
            return OK;
        }
    }

    mNextBufferItemMediaUs = -1;
    onDrainVideo_l();
    return OK;
}

void MediaSync::setName(const AString &name) {
    Mutex::Autolock lock(mMutex);
    mInput->setConsumerName(String8(name.c_str()));
}

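// Drops all queued video frames back to the input, resets the frame scheduler
// and the written-frame count, clears the media clock anchor, and wakes any
// onFrameAvailableFromInput() call that is waiting for a buffer release so it
// can return the frame it is about to acquire.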
void MediaSync::flush() {
    Mutex::Autolock lock(mMutex);
    if (mFrameScheduler != NULL) {
        mFrameScheduler->restart();
    }
    while (!mBufferItems.empty()) {
        BufferItem *bufferItem = &*mBufferItems.begin();
        returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence);
        mBufferItems.erase(mBufferItems.begin());
    }
    mNextBufferItemMediaUs = -1;
    mNumFramesWritten = 0;
    mReturnPendingInputFrame = true;
    mReleaseCondition.signal();
    mMediaClock->clearAnchor();
}

status_t MediaSync::setVideoFrameRateHint(float rate) {
    Mutex::Autolock lock(mMutex);
    if (rate < 0.f) {
        return BAD_VALUE;
    }
    if (mFrameScheduler != NULL) {
        mFrameScheduler->init(rate);
    }
    return OK;
}

float MediaSync::getVideoFrameRate() {
    Mutex::Autolock lock(mMutex);
    if (mFrameScheduler != NULL) {
        float fps = mFrameScheduler->getFrameRate();
        if (fps > 0.f) {
            return fps;
        }
    }

    // we don't have or know the frame rate
    return -1.f;
}

status_t MediaSync::setSyncSettings(const AVSyncSettings &syncSettings) {
    // validate settings
    if (syncSettings.mSource >= AVSYNC_SOURCE_MAX
            || syncSettings.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
            || syncSettings.mTolerance < 0.f
            || syncSettings.mTolerance >= AVSYNC_TOLERANCE_MAX) {
        return BAD_VALUE;
    }

    Mutex::Autolock lock(mMutex);

    // verify that we have the sync source
    switch (syncSettings.mSource) {
        case AVSYNC_SOURCE_AUDIO:
            if (mAudioTrack == NULL) {
                ALOGE("setSyncSettings: audio sync source requires an audio track");
                return BAD_VALUE;
            }
            break;
        case AVSYNC_SOURCE_VSYNC:
            if (mOutput == NULL) {
                ALOGE("setSyncSettings: vsync sync source requires an output surface");
                return BAD_VALUE;
            }
            break;
        default:
            break;
    }

    mSyncSettings = syncSettings;
    resync_l();
    return OK;
}

void MediaSync::getSyncSettings(AVSyncSettings *syncSettings) {
    Mutex::Autolock lock(mMutex);
    *syncSettings = mSyncSettings;
}

status_t MediaSync::setPlaybackSettings(const AudioPlaybackRate &rate) {
    Mutex::Autolock lock(mMutex);

    status_t err = setPlaybackSettings_l(rate);
    if (err == OK) {
        // TODO: adjust rate if using VSYNC as source
        updatePlaybackRate_l(rate.mSpeed);
    }
    return err;
}

status_t MediaSync::setPlaybackSettings_l(const AudioPlaybackRate &rate) {
    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) {
        // We don't validate other audio settings.
        // They will be validated when/if audiotrack is set.
        return BAD_VALUE;
    }

    if (mAudioTrack != NULL) {
        if (rate.mSpeed == 0.f) {
            mAudioTrack->pause();
        } else {
            status_t err = mAudioTrack->setPlaybackRate(rate);
            if (err != OK) {
                return BAD_VALUE;
            }

            // ignore errors
            (void)mAudioTrack->start();
        }
    }
    mPlaybackSettings = rate;
    return OK;
}

void MediaSync::getPlaybackSettings(AudioPlaybackRate *rate) {
    Mutex::Autolock lock(mMutex);
    *rate = mPlaybackSettings;
}

int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t realUs;
    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
        // If we failed to get the current position, e.g. because the audio
        // clock is not ready, then just play out the video immediately
        // without delay.
        return nowUs;
    }
    return realUs;
}

int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) {
    return (numFrames * 1000000LL / mNativeSampleRateInHz);
}

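// Estimates how much audio (in microseconds of media time) has actually been
// played out by the device, using the best available source in order:
//   1. AudioTrack::getTimestamp() - a frame position plus its capture time,
//      clamped if the timestamp is older than 100 ms (stale mixer thread).
//   2. WOULD_BLOCK - a track that just started; assume nothing played yet.
//   3. getPosition() plus half the track latency as a rough play-out estimate.
// The result is duration(numFramesPlayed) + (nowUs - numFramesPlayedAt),
// clamped to be non-negative.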
int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
    CHECK(mAudioTrack != NULL);

    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioTrack->getTimestamp(ts);
    if (res == OK) {
        // case 1: mixing audio tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio
            // mixing threads. After pausing, the MixerThread may go idle,
            // thus the mTime estimate may become stale. Assuming that the
            // MixerThread runs 20ms, with FastMixer at 5ms, the max latency
            // should be about 25ms with an average around 12ms (to be
            // verified). For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) "
                  "numFramesPlayedAt(%lld)",
                  (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) {
        // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //      numFramesPlayed, (long long)numFramesPlayedAt);
    } else {
        // case 3: transitory at new track or audio fast tracks.
        res = mAudioTrack->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    }

    // can't be negative until 12.4 hrs, test.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);
    int64_t durationUs =
        getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed)
        + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // This occurs when numFramesPlayed is very small and either:
        // (1) in case 1, nowUs was sampled before getTimestamp() was called, so
        //     numFramesPlayedAt exceeds nowUs by more than the played duration; or
        // (2) in case 3, the half-latency added to numFramesPlayedAt exceeds the
        //     played duration.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld "
              "set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) "
          "framesAt(%lld)",
          (long long)durationUs, (long long)nowUs, numFramesPlayed,
          (long long)numFramesPlayedAt);
    return durationUs;
}

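// Pushes queued video frames to the output surface when their adjusted render
// time is within two vsyncs of now; the timestamp is snapped to a vsync by the
// frame scheduler. With audio present, frames more than
// kMaxAllowedVideoLateTimeUs late are dropped back to the input; without
// audio, every frame is rendered and the frame itself anchors the media clock.
// A frame that is not yet due schedules a kWhatDrainVideo message instead.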
void MediaSync::onDrainVideo_l() {
    if (!isPlaying()) {
        return;
    }

    while (!mBufferItems.empty()) {
        int64_t nowUs = ALooper::GetNowUs();
        BufferItem *bufferItem = &*mBufferItems.begin();
        int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
        int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);

        // adjust video frame PTS based on vsync
        itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000;
        int64_t twoVsyncsUs = 2 * (mFrameScheduler->getVsyncPeriod() / 1000);

        // post 2 display refreshes before rendering is due
        if (itemRealUs <= nowUs + twoVsyncsUs) {
            ALOGV("adjusting PTS from %lld to %lld",
                  (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs);
            bufferItem->mTimestamp = itemRealUs * 1000;
            bufferItem->mIsAutoTimestamp = false;

            if (mHasAudio) {
                if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
                    renderOneBufferItem_l(*bufferItem);
                } else {
                    // too late.
                    returnBufferToInput_l(
                            bufferItem->mGraphicBuffer, bufferItem->mFence);
                    mFrameScheduler->restart();
                }
            } else {
                // always render video buffer in video-only mode.
                renderOneBufferItem_l(*bufferItem);

                // smooth out videos >= 10fps
                mMediaClock->updateAnchor(
                        itemMediaUs, nowUs, itemMediaUs + 100000);
            }

            mBufferItems.erase(mBufferItems.begin());
            mNextBufferItemMediaUs = -1;
        } else {
            if (mNextBufferItemMediaUs == -1
                    || mNextBufferItemMediaUs > itemMediaUs) {
                sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
                msg->post(itemRealUs - nowUs - twoVsyncsUs);
                mNextBufferItemMediaUs = itemMediaUs;
            }
            break;
        }
    }
}

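// Called (via the input listener) whenever the decoder queues a new frame.
// Blocks while more than mMaxAcquiredBufferCount buffers are outstanding,
// then acquires and detaches the buffer from the input so that it can later
// be attached to the output, and kicks off draining if this is the only
// queued item.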
void MediaSync::onFrameAvailableFromInput() {
    Mutex::Autolock lock(mMutex);

    const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds

    mReturnPendingInputFrame = false;

    // If there are too many outstanding buffers, wait until a buffer is
    // released back to the input in onBufferReleased.
    // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers
    while (mNumOutstandingBuffers > mMaxAcquiredBufferCount
            && !mIsAbandoned && !mReturnPendingInputFrame) {
        if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) {
            ALOGI_IF(mPlaybackRate != 0.f, "still waiting to release a buffer before acquire");
        }

        // If the sync is abandoned while we are waiting, the release
        // condition variable will be broadcast, and we should just return
        // without attempting to do anything more (since the input queue will
        // also be abandoned).
        if (mIsAbandoned) {
            return;
        }
    }

    // Acquire and detach the buffer from the input.
    BufferItem bufferItem;
    status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */);
    if (status != NO_ERROR) {
        ALOGE("acquiring buffer from input failed (%d)", status);
        return;
    }
    ++mNumOutstandingBuffers;

    ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId());

    status = mInput->detachBuffer(bufferItem.mBuf);
    if (status != NO_ERROR) {
        ALOGE("detaching buffer from input failed (%d)", status);
        if (status == NO_INIT) {
            // If the input has been abandoned, move on.
            onAbandoned_l(true /* isInput */);
        }
        return;
    }

    if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
        // Something is wrong since this buffer should be in our hands, bail.
        ALOGE("received buffer multiple times from input");
        mInput->consumerDisconnect();
        onAbandoned_l(true /* isInput */);
        return;
    }
    mBuffersFromInput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);

    // If a flush happened while waiting for a buffer to be released, simply return it.
    // TRICKY: do it here, after it is detached, so that we don't have to cache mGraphicBuffer.
    if (mReturnPendingInputFrame) {
        mReturnPendingInputFrame = false;
        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
        return;
    }

    mBufferItems.push_back(bufferItem);

    if (mBufferItems.size() == 1) {
        onDrainVideo_l();
    }
}

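// Attaches one detached buffer to the output surface and queues it there with
// the (already adjusted) timestamp. On failure the buffer is returned to the
// input; NO_INIT from the output is treated as abandonment.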
void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) {
    IGraphicBufferProducer::QueueBufferInput queueInput(
            bufferItem.mTimestamp,
            bufferItem.mIsAutoTimestamp,
            bufferItem.mDataSpace,
            bufferItem.mCrop,
            static_cast<int32_t>(bufferItem.mScalingMode),
            bufferItem.mTransform,
            bufferItem.mIsDroppable,
            bufferItem.mFence);

    // Attach and queue the buffer to the output.
    int slot;
    mOutput->setGenerationNumber(bufferItem.mGraphicBuffer->getGenerationNumber());
    status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer);
    ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status);
    if (status == NO_ERROR) {
        IGraphicBufferProducer::QueueBufferOutput queueOutput;
        status = mOutput->queueBuffer(slot, queueInput, &queueOutput);
        ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status);
    }

    if (status != NO_ERROR) {
        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
        if (status == NO_INIT) {
            // If the output has been abandoned, move on.
            onAbandoned_l(false /* isInput */);
        }
        return;
    }

    if (mBuffersSentToOutput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) {
        // Something is wrong since this buffer should be held by output now, bail.
        mInput->consumerDisconnect();
        onAbandoned_l(true /* isInput */);
        return;
    }
    mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer);

    ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
}

void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) {
    Mutex::Autolock lock(mMutex);

    if (output != mOutput) {
        return; // This is not the current output, ignore.
    }

    sp<GraphicBuffer> buffer;
    sp<Fence> fence;
    status_t status = mOutput->detachNextBuffer(&buffer, &fence);
    ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);

    if (status == NO_INIT) {
        // If the output has been abandoned, we can't do anything else,
        // since buffer is invalid.
        onAbandoned_l(false /* isInput */);
        return;
    }

    ALOGV("detached buffer %#llx from output", (long long)buffer->getId());

    // If we've been abandoned, we can't return the buffer to the input, so just
    // move on.
    if (mIsAbandoned) {
        return;
    }

    ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId());
    if (ix < 0) {
        // The buffer is unknown, maybe leftover, ignore.
        return;
    }
    mBuffersSentToOutput.removeItemsAt(ix);

    returnBufferToInput_l(buffer, fence);
}

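// Re-attaches a buffer to the input consumer and releases it so the decoder
// can dequeue it again, then signals onFrameAvailableFromInput() that an
// outstanding buffer slot has been freed. An unknown buffer id means the
// bookkeeping is inconsistent, and the sync abandons itself.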
void MediaSync::returnBufferToInput_l(
        const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) {
    ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId());
    if (ix < 0) {
        // The buffer is unknown, something is wrong, bail.
        ALOGE("output returned unknown buffer");
        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        onAbandoned_l(false /* isInput */);
        return;
    }
    sp<GraphicBuffer> oldBuffer = mBuffersFromInput.valueAt(ix);
    mBuffersFromInput.removeItemsAt(ix);

    // Attach and release the buffer back to the input.
    int consumerSlot;
    status_t status = mInput->attachBuffer(&consumerSlot, oldBuffer);
    ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
    if (status == NO_ERROR) {
        status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */,
                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
        ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
    }

    // Notify any waiting onFrameAvailable calls.
    --mNumOutstandingBuffers;
    mReleaseCondition.signal();

    if (status == NO_ERROR) {
        ALOGV("released buffer %#llx to input", (long long)oldBuffer->getId());
    }
}

void MediaSync::onAbandoned_l(bool isInput) {
    ALOGE("the %s has abandoned me", (isInput ? "input" : "output"));
    if (!mIsAbandoned) {
        if (isInput) {
            mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
        } else {
            mInput->consumerDisconnect();
        }
        mIsAbandoned = true;
    }
    mReleaseCondition.broadcast();
}

void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainVideo:
        {
            Mutex::Autolock lock(mMutex);
            if (mNextBufferItemMediaUs != -1) {
                int64_t nowUs = ALooper::GetNowUs();
                int64_t itemRealUs = getRealTime(mNextBufferItemMediaUs, nowUs);

                // The message could arrive earlier than expected for various
                // reasons, e.g., the media clock has been changed because of a
                // new anchor time or playback rate. In such cases, the message
                // needs to be re-posted.
                if (itemRealUs > nowUs) {
                    msg->post(itemRealUs - nowUs);
                    break;
                }
            }

            onDrainVideo_l();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

MediaSync::InputListener::InputListener(const sp<MediaSync> &sync)
      : mSync(sync) {}

MediaSync::InputListener::~InputListener() {}

void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) {
    mSync->onFrameAvailableFromInput();
}

// We don't care about sideband streams, since we won't relay them.
void MediaSync::InputListener::onSidebandStreamChanged() {
    ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly.");
}

void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) {
    Mutex::Autolock lock(mSync->mMutex);
    mSync->onAbandoned_l(true /* isInput */);
}

MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync,
        const sp<IGraphicBufferProducer> &output)
      : mSync(sync),
        mOutput(output) {}

MediaSync::OutputListener::~OutputListener() {}

void MediaSync::OutputListener::onBufferReleased() {
    mSync->onBufferReleasedByOutput(mOutput);
}

void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) {
    Mutex::Autolock lock(mSync->mMutex);
    mSync->onAbandoned_l(false /* isInput */);
}

} // namespace android