/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "AWakeLock.h"
#include "NuPlayerRenderer.h"
#include <algorithm>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
#include <media/stagefright/VideoFrameScheduler.h>
#include <media/MediaCodecBuffer.h>
#include <utils/SystemClock.h>

#include <inttypes.h>

namespace android {

/*
 * Example of common configuration settings in shell script form

   #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
   adb shell setprop audio.offload.disable 1

   #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
   adb shell setprop audio.offload.video 1

   #Use audio callbacks for PCM data
   adb shell setprop media.stagefright.audio.cbk 1

   #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
   adb shell setprop media.stagefright.audio.deep 1

   #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
   adb shell setprop media.stagefright.audio.sink 1000

 * These configurations take effect for the next track played (not the current track).
 */

static inline bool getUseAudioCallbackSetting() {
    return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
}

static inline int32_t getAudioSinkPcmMsSetting() {
    return property_get_int32(
            "media.stagefright.audio.sink", 500 /* default_value */);
}

// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 10000000LL;

// Additional delay after teardown before releasing the wake lock to allow time for the audio path
// to be completely released
static const int64_t kWakelockReleaseDelayUs = 2000000LL;

// Maximum allowed delay from AudioSink, 1.5 seconds.
static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;

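// Minimum interval between media-clock anchor updates driven by audio timestamps
// (see onNewAudioMediaTime()).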
static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;

// Default video frame display duration when only video exists.
// Used to set max media time in MediaClock.
static const int64_t kDefaultVideoFrameIntervalUs = 100000LL;

// static
const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
        AUDIO_CHANNEL_NONE,
        AUDIO_OUTPUT_FLAG_NONE,
        AUDIO_FORMAT_INVALID,
        0, // mNumChannels
        0 // mSampleRate
};

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

static audio_format_t constexpr audioFormatFromEncoding(int32_t pcmEncoding) {
    switch (pcmEncoding) {
    case kAudioEncodingPcmFloat:
        return AUDIO_FORMAT_PCM_FLOAT;
    case kAudioEncodingPcm16bit:
        return AUDIO_FORMAT_PCM_16_BIT;
    case kAudioEncodingPcm8bit:
        return AUDIO_FORMAT_PCM_8_BIT; // TODO: do we want to support this?
    default:
        ALOGE("%s: Invalid encoding: %d", __func__, pcmEncoding);
        return AUDIO_FORMAT_INVALID;
    }
}

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<MediaClock> &mediaClock,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mUseVirtualAudioSink(false),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioDrainGeneration(0),
      mVideoDrainGeneration(0),
      mAudioEOSGeneration(0),
      mMediaClock(mediaClock),
      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAudioAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorNumFramesWritten(-1),
      mVideoLateByUs(0LL),
      mNextVideoTimeMediaUs(-1),
      mHasAudio(false),
      mHasVideo(false),
      mNotifyCompleteAudio(false),
      mNotifyCompleteVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mPauseDrainAudioAllowedUs(0),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mRenderingDataDelivered(false),
      mNextAudioClockUpdateTimeUs(-1),
      mLastAudioMediaTimeUs(-1),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0),
      mUseAudioCallback(false),
      mWakeLock(new AWakeLock()),
      mNeedVideoClearAnchor(false) {
    CHECK(mediaClock != NULL);
    mPlaybackRate = mPlaybackSettings.mSpeed;
    mMediaClock->setPlaybackRate(mPlaybackRate);
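    // Start with the sync flag set so onMessageReceived() only signals a waiting
    // flush() after flush() has explicitly cleared the flag.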
    (void)mSyncFlag.test_and_set();
}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }

    // Try to avoid a race condition in case the callback is still active.
    Mutex::Autolock autoLock(mLock);
    if (mUseAudioCallback) {
        flushQueue(&mAudioQueue);
        flushQueue(&mVideoQueue);
    }
    mWakeLock.clear();
    mVideoScheduler.clear();
    mNotify.clear();
    mAudioSink.clear();
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<MediaCodecBuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
    msg->setInt32("queueGeneration", getQueueGeneration(audio));
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
    msg->setInt32("queueGeneration", getQueueGeneration(audio));
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
    writeToAMessage(msg, rate);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
    }
    return err;
}

status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
    if (rate.mSpeed == 0.f) {
        onPause();
        // Don't call the audiosink's setPlaybackRate when pausing, as the pitch does not
        // have to correspond to any non-zero speed (e.g. the old speed). Keep the
        // settings nonetheless, using the old speed, in case the audiosink changes.
        AudioPlaybackRate newRate = rate;
        newRate.mSpeed = mPlaybackSettings.mSpeed;
        mPlaybackSettings = newRate;
        return OK;
    }

    if (mAudioSink != NULL && mAudioSink->ready()) {
        status_t err = mAudioSink->setPlaybackRate(rate);
        if (err != OK) {
            return err;
        }
    }

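    // In video-only playback the media clock is anchored to video timestamps; request
    // that the anchor be cleared on the next queued frame so the new rate takes effect
    // from a fresh anchor (see postDrainVideoQueue()).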
    if (!mHasAudio && mHasVideo) {
        mNeedVideoClearAnchor = true;
    }
    mPlaybackSettings = rate;
    mPlaybackRate = rate.mSpeed;
    mMediaClock->setPlaybackRate(mPlaybackRate);
    return OK;
}

status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
        if (err == OK) {
            readFromAMessage(response, rate);
        }
    }
    return err;
}

status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
    if (mAudioSink != NULL && mAudioSink->ready()) {
        status_t err = mAudioSink->getPlaybackRate(rate);
        if (err == OK) {
            if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
                ALOGW("correcting mismatch in internal/external playback rate");
            }
            // Adopt the playback settings actually used by the audiosink, as they may
            // be slightly off because the audiosink does not apply small changes.
            mPlaybackSettings = *rate;
            if (mPaused) {
                rate->mSpeed = 0.f;
            }
        }
        return err;
    }
    *rate = mPlaybackSettings;
    return OK;
}

status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
    sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
    writeToAMessage(msg, sync, videoFpsHint);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
    }
    return err;
}

status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
    if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
        return BAD_VALUE;
    }
    // TODO: support sync sources
    return INVALID_OPERATION;
}

status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);
    if (err == OK && response != NULL) {
        CHECK(response->findInt32("err", &err));
        if (err == OK) {
            readFromAMessage(response, sync, videoFps);
        }
    }
    return err;
}

status_t NuPlayer::Renderer::onGetSyncSettings(
        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
    *sync = mSyncSettings;
    *videoFps = -1.f;
    return OK;
}

void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
    {
        Mutex::Autolock autoLock(mLock);
        if (audio) {
            mNotifyCompleteAudio |= notifyComplete;
            clearAudioFirstAnchorTime_l();
            ++mAudioQueueGeneration;
            ++mAudioDrainGeneration;
        } else {
            mNotifyCompleteVideo |= notifyComplete;
            ++mVideoQueueGeneration;
            ++mVideoDrainGeneration;
            mNextVideoTimeMediaUs = -1;
        }

        mVideoLateByUs = 0;
        mSyncQueues = false;
    }

    // Wait until the current job in the message queue is done, to make sure
    // buffer processing from the old generation is finished. After the current
    // job is finished, access to buffers is protected by generation.
    Mutex::Autolock syncLock(mSyncLock);
    int64_t syncCount = mSyncCount;
    mSyncFlag.clear();

    // Make sure message queue is not empty after mSyncFlag is cleared.
    sp<AMessage> msg = new AMessage(kWhatFlush, this);
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();

    int64_t uptimeMs = uptimeMillis();
    while (mSyncCount == syncCount) {
        (void)mSyncCondition.waitRelative(mSyncLock, ms2ns(1000));
        if (uptimeMillis() - uptimeMs > 1000) {
            ALOGW("flush(): no wake-up from sync point for 1s; stop waiting to "
                  "prevent being stuck indefinitely.");
            break;
        }
    }
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, this))->post();
}

void NuPlayer::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, this))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, this))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, this))->post();
}

void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Called on any thread, without mLock acquired.
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
    if (result == OK) {
        return result;
    }

    // MediaClock has not started yet. Try to start it if possible.
    {
        Mutex::Autolock autoLock(mLock);
        if (mAudioFirstAnchorTimeMediaUs == -1) {
            return result;
        }

        AudioTimestamp ts;
        status_t res = mAudioSink->getTimestamp(ts);
        if (res != OK) {
            return result;
        }

        // AudioSink has rendered some frames.
        int64_t nowUs = ALooper::GetNowUs();
        int64_t playedOutDurationUs = mAudioSink->getPlayedOutDurationUs(nowUs);
        if (playedOutDurationUs == 0) {
            *mediaUs = mAudioFirstAnchorTimeMediaUs;
            return OK;
        }
        int64_t nowMediaUs = playedOutDurationUs + mAudioFirstAnchorTimeMediaUs;
        mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
    }

    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
}

void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
    mAudioFirstAnchorTimeMediaUs = -1;
    mMediaClock->setStartingTimeMedia(-1);
}

void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
        mMediaClock->setStartingTimeMedia(mediaUs);
    }
}

// Called on renderer looper.
void NuPlayer::Renderer::clearAnchorTime() {
    mMediaClock->clearAnchor();
    mAudioAnchorTimeMediaUs = -1;
    mAnchorTimeMediaUs = -1;
    mAnchorNumFramesWritten = -1;
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mLock);
    return mVideoLateByUs;
}

status_t NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool *isOffloaded,
        bool isStreaming) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);
    msg->setInt32("isStreaming", isStreaming);

    sp<AMessage> response;
    status_t postStatus = msg->postAndAwaitResponse(&response);

    int32_t err;
    if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
        err = INVALID_OPERATION;
    } else if (err == OK && isOffloaded != NULL) {
        int32_t offload;
        CHECK(response->findInt32("offload", &offload));
        *isOffloaded = (offload != 0);
    }
    return err;
}

void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

void NuPlayer::Renderer::dump(AString& logString) {
    Mutex::Autolock autoLock(mLock);
    logString.append("paused(");
    logString.append(mPaused);
    logString.append("), offloading(");
    logString.append(offloadingAudio());
    logString.append("), wakelock(acquired=");
    mWakelockAcquireEvent.dump(logString);
    logString.append(", timeout=");
    mWakelockTimeoutEvent.dump(logString);
    logString.append(", release=");
    mWakelockReleaseEvent.dump(logString);
    logString.append(", cancel=");
    mWakelockCancelEvent.dump(logString);
    logString.append(")");
}

void NuPlayer::Renderer::changeAudioFormat(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool isStreaming,
        const sp<AMessage> &notify) {
    sp<AMessage> meta = new AMessage;
    meta->setMessage("format", format);
    meta->setInt32("offload-only", offloadOnly);
    meta->setInt32("has-video", hasVideo);
    meta->setInt32("flags", flags);
    meta->setInt32("isStreaming", isStreaming);

    sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
    msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
    msg->setMessage("notify", notify);
    msg->setMessage("meta", meta);
    msg->post();
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            uint32_t isStreaming;
            CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));

            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->setInt32("offload", offloadingAudio());

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatChangeAudioFormat:
        {
            int32_t queueGeneration;
            CHECK(msg->findInt32("queueGeneration", &queueGeneration));

            sp<AMessage> notify;
            CHECK(msg->findMessage("notify", &notify));

            if (offloadingAudio()) {
                ALOGW("changeAudioFormat should NOT be called in offload mode");
                notify->setInt32("err", INVALID_OPERATION);
                notify->post();
                break;
            }

            sp<AMessage> meta;
            CHECK(msg->findMessage("meta", &meta));

            if (queueGeneration != getQueueGeneration(true /* audio */)
                    || mAudioQueue.empty()) {
                onChangeAudioFormat(meta, notify);
                break;
            }

            QueueEntry entry;
            entry.mNotifyConsumed = notify;
            entry.mMeta = meta;

            Mutex::Autolock autoLock(mLock);
            mAudioQueue.push_back(entry);
            postDrainAudioQueue_l();

            break;
        }

        case kWhatDrainAudioQueue:
        {
            mDrainAudioQueuePending = false;

            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(true /* audio */)) {
                break;
            }

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                // Handle AudioTrack race when start is immediately called after flush.
                uint32_t numFramesPendingPlayout =
                    (mNumFramesWritten > numFramesPlayed ?
                        mNumFramesWritten - numFramesPlayed : 0);

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000LL;
                if (mPlaybackRate > 1.0f) {
                    delayUs /= mPlaybackRate;
                }

                // Let's give it more data after about half that time
                // has elapsed.
                delayUs /= 2;
                // check the buffer size to estimate maximum delay permitted.
                const int64_t maxDrainDelayUs = std::max(
                        mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
                ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
                        (long long)delayUs, (long long)maxDrainDelayUs);
                Mutex::Autolock autoLock(mLock);
                postDrainAudioQueue_l(delayUs);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(false /* audio */)) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(false /* audio */)) {
                break;
            }

            mDrainVideoQueuePending = false;
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatEOS:
        {
            int32_t generation;
            CHECK(msg->findInt32("audioEOSGeneration", &generation));
            if (generation != mAudioEOSGeneration) {
                break;
            }
            status_t finalResult;
            CHECK(msg->findInt32("finalResult", &finalResult));
            notifyEOS(true /* audio */, finalResult);
            break;
        }

        case kWhatConfigPlayback:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AudioPlaybackRate rate;
            readFromAMessage(msg, &rate);
            status_t err = onConfigPlayback(rate);
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatGetPlaybackSettings:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
            status_t err = onGetPlaybackSettings(&rate);
            sp<AMessage> response = new AMessage;
            if (err == OK) {
                writeToAMessage(response, rate);
            }
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatConfigSync:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            AVSyncSettings sync;
            float videoFpsHint;
            readFromAMessage(msg, &sync, &videoFpsHint);
            status_t err = onConfigSync(sync, videoFpsHint);
            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatGetSyncSettings:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            ALOGV("kWhatGetSyncSettings");
            AVSyncSettings sync;
            float videoFps = -1.f;
            status_t err = onGetSyncSettings(&sync, &videoFps);
            sp<AMessage> response = new AMessage;
            if (err == OK) {
                writeToAMessage(response, sync, videoFps);
            }
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioTearDown:
        {
            int32_t reason;
            CHECK(msg->findInt32("reason", &reason));

            onAudioTearDown((AudioTearDownReason)reason);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            mWakelockTimeoutEvent.updateValues(
                    uptimeMillis(),
                    generation,
                    mAudioOffloadPauseTimeoutGeneration);
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioTearDown(kDueToTimeout);
            sp<AMessage> newMsg = new AMessage(kWhatReleaseWakeLock, this);
            newMsg->setInt32("drainGeneration", generation);
            newMsg->post(kWakelockReleaseDelayUs);
            break;
        }

        case kWhatReleaseWakeLock:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            mWakelockReleaseEvent.updateValues(
                uptimeMillis(),
                generation,
                mAudioOffloadPauseTimeoutGeneration);
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("releasing audio offload pause wakelock.");
            mWakeLock->release();
            break;
        }

        default:
            TRESPASS();
            break;
    }
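    // Signal any thread blocked in flush(): the message that was in flight when the
    // sync flag was cleared has now been fully processed.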
    if (!mSyncFlag.test_and_set()) {
        Mutex::Autolock syncLock(mSyncLock);
        ++mSyncCount;
        mSyncCondition.broadcast();
    }
}

void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
    if (mPaused) {
        const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
        if (diffUs > delayUs) {
            delayUs = diffUs;
        }
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
    msg->setInt32("drainGeneration", mAudioDrainGeneration);
    msg->post(delayUs);
}

void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
    mAudioRenderingStartGeneration = mAudioDrainGeneration;
    mVideoRenderingStartGeneration = mVideoDrainGeneration;
    mRenderingDataDelivered = false;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
    if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
        mAudioRenderingStartGeneration == mAudioDrainGeneration) {
        mRenderingDataDelivered = true;
        if (mPaused) {
            return;
        }
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            ALOGV("AudioSink::CB_EVENT_STREAM_END");
            me->notifyEOSCallback();
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
            me->notifyAudioTearDown(kDueToError);
            break;
        }
    }

    return 0;
}

void NuPlayer::Renderer::notifyEOSCallback() {
    Mutex::Autolock autoLock(mLock);

    if (!mUseAudioCallback) {
        return;
    }

    notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
}

size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!mUseAudioCallback) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
    while (sizeCopied < size && !mAudioQueue.empty()) {
        entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            if (mediaTimeUs < 0) {
                ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
                       mediaTimeUs / 1E6);
                mediaTimeUs = 0;
            }
            ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;

        notifyIfMediaRenderingStarted_l();
    }

    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        int64_t nowMediaUs =
            mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
        // we don't know how much data we are queueing for offloaded tracks.
        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
    }

    // For non-offloaded audio, we need to compute the frames written because
    // there is no EVENT_STREAM_END notification. The frames-written count gives
    // an estimate of the pending played-out duration.
    if (!offloadingAudio()) {
        mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
    }

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, this))->post();
        // As there is currently no EVENT_STREAM_END callback notification for
        // non-offloaded audio tracks, we need to post the EOS ourselves.
        if (!offloadingAudio()) {
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            ALOGV("fillAudioBuffer: notifyEOS_l "
                    "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
                    mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
            notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
        }
    }
    return sizeCopied;
}

void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
    List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
    bool foundEOS = false;
    while (it != mAudioQueue.end()) {
        int32_t eos;
        QueueEntry *entry = &*it++;
        if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
            itEOS = it;
            foundEOS = true;
        }
    }

    if (foundEOS) {
        // post all replies before EOS and drop the samples
        for (it = mAudioQueue.begin(); it != itEOS; it++) {
            if (it->mBuffer == nullptr) {
                if (it->mNotifyConsumed == nullptr) {
                    // delay doesn't matter as we don't even have an AudioTrack
                    notifyEOS(true /* audio */, it->mFinalResult);
                } else {
                    // TAG for re-opening audio sink.
                    onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
                }
            } else {
                it->mNotifyConsumed->post();
            }
        }
        mAudioQueue.erase(mAudioQueue.begin(), itEOS);
    }
}

bool NuPlayer::Renderer::onDrainAudioQueue() {
    // do not drain audio during teardown as queued buffers may be invalid.
    if (mAudioTornDown) {
        return false;
    }
    // TODO: This call to getPosition checks if AudioTrack has been created
    // in AudioSink before draining audio. If AudioTrack doesn't exist, then
    // CHECKs on getPosition will fail.
    // We still need to figure out why AudioTrack is not created when
    // this function is called. One possible reason could be leftover
    // audio. Another place to check is whether the decoder has received
    // INFO_FORMAT_CHANGED as its first buffer, since the AudioSink is opened
    // there; also look at possible interactions with a flush issued
    // immediately after start. Investigate the error message
    // "vorbis_dsp_synthesis returned -135", along with RTSP.
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        // When getPosition fails, renderer will not reschedule the draining
        // unless new samples are queued.
        // If we have pending EOS (or "eos" marker for discontinuities), we need
        // to post these now as NuPlayerDecoder might be waiting for it.
        drainAudioQueueUntilLastEOS();

        ALOGW("onDrainAudioQueue(): audio sink is not ready");
        return false;
    }

#if 0
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

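    // Remember how many frames had been written before this drain so we can tell
    // whether any progress was made (used below to decide rescheduling while paused).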
    uint32_t prevFramesWritten = mNumFramesWritten;
    while (!mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            if (entry->mNotifyConsumed != nullptr) {
                // TAG for re-open audio sink.
                onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
                mAudioQueue.erase(mAudioQueue.begin());
                continue;
            }

            // EOS
            if (mPaused) {
                // Do not notify EOS when paused.
                // This is needed to avoid switching to the next clip while paused.
                ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
                return false;
            }

            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
            mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            if (mAudioSink->needsTrailingPadding()) {
                // If we're not in gapless playback (i.e. through setNextPlayer), we
                // need to stop the track here, because that will play out the last
                // little bit at the end of the file. Otherwise short files won't play.
                mAudioSink->stop();
                mNumFramesWritten = 0;
            }
            return false;
        }

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        // ignore 0-sized buffer which could be EOS marker with no data
        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
                    mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
                                            copy, false /* blocking */);
        if (written < 0) {
            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
            if (written == WOULD_BLOCK) {
                ALOGV("AudioSink write would block when writing %zu bytes", copy);
            } else {
                ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
                // This can only happen when AudioSink was opened with doNotReconnect flag set to
                // true, in which case the NuPlayer will handle the reconnect.
                notifyAudioTearDown(kDueToError);
            }
            break;
        }

        entry->mOffset += written;
        size_t remainder = entry->mBuffer->size() - entry->mOffset;
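        // If less than one full frame remains, the buffer is effectively consumed;
        // discard any fractional-frame leftover and release the buffer.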
        if ((ssize_t)remainder < mAudioSink->frameSize()) {
            if (remainder > 0) {
                ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
                        remainder);
                entry->mOffset += remainder;
                copy -= remainder;
            }

            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        {
            Mutex::Autolock autoLock(mLock);
            int64_t maxTimeMedia;
            maxTimeMedia =
                mAnchorTimeMediaUs +
                        (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                                * 1000LL * mAudioSink->msecsPerFrame());
            mMediaClock->updateMaxTimeMedia(maxTimeMedia);

            notifyIfMediaRenderingStarted_l();
        }

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write is called in non-blocking mode.
            // It may return with a short count when:
            //
            // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
            //    discarded.
            // 2) The data to be copied exceeds the available buffer in AudioSink.
            // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
            // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0u);

            // (Case 2, 3, 4)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }

    // calculate whether we need to reschedule another write.
    bool reschedule = !mAudioQueue.empty()
            && (!mPaused
                || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
    //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
    //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
    return reschedule;
}

int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
    int32_t sampleRate = offloadingAudio() ?
            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
    if (sampleRate == 0) {
        ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
        return 0;
    }

    return (int64_t)(numFrames * 1000000LL / sampleRate);
}

// Calculate duration of pending samples if played at normal rate (i.e., 1.0).
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
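    // When the audio sink appears stuck we pace against the system clock instead; the
    // pending amount is then the written duration minus how far the virtual clock has
    // advanced past the first audio anchor.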
    if (mUseVirtualAudioSink) {
        int64_t nowUs = ALooper::GetNowUs();
        int64_t mediaUs;
        if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
            return 0LL;
        } else {
            return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
        }
    }

    const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
    int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
    if (pendingUs < 0) {
        // This shouldn't happen unless the timestamp is stale.
        ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
                "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
                __func__, (long long)pendingUs,
                (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
        pendingUs = 0;
    }
    return pendingUs;
}

int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t realUs;
    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
        // If we failed to get the current position, e.g. because the audio clock is
        // not ready, then just play out video immediately without delay.
        return nowUs;
    }
    return realUs;
}

void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    Mutex::Autolock autoLock(mLock);
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAudioAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);

    // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
    if (mNextAudioClockUpdateTimeUs == -1) {
        AudioTimestamp ts;
        if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
            mNextAudioClockUpdateTimeUs = 0; // start our clock updates
        }
    }
    int64_t nowUs = ALooper::GetNowUs();
    if (mNextAudioClockUpdateTimeUs >= 0) {
        if (nowUs >= mNextAudioClockUpdateTimeUs) {
            int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
            mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
            mUseVirtualAudioSink = false;
            mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
        }
    } else {
        int64_t unused;
        if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
                && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
                        > kMaxAllowedAudioSinkDelayUs)) {
            // Enough data has been sent to AudioSink, but AudioSink has not rendered
            // any data yet. Something is wrong with AudioSink, e.g., the device is not
            // connected to audio out.
            // Switch to system clock. This essentially creates a virtual AudioSink with
            // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
            // This virtual AudioSink renders audio data starting from the very first sample
            // and it's paced by system clock.
            ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
            mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
            mUseVirtualAudioSink = true;
        }
    }
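    // Record the frame count and media time at this anchor; onDrainAudioQueue() uses them
    // to extrapolate the max media time from subsequently written frames.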
    mAnchorNumFramesWritten = mNumFramesWritten;
    mAudioAnchorTimeMediaUs = mediaTimeUs;
    mAnchorTimeMediaUs = mediaTimeUs;
}

// Called without mLock acquired.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending
            || getSyncQueues()
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();
    if (mFlags & FLAG_REAL_TIME) {
        int64_t realTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));

        realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;

        int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

        int64_t delayUs = realTimeUs - nowUs;

        ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
        // post 2 display refreshes before rendering is due
        msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

        mDrainVideoQueuePending = true;
        return;
    }

    int64_t mediaTimeUs;
    CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    {
        Mutex::Autolock autoLock(mLock);
        if (mNeedVideoClearAnchor && !mHasAudio) {
            mNeedVideoClearAnchor = false;
            clearAnchorTime();
        }
        if (mAnchorTimeMediaUs < 0) {
            mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
            mAnchorTimeMediaUs = mediaTimeUs;
        }
    }
    mNextVideoTimeMediaUs = mediaTimeUs;
    if (!mHasAudio) {
        // smooth out videos >= 10fps
        mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
    }

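    // Post the drain immediately for the very first sample, or for video that precedes
    // the first audio anchor; otherwise schedule it two vsyncs before the frame is due.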
1396     if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
1397         msg->post();
1398     } else {
1399         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1400 
1401         // post 2 display refreshes before rendering is due
1402         mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
1403     }
1404 
1405     mDrainVideoQueuePending = true;
1406 }
1407 
onDrainVideoQueue()1408 void NuPlayer::Renderer::onDrainVideoQueue() {
1409     if (mVideoQueue.empty()) {
1410         return;
1411     }
1412 
1413     QueueEntry *entry = &*mVideoQueue.begin();
1414 
1415     if (entry->mBuffer == NULL) {
1416         // EOS
1417 
1418         notifyEOS(false /* audio */, entry->mFinalResult);
1419 
1420         mVideoQueue.erase(mVideoQueue.begin());
1421         entry = NULL;
1422 
1423         setVideoLateByUs(0);
1424         return;
1425     }
1426 
1427     int64_t nowUs = ALooper::GetNowUs();
1428     int64_t realTimeUs;
1429     int64_t mediaTimeUs = -1;
1430     if (mFlags & FLAG_REAL_TIME) {
1431         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1432     } else {
1433         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1434 
1435         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
1436     }
1437     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1438 
1439     bool tooLate = false;
1440 
1441     if (!mPaused) {
1442         setVideoLateByUs(nowUs - realTimeUs);
1443         tooLate = (mVideoLateByUs > 40000);
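        // A frame more than 40 ms behind the clock is considered late; the "render" flag
        // set below then tells the consumer not to display it (except for the first frame,
        // which is always rendered).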
1444 
1445         if (tooLate) {
1446             ALOGV("video late by %lld us (%.2f secs)",
1447                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
1448         } else {
1449             int64_t mediaUs = 0;
1450             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
1451             ALOGV("rendering video at media time %.2f secs",
1452                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
1453                     mediaUs) / 1E6);
1454 
1455             if (!(mFlags & FLAG_REAL_TIME)
1456                     && mLastAudioMediaTimeUs != -1
1457                     && mediaTimeUs > mLastAudioMediaTimeUs) {
1458                 // If audio ends before video, video continues to drive media clock.
1459                 // Also smooth out videos >= 10fps.
1460                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1461             }
1462         }
1463     } else {
1464         setVideoLateByUs(0);
1465         if (!mVideoSampleReceived && !mHasAudio) {
1466             // This ensures that the first frame after a flush won't be used as an anchor
1467             // while the renderer is paused, because resume can happen any time after a seek.
1468             clearAnchorTime();
1469         }
1470     }
1471 
1472     // Always render the first video frame while keeping stats on A/V sync.
1473     if (!mVideoSampleReceived) {
1474         realTimeUs = nowUs;
1475         tooLate = false;
1476     }
1477 
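    // Hand the buffer back with its scheduled presentation time; "render" == false asks the
    // consumer (presumably the decoder releasing the output buffer) to drop the frame.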
1478     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000LL);
1479     entry->mNotifyConsumed->setInt32("render", !tooLate);
1480     entry->mNotifyConsumed->post();
1481     mVideoQueue.erase(mVideoQueue.begin());
1482     entry = NULL;
1483 
1484     mVideoSampleReceived = true;
1485 
1486     if (!mPaused) {
1487         if (!mVideoRenderingStarted) {
1488             mVideoRenderingStarted = true;
1489             notifyVideoRenderingStart();
1490         }
1491         Mutex::Autolock autoLock(mLock);
1492         notifyIfMediaRenderingStarted_l();
1493     }
1494 }
1495 
1496 void NuPlayer::Renderer::notifyVideoRenderingStart() {
1497     sp<AMessage> notify = mNotify->dup();
1498     notify->setInt32("what", kWhatVideoRenderingStart);
1499     notify->post();
1500 }
1501 
1502 void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
1503     Mutex::Autolock autoLock(mLock);
1504     notifyEOS_l(audio, finalResult, delayUs);
1505 }
1506 
1507 void NuPlayer::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
1508     if (audio && delayUs > 0) {
1509         sp<AMessage> msg = new AMessage(kWhatEOS, this);
1510         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
1511         msg->setInt32("finalResult", finalResult);
1512         msg->post(delayUs);
1513         return;
1514     }
1515     sp<AMessage> notify = mNotify->dup();
1516     notify->setInt32("what", kWhatEOS);
1517     notify->setInt32("audio", static_cast<int32_t>(audio));
1518     notify->setInt32("finalResult", finalResult);
1519     notify->post(delayUs);
1520 
1521     if (audio) {
1522         // Video might outlive audio. Clear the anchor to enable the video-only case.
1523         mAnchorTimeMediaUs = -1;
1524         mHasAudio = false;
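        // If the clock still returns a valid media time, just extend its max media time (if
        // needed) to the next queued video frame; otherwise re-anchor the clock at that frame
        // so video-only playback can keep progressing.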
1525         if (mNextVideoTimeMediaUs >= 0) {
1526             int64_t mediaUs = 0;
1527             int64_t nowUs = ALooper::GetNowUs();
1528             status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
1529             if (result == OK) {
1530                 if (mNextVideoTimeMediaUs > mediaUs) {
1531                     mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
1532                 }
1533             } else {
1534                 mMediaClock->updateAnchor(
1535                         mNextVideoTimeMediaUs, nowUs,
1536                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1537             }
1538         }
1539     } else {
1540         mHasVideo = false;
1541     }
1542 }
1543 
1544 void NuPlayer::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
1545     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
1546     msg->setInt32("reason", reason);
1547     msg->post();
1548 }
1549 
1550 void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
1551     int32_t audio;
1552     CHECK(msg->findInt32("audio", &audio));
1553 
1554     if (dropBufferIfStale(audio, msg)) {
1555         return;
1556     }
1557 
1558     if (audio) {
1559         mHasAudio = true;
1560     } else {
1561         mHasVideo = true;
1562     }
1563 
1564     if (mHasVideo) {
1565         if (mVideoScheduler == NULL) {
1566             mVideoScheduler = new VideoFrameScheduler();
1567             mVideoScheduler->init();
1568         }
1569     }
1570 
1571     sp<RefBase> obj;
1572     CHECK(msg->findObject("buffer", &obj));
1573     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
1574 
1575     sp<AMessage> notifyConsumed;
1576     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
1577 
1578     QueueEntry entry;
1579     entry.mBuffer = buffer;
1580     entry.mNotifyConsumed = notifyConsumed;
1581     entry.mOffset = 0;
1582     entry.mFinalResult = OK;
1583     entry.mBufferOrdinal = ++mTotalBuffersQueued;
1584 
1585     if (audio) {
1586         Mutex::Autolock autoLock(mLock);
1587         mAudioQueue.push_back(entry);
1588         postDrainAudioQueue_l();
1589     } else {
1590         mVideoQueue.push_back(entry);
1591         postDrainVideoQueue();
1592     }
1593 
1594     Mutex::Autolock autoLock(mLock);
1595     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
1596         return;
1597     }
1598 
1599     sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
1600     sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
1601 
1602     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
1603         // EOS signalled on either queue.
1604         syncQueuesDone_l();
1605         return;
1606     }
1607 
1608     int64_t firstAudioTimeUs;
1609     int64_t firstVideoTimeUs;
1610     CHECK(firstAudioBuffer->meta()
1611             ->findInt64("timeUs", &firstAudioTimeUs));
1612     CHECK(firstVideoBuffer->meta()
1613             ->findInt64("timeUs", &firstVideoTimeUs));
1614 
1615     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
1616 
1617     ALOGV("queueDiff = %.2f secs", diff / 1E6);
1618 
1619     if (diff > 100000LL) {
1620         // Audio data starts more than 0.1 secs before video.
1621         // Drop some audio.
1622 
1623         (*mAudioQueue.begin()).mNotifyConsumed->post();
1624         mAudioQueue.erase(mAudioQueue.begin());
1625         return;
1626     }
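    // Illustrative example: with firstAudioTimeUs = 0 and firstVideoTimeUs = 250000, diff is
    // 250 ms, so one audio buffer is consumed without being played per queued buffer until the
    // gap drops to 100 ms or less, at which point syncQueuesDone_l() ends the syncing phase.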
1627 
1628     syncQueuesDone_l();
1629 }
1630 
1631 void NuPlayer::Renderer::syncQueuesDone_l() {
1632     if (!mSyncQueues) {
1633         return;
1634     }
1635 
1636     mSyncQueues = false;
1637 
1638     if (!mAudioQueue.empty()) {
1639         postDrainAudioQueue_l();
1640     }
1641 
1642     if (!mVideoQueue.empty()) {
1643         mLock.unlock();
1644         postDrainVideoQueue();
1645         mLock.lock();
1646     }
1647 }
1648 
1649 void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
1650     int32_t audio;
1651     CHECK(msg->findInt32("audio", &audio));
1652 
1653     if (dropBufferIfStale(audio, msg)) {
1654         return;
1655     }
1656 
1657     int32_t finalResult;
1658     CHECK(msg->findInt32("finalResult", &finalResult));
1659 
1660     QueueEntry entry;
1661     entry.mOffset = 0;
1662     entry.mFinalResult = finalResult;
1663 
1664     if (audio) {
1665         Mutex::Autolock autoLock(mLock);
1666         if (mAudioQueue.empty() && mSyncQueues) {
1667             syncQueuesDone_l();
1668         }
1669         mAudioQueue.push_back(entry);
1670         postDrainAudioQueue_l();
1671     } else {
1672         if (mVideoQueue.empty() && getSyncQueues()) {
1673             Mutex::Autolock autoLock(mLock);
1674             syncQueuesDone_l();
1675         }
1676         mVideoQueue.push_back(entry);
1677         postDrainVideoQueue();
1678     }
1679 }
1680 
1681 void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
1682     int32_t audio, notifyComplete;
1683     CHECK(msg->findInt32("audio", &audio));
1684 
1685     {
1686         Mutex::Autolock autoLock(mLock);
1687         if (audio) {
1688             notifyComplete = mNotifyCompleteAudio;
1689             mNotifyCompleteAudio = false;
1690             mLastAudioMediaTimeUs = -1;
1691 
1692             mHasAudio = false;
1693             if (mNextVideoTimeMediaUs >= 0) {
1694                 int64_t nowUs = ALooper::GetNowUs();
1695                 mMediaClock->updateAnchor(
1696                         mNextVideoTimeMediaUs, nowUs,
1697                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1698             }
1699         } else {
1700             notifyComplete = mNotifyCompleteVideo;
1701             mNotifyCompleteVideo = false;
1702             mHasVideo = false;
1703         }
1704 
1705         // If we're currently syncing the queues, i.e. dropping audio while
1706         // aligning the first audio/video buffer times and only one of the
1707         // two queues has data, we may starve that queue by not requesting
1708         // more buffers from the decoder. If the other source then encounters
1709         // a discontinuity that leads to flushing, we'll never find the
1710         // corresponding discontinuity on the other queue.
1711         // Therefore we'll stop syncing the queues if at least one of them
1712         // is flushed.
1713         syncQueuesDone_l();
1714     }
1715 
1716     if (audio && mDrainVideoQueuePending) {
1717         // Audio should not clear the anchor (MediaClock) directly: postDrainVideoQueue
1718         // may have registered the kWhatDrainVideoQueue message as a MediaClock timer,
1719         // and clearing the anchor without updating it right away could block that
1720         // message from posting. So postpone the clear to the video path, which updates
1721         // the anchor immediately after clearing it.
1722         mNeedVideoClearAnchor = true;
1723     } else {
1724         clearAnchorTime();
1725     }
1726 
1727     ALOGV("flushing %s", audio ? "audio" : "video");
1728     if (audio) {
1729         {
1730             Mutex::Autolock autoLock(mLock);
1731             flushQueue(&mAudioQueue);
1732 
1733             ++mAudioDrainGeneration;
1734             ++mAudioEOSGeneration;
1735             prepareForMediaRenderingStart_l();
1736 
1737             // The frame count will be reset after the flush.
1738             clearAudioFirstAnchorTime_l();
1739         }
1740 
1741         mDrainAudioQueuePending = false;
1742 
1743         mAudioSink->pause();
1744         mAudioSink->flush();
1745         if (!offloadingAudio()) {
1746             // Call stop() to signal to the AudioSink to completely fill the
1747             // internal buffer before resuming playback.
1748             // FIXME: this is ignored after flush().
1749             mAudioSink->stop();
1750             mNumFramesWritten = 0;
1751         }
1752         if (!mPaused) {
1753             mAudioSink->start();
1754         }
1755         mNextAudioClockUpdateTimeUs = -1;
1756     } else {
1757         flushQueue(&mVideoQueue);
1758 
1759         mDrainVideoQueuePending = false;
1760 
1761         if (mVideoScheduler != NULL) {
1762             mVideoScheduler->restart();
1763         }
1764 
1765         Mutex::Autolock autoLock(mLock);
1766         ++mVideoDrainGeneration;
1767         prepareForMediaRenderingStart_l();
1768     }
1769 
1770     mVideoSampleReceived = false;
1771 
1772     if (notifyComplete) {
1773         notifyFlushComplete(audio);
1774     }
1775 }
1776 
1777 void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1778     while (!queue->empty()) {
1779         QueueEntry *entry = &*queue->begin();
1780 
1781         if (entry->mBuffer != NULL) {
1782             entry->mNotifyConsumed->post();
1783         } else if (entry->mNotifyConsumed != nullptr) {
1784             // Do we need to open the audio sink now?
1785             onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1786         }
1787 
1788         queue->erase(queue->begin());
1789         entry = NULL;
1790     }
1791 }
1792 
1793 void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1794     sp<AMessage> notify = mNotify->dup();
1795     notify->setInt32("what", kWhatFlushComplete);
1796     notify->setInt32("audio", static_cast<int32_t>(audio));
1797     notify->post();
1798 }
1799 
1800 bool NuPlayer::Renderer::dropBufferIfStale(
1801         bool audio, const sp<AMessage> &msg) {
1802     int32_t queueGeneration;
1803     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
1804 
1805     if (queueGeneration == getQueueGeneration(audio)) {
1806         return false;
1807     }
1808 
1809     sp<AMessage> notifyConsumed;
1810     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1811         notifyConsumed->post();
1812     }
1813 
1814     return true;
1815 }
1816 
1817 void NuPlayer::Renderer::onAudioSinkChanged() {
1818     if (offloadingAudio()) {
1819         return;
1820     }
1821     CHECK(!mDrainAudioQueuePending);
1822     mNumFramesWritten = 0;
1823     mAnchorNumFramesWritten = -1;
1824     uint32_t written;
1825     if (mAudioSink->getFramesWritten(&written) == OK) {
1826         mNumFramesWritten = written;
1827     }
1828 }
1829 
1830 void NuPlayer::Renderer::onDisableOffloadAudio() {
1831     Mutex::Autolock autoLock(mLock);
1832     mFlags &= ~FLAG_OFFLOAD_AUDIO;
1833     ++mAudioDrainGeneration;
1834     if (mAudioRenderingStartGeneration != -1) {
1835         prepareForMediaRenderingStart_l();
1836         // PauseTimeout is applied to offload mode only. Cancel pending timer.
1837         cancelAudioOffloadPauseTimeout();
1838     }
1839 }
1840 
1841 void NuPlayer::Renderer::onEnableOffloadAudio() {
1842     Mutex::Autolock autoLock(mLock);
1843     mFlags |= FLAG_OFFLOAD_AUDIO;
1844     ++mAudioDrainGeneration;
1845     if (mAudioRenderingStartGeneration != -1) {
1846         prepareForMediaRenderingStart_l();
1847     }
1848 }
1849 
1850 void NuPlayer::Renderer::onPause() {
1851     if (mPaused) {
1852         return;
1853     }
1854 
1855     startAudioOffloadPauseTimeout();
1856 
1857     {
1858         Mutex::Autolock autoLock(mLock);
1859         // We do not increment the audio drain generation so the audio buffer keeps filling during pause.
1860         ++mVideoDrainGeneration;
1861         prepareForMediaRenderingStart_l();
1862         mPaused = true;
1863         mMediaClock->setPlaybackRate(0.0);
1864     }
1865 
1866     mDrainAudioQueuePending = false;
1867     mDrainVideoQueuePending = false;
1868 
1869     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1870     mAudioSink->pause();
1871 
1872     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
1873           mAudioQueue.size(), mVideoQueue.size());
1874 }
1875 
1876 void NuPlayer::Renderer::onResume() {
1877     if (!mPaused) {
1878         return;
1879     }
1880 
1881     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1882     cancelAudioOffloadPauseTimeout();
1883     if (mAudioSink->ready()) {
1884         status_t err = mAudioSink->start();
1885         if (err != OK) {
1886             ALOGE("cannot start AudioSink err %d", err);
1887             notifyAudioTearDown(kDueToError);
1888         }
1889     }
1890 
1891     {
1892         Mutex::Autolock autoLock(mLock);
1893         mPaused = false;
1894         // The rendering-started notification may have been delayed while we were paused.
1895         if (mRenderingDataDelivered) {
1896             notifyIfMediaRenderingStarted_l();
1897         }
1898         // Configure the AudioSink, as we did not do it when pausing.
1899         if (mAudioSink != NULL && mAudioSink->ready()) {
1900             mAudioSink->setPlaybackRate(mPlaybackSettings);
1901         }
1902 
1903         mMediaClock->setPlaybackRate(mPlaybackRate);
1904 
1905         if (!mAudioQueue.empty()) {
1906             postDrainAudioQueue_l();
1907         }
1908     }
1909 
1910     if (!mVideoQueue.empty()) {
1911         postDrainVideoQueue();
1912     }
1913 }
1914 
1915 void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1916     if (mVideoScheduler == NULL) {
1917         mVideoScheduler = new VideoFrameScheduler();
1918     }
1919     mVideoScheduler->init(fps);
1920 }
1921 
1922 int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
1923     Mutex::Autolock autoLock(mLock);
1924     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
1925 }
1926 
1927 int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
1928     Mutex::Autolock autoLock(mLock);
1929     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
1930 }
1931 
1932 bool NuPlayer::Renderer::getSyncQueues() {
1933     Mutex::Autolock autoLock(mLock);
1934     return mSyncQueues;
1935 }
1936 
1937 void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
1938     if (mAudioTornDown) {
1939         return;
1940     }
1941 
1942     // TimeoutWhenPaused is only for offload mode.
1943     if (reason == kDueToTimeout && !offloadingAudio()) {
1944         return;
1945     }
1946 
1947     mAudioTornDown = true;
1948 
1949     int64_t currentPositionUs;
1950     sp<AMessage> notify = mNotify->dup();
1951     if (getCurrentPosition(&currentPositionUs) == OK) {
1952         notify->setInt64("positionUs", currentPositionUs);
1953     }
1954 
1955     mAudioSink->stop();
1956     mAudioSink->flush();
1957 
1958     notify->setInt32("what", kWhatAudioTearDown);
1959     notify->setInt32("reason", reason);
1960     notify->post();
1961 }
1962 
1963 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1964     if (offloadingAudio()) {
1965         mWakeLock->acquire();
1966         mWakelockAcquireEvent.updateValues(uptimeMillis(),
1967                                            mAudioOffloadPauseTimeoutGeneration,
1968                                            mAudioOffloadPauseTimeoutGeneration);
1969         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
1970         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
1971         msg->post(kOffloadPauseMaxUs);
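        // Rough sketch of what happens next (the message handler lives earlier in this
        // file): if this message fires with a matching generation while playback is still
        // paused, the renderer is expected to tear the audio path down via
        // onAudioTearDown(kDueToTimeout), which only takes effect while audio is offloaded.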
1972     }
1973 }
1974 
1975 void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1976     // We may have called startAudioOffloadPauseTimeout() without
1977     // the AudioSink open and with offloadingAudio enabled.
1978     //
1979     // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
1980     // we always release the wakelock and increment the pause timeout generation.
1981     //
1982     // Note: The acquired wakelock prevents the device from suspending
1983     // immediately after offload pause (in case a resume happens shortly thereafter).
1984     mWakeLock->release(true);
1985     mWakelockCancelEvent.updateValues(uptimeMillis(),
1986                                       mAudioOffloadPauseTimeoutGeneration,
1987                                       mAudioOffloadPauseTimeoutGeneration);
1988     ++mAudioOffloadPauseTimeoutGeneration;
1989 }
1990 
1991 status_t NuPlayer::Renderer::onOpenAudioSink(
1992         const sp<AMessage> &format,
1993         bool offloadOnly,
1994         bool hasVideo,
1995         uint32_t flags,
1996         bool isStreaming) {
1997     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
1998             offloadOnly, offloadingAudio());
1999     bool audioSinkChanged = false;
2000 
2001     int32_t numChannels;
2002     CHECK(format->findInt32("channel-count", &numChannels));
2003 
2004     // channel mask info as read from the audio format
2005     int32_t mediaFormatChannelMask;
2006     // channel mask to use for native playback
2007     audio_channel_mask_t channelMask;
2008     if (format->findInt32("channel-mask", &mediaFormatChannelMask)) {
2009         // KEY_CHANNEL_MASK follows the android.media.AudioFormat java mask
2010         channelMask = audio_channel_mask_from_media_format_mask(mediaFormatChannelMask);
2011     } else {
2012         // no mask found: the mask will be derived from the channel count
2013         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
2014     }
2015 
2016     int32_t sampleRate;
2017     CHECK(format->findInt32("sample-rate", &sampleRate));
2018 
2019     // read pcm encoding from MediaCodec output format, if available
2020     int32_t pcmEncoding;
2021     audio_format_t audioFormat =
2022             format->findInt32(KEY_PCM_ENCODING, &pcmEncoding) ?
2023                     audioFormatFromEncoding(pcmEncoding) : AUDIO_FORMAT_PCM_16_BIT;
2024 
2025     if (offloadingAudio()) {
2026         AString mime;
2027         CHECK(format->findString("mime", &mime));
2028         status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
2029 
2030         if (err != OK) {
2031             ALOGE("Couldn't map mime \"%s\" to a valid "
2032                     "audio_format", mime.c_str());
2033             onDisableOffloadAudio();
2034         } else {
2035             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
2036                     mime.c_str(), audioFormat);
2037 
2038             int avgBitRate = 0;
2039             format->findInt32("bitrate", &avgBitRate);
2040 
2041             int32_t aacProfile = -1;
2042             if (audioFormat == AUDIO_FORMAT_AAC
2043                     && format->findInt32("aac-profile", &aacProfile)) {
2044                 // Redefine AAC format as per aac profile
2045                 mapAACProfileToAudioFormat(
2046                         audioFormat,
2047                         aacProfile);
2048             }
2049 
2050             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
2051             offloadInfo.duration_us = -1;
2052             format->findInt64(
2053                     "durationUs", &offloadInfo.duration_us);
2054             offloadInfo.sample_rate = sampleRate;
2055             offloadInfo.channel_mask = channelMask;
2056             offloadInfo.format = audioFormat;
2057             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
2058             offloadInfo.bit_rate = avgBitRate;
2059             offloadInfo.has_video = hasVideo;
2060             offloadInfo.is_streaming = isStreaming;
2061 
2062             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
2063                 ALOGV("openAudioSink: no change in offload mode");
2064                 // no change from previous configuration, everything ok.
2065                 return OK;
2066             }
2067             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2068 
2069             ALOGV("openAudioSink: try to open AudioSink in offload mode");
2070             uint32_t offloadFlags = flags;
2071             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2072             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
2073             audioSinkChanged = true;
2074             mAudioSink->close();
2075 
2076             err = mAudioSink->open(
2077                     sampleRate,
2078                     numChannels,
2079                     (audio_channel_mask_t)channelMask,
2080                     audioFormat,
2081                     0 /* bufferCount - unused */,
2082                     &NuPlayer::Renderer::AudioSinkCallback,
2083                     this,
2084                     (audio_output_flags_t)offloadFlags,
2085                     &offloadInfo);
2086 
2087             if (err == OK) {
2088                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
2089             }
2090 
2091             if (err == OK) {
2092                 // If the playback is offloaded to h/w, we pass
2093                 // the HAL some metadata information.
2094                 // We don't want to do this for PCM because it
2095                 // will be going through the AudioFlinger mixer
2096                 // before reaching the hardware.
2097                 // TODO
2098                 mCurrentOffloadInfo = offloadInfo;
2099                 if (!mPaused) { // for preview mode, don't start if paused
2100                     err = mAudioSink->start();
2101                 }
2102                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
2103             }
2104             if (err != OK) {
2105                 // Clean up, fall back to non offload mode.
2106                 mAudioSink->close();
2107                 onDisableOffloadAudio();
2108                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2109                 ALOGV("openAudioSink: offload failed");
2110                 if (offloadOnly) {
2111                     notifyAudioTearDown(kForceNonOffload);
2112                 }
2113             } else {
2114                 mUseAudioCallback = true;  // offload mode transfers data through callback
2115                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2116             }
2117         }
2118     }
2119     if (!offloadOnly && !offloadingAudio()) {
2120         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
2121         uint32_t pcmFlags = flags;
2122         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2123 
2124         const PcmInfo info = {
2125                 (audio_channel_mask_t)channelMask,
2126                 (audio_output_flags_t)pcmFlags,
2127                 audioFormat,
2128                 numChannels,
2129                 sampleRate
2130         };
2131         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
2132             ALOGV("openAudioSink: no change in pcm mode");
2133             // no change from previous configuration, everything ok.
2134             return OK;
2135         }
2136 
2137         audioSinkChanged = true;
2138         mAudioSink->close();
2139         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2140         // Note: It is possible to set up the callback, but not use it to send audio data.
2141         // This requires a fix in AudioSink to explicitly specify the transfer mode.
2142         mUseAudioCallback = getUseAudioCallbackSetting();
2143         if (mUseAudioCallback) {
2144             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2145         }
2146 
2147         // Compute the desired buffer size.
2148         // For callback mode, the amount of time before wakeup is about half the buffer size.
2149         const uint32_t frameCount =
2150                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
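        // Illustrative example: a 48 kHz stream with a 500 ms sink setting gives
        // frameCount = 48000 * 500 / 1000 = 24000 frames.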
2151 
2152         // With doNotReconnect set, the AudioSink signals back and lets NuPlayer re-construct
2153         // the AudioSink. We don't want this when there's video, because it would cause a video
2154         // seek to the previous I-frame. But we do want it when there's only audio, because it
2155         // gives NuPlayer a chance to switch from non-offload mode to offload mode.
2156         // So we only set doNotReconnect when there's no video.
2157         const bool doNotReconnect = !hasVideo;
2158 
2159         // We should always be able to set our playback settings if the sink is closed.
2160         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
2161                 "onOpenAudioSink: can't set playback rate on closed sink");
2162         status_t err = mAudioSink->open(
2163                     sampleRate,
2164                     numChannels,
2165                     (audio_channel_mask_t)channelMask,
2166                     audioFormat,
2167                     0 /* bufferCount - unused */,
2168                     mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
2169                     mUseAudioCallback ? this : NULL,
2170                     (audio_output_flags_t)pcmFlags,
2171                     NULL,
2172                     doNotReconnect,
2173                     frameCount);
2174         if (err != OK) {
2175             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
2176             mAudioSink->close();
2177             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2178             return err;
2179         }
2180         mCurrentPcmInfo = info;
2181         if (!mPaused) { // for preview mode, don't start if paused
2182             mAudioSink->start();
2183         }
2184     }
2185     if (audioSinkChanged) {
2186         onAudioSinkChanged();
2187     }
2188     mAudioTornDown = false;
2189     return OK;
2190 }
2191 
2192 void NuPlayer::Renderer::onCloseAudioSink() {
2193     mAudioSink->close();
2194     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2195     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2196 }
2197 
2198 void NuPlayer::Renderer::onChangeAudioFormat(
2199         const sp<AMessage> &meta, const sp<AMessage> &notify) {
2200     sp<AMessage> format;
2201     CHECK(meta->findMessage("format", &format));
2202 
2203     int32_t offloadOnly;
2204     CHECK(meta->findInt32("offload-only", &offloadOnly));
2205 
2206     int32_t hasVideo;
2207     CHECK(meta->findInt32("has-video", &hasVideo));
2208 
2209     uint32_t flags;
2210     CHECK(meta->findInt32("flags", (int32_t *)&flags));
2211 
2212     uint32_t isStreaming;
2213     CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
2214 
2215     status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
2216 
2217     if (err != OK) {
2218         notify->setInt32("err", err);
2219     }
2220     notify->post();
2221 }
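// Illustrative sketch only (not the actual caller): the meta message consumed by
// onChangeAudioFormat() could be assembled like this, using the keys read above:
//
//   sp<AMessage> meta = new AMessage;
//   meta->setMessage("format", format);            // MediaCodec output format
//   meta->setInt32("offload-only", 0);
//   meta->setInt32("has-video", 1);
//   meta->setInt32("flags", AUDIO_OUTPUT_FLAG_NONE);
//   meta->setInt32("isStreaming", 1);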
2222 
2223 void NuPlayer::Renderer::WakeLockEvent::dump(AString& logString) {
2224   logString.append("[");
2225   logString.append(mTimeMs);
2226   logString.append(",");
2227   logString.append(mEventTimeoutGeneration);
2228   logString.append(",");
2229   logString.append(mRendererTimeoutGeneration);
2230   logString.append("]");
2231 }
2232 
2233 }  // namespace android
2234