/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/AWakeLock.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>

#include <VideoFrameScheduler.h>

#include <inttypes.h>

namespace android {

// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
// is closed to allow the audio DSP to power down.
static const int64_t kOffloadPauseMaxUs = 10000000ll;

// static
const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
        AUDIO_CHANNEL_NONE,
        AUDIO_OUTPUT_FLAG_NONE,
        AUDIO_FORMAT_INVALID,
        0, // mNumChannels
        0 // mSampleRate
};

// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;

NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify,
        uint32_t flags)
    : mAudioSink(sink),
      mNotify(notify),
      mFlags(flags),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAudioFirstAnchorTimeMediaUs(-1),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mAnchorNumFramesWritten(-1),
      mAnchorMaxMediaUs(-1),
      mVideoLateByUs(0ll),
      mHasAudio(false),
      mHasVideo(false),
      mPauseStartedTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mNotifyCompleteAudio(false),
      mNotifyCompleteVideo(false),
      mSyncQueues(false),
      mPaused(false),
      mPausePositionMediaTimeUs(-1),
      mVideoSampleReceived(false),
      mVideoRenderingStarted(false),
      mVideoRenderingStartGeneration(0),
      mAudioRenderingStartGeneration(0),
      mAudioOffloadPauseTimeoutGeneration(0),
      mAudioOffloadTornDown(false),
      mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
      mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
      mTotalBuffersQueued(0),
      mLastAudioBufferDrained(0),
      mWakeLock(new AWakeLock()) {

}

NuPlayer::Renderer::~Renderer() {
    if (offloadingAudio()) {
        mAudioSink->stop();
        mAudioSink->flush();
        mAudioSink->close();
    }
}

void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setBuffer("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mNotifyCompleteAudio |= notifyComplete;
            if (mFlushingAudio) {
                return;
            }
            mFlushingAudio = true;
        } else {
            mNotifyCompleteVideo |= notifyComplete;
            if (mFlushingVideo) {
                return;
            }
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

void NuPlayer::Renderer::signalTimeDiscontinuity() {
    Mutex::Autolock autoLock(mLock);
    // CHECK(mAudioQueue.empty());
    // CHECK(mVideoQueue.empty());
    setAudioFirstAnchorTime(-1);
    setAnchorTime(-1, -1);
    setVideoLateByUs(0);
    mSyncQueues = false;
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

void NuPlayer::Renderer::signalDisableOffloadAudio() {
    (new AMessage(kWhatDisableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::signalEnableOffloadAudio() {
    (new AMessage(kWhatEnableOffloadAudio, id()))->post();
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::Renderer::setVideoFrameRate(float fps) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
    msg->setFloat("frame-rate", fps);
    msg->post();
}

// Called on any thread except the renderer's thread.
status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
    {
        Mutex::Autolock autoLock(mLock);
        int64_t currentPositionUs;
        if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
            *mediaUs = currentPositionUs;
            return OK;
        }
    }
    return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs());
}

// Called only on the renderer's thread.
status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) {
    return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs());
}

// Called only on the renderer's thread.
// Since mPaused and mPausePositionMediaTimeUs are changed only on the renderer's
// thread, there is no need to acquire mLock.
status_t NuPlayer::Renderer::getCurrentPositionOnLooper(
        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
    int64_t currentPositionUs;
    if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
        *mediaUs = currentPositionUs;
        return OK;
    }
    return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo);
}

// Called either with mLock acquired or on renderer's thread.
bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) {
    if (!mPaused || mPausePositionMediaTimeUs < 0ll) {
        return false;
    }
    *mediaUs = mPausePositionMediaTimeUs;
    return true;
}

// Called on any thread.
status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
    Mutex::Autolock autoLock(mTimeLock);
    if (!mHasAudio && !mHasVideo) {
        return NO_INIT;
    }

    if (mAnchorTimeMediaUs < 0) {
        return NO_INIT;
    }

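    // Extrapolate the current media position from the anchor: start at the
    // anchor's media time and advance by the real time elapsed since the
    // anchor was taken; time spent paused is subtracted below.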
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    if (mPauseStartedTimeRealUs != -1) {
        positionUs -= (nowUs - mPauseStartedTimeRealUs);
    }

    // Limit the position to the last queued media time (for a video-only stream
    // the position will be discrete, as we don't know how long each frame lasts).
    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
        if (positionUs > mAnchorMaxMediaUs) {
            positionUs = mAnchorMaxMediaUs;
        }
    }

    if (positionUs < mAudioFirstAnchorTimeMediaUs) {
        positionUs = mAudioFirstAnchorTimeMediaUs;
    }

    *mediaUs = (positionUs <= 0) ? 0 : positionUs;
    return OK;
}

void NuPlayer::Renderer::setHasMedia(bool audio) {
    Mutex::Autolock autoLock(mTimeLock);
    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }
}

void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mAudioFirstAnchorTimeMediaUs = mediaUs;
}

void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
    Mutex::Autolock autoLock(mTimeLock);
    if (mAudioFirstAnchorTimeMediaUs == -1) {
        mAudioFirstAnchorTimeMediaUs = mediaUs;
    }
}

void NuPlayer::Renderer::setAnchorTime(
        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
    Mutex::Autolock autoLock(mTimeLock);
    mAnchorTimeMediaUs = mediaUs;
    mAnchorTimeRealUs = realUs;
    mAnchorNumFramesWritten = numFramesWritten;
    if (resume) {
        mPauseStartedTimeRealUs = -1;
    }
}

void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mVideoLateByUs = lateUs;
}

int64_t NuPlayer::Renderer::getVideoLateByUs() {
    Mutex::Autolock autoLock(mTimeLock);
    return mVideoLateByUs;
}

void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
    Mutex::Autolock autoLock(mTimeLock);
    mPauseStartedTimeRealUs = realUs;
}

status_t NuPlayer::Renderer::openAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags,
        bool *isOffloaded) {
    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
    msg->setMessage("format", format);
    msg->setInt32("offload-only", offloadOnly);
    msg->setInt32("has-video", hasVideo);
    msg->setInt32("flags", flags);

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);

    int32_t err;
    if (!response->findInt32("err", &err)) {
        err = INVALID_OPERATION;
    } else if (err == OK && isOffloaded != NULL) {
        int32_t offload;
        CHECK(response->findInt32("offload", &offload));
        *isOffloaded = (offload != 0);
    }
    return err;
}

void NuPlayer::Renderer::closeAudioSink() {
    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());

    sp<AMessage> response;
    msg->postAndAwaitResponse(&response);
}

void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatOpenAudioSink:
        {
            sp<AMessage> format;
            CHECK(msg->findMessage("format", &format));

            int32_t offloadOnly;
            CHECK(msg->findInt32("offload-only", &offloadOnly));

            int32_t hasVideo;
            CHECK(msg->findInt32("has-video", &hasVideo));

            uint32_t flags;
            CHECK(msg->findInt32("flags", (int32_t *)&flags));

            status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->setInt32("offload", offloadingAudio());

            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);

            break;
        }

        case kWhatCloseAudioSink:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            onCloseAudioSink();

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatStopAudioSink:
        {
            mAudioSink->stop();
            break;
        }

        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            if (onDrainAudioQueue()) {
                uint32_t numFramesPlayed;
                CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
                         (status_t)OK);

                uint32_t numFramesPendingPlayout =
                    mNumFramesWritten - numFramesPlayed;

                // This is how long the audio sink will have data to
                // play back.
                int64_t delayUs =
                    mAudioSink->msecsPerFrame()
                        * numFramesPendingPlayout * 1000ll;

                // Let's give it more data after about half that time
                // has elapsed.
                // kWhatDrainAudioQueue is used for non-offloading mode,
                // and mLock is used only for offloading mode. Therefore,
                // no need to acquire mLock here.
                postDrainAudioQueue_l(delayUs / 2);
            }
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            Mutex::Autolock autoLock(mLock);
            postDrainVideoQueue_l();
            break;
        }

        case kWhatPostDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;
            Mutex::Autolock autoLock(mLock);
            postDrainVideoQueue_l();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatDisableOffloadAudio:
        {
            onDisableOffloadAudio();
            break;
        }

        case kWhatEnableOffloadAudio:
        {
            onEnableOffloadAudio();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        case kWhatSetVideoFrameRate:
        {
            float fps;
            CHECK(msg->findFloat("frame-rate", &fps));
            onSetVideoFrameRate(fps);
            break;
        }

        case kWhatAudioOffloadTearDown:
        {
            onAudioOffloadTearDown(kDueToError);
            break;
        }

        case kWhatAudioOffloadPauseTimeout:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioOffloadPauseTimeoutGeneration) {
                break;
            }
            ALOGV("Audio Offload tear down due to pause timeout.");
            onAudioOffloadTearDown(kDueToTimeout);
            mWakeLock->release();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused
            || offloadingAudio()) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(delayUs);
}

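// Rendering-start notification: prepareForMediaRenderingStart() re-arms the
// start generations to match the current queue generations; the next buffer
// actually rendered (audio or video) then posts kWhatMediaRenderingStart once,
// unless a queue generation has changed in the meantime.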
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
    mAudioRenderingStartGeneration = mAudioQueueGeneration;
    mVideoRenderingStartGeneration = mVideoQueueGeneration;
}

void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
    if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
        mAudioRenderingStartGeneration == mAudioQueueGeneration) {
        mVideoRenderingStartGeneration = -1;
        mAudioRenderingStartGeneration = -1;

        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatMediaRenderingStart);
        notify->post();
    }
}

// static
size_t NuPlayer::Renderer::AudioSinkCallback(
        MediaPlayerBase::AudioSink * /* audioSink */,
        void *buffer,
        size_t size,
        void *cookie,
        MediaPlayerBase::AudioSink::cb_event_t event) {
    NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;

    switch (event) {
        case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
        {
            return me->fillAudioBuffer(buffer, size);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
        {
            me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
            break;
        }

        case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
        {
            me->notifyAudioOffloadTearDown();
            break;
        }
    }

    return 0;
}

size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
    Mutex::Autolock autoLock(mLock);

    if (!offloadingAudio() || mPaused) {
        return 0;
    }

    bool hasEOS = false;

    size_t sizeCopied = 0;
    bool firstEntry = true;
    while (sizeCopied < size && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) { // EOS
            hasEOS = true;
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            break;
        }

        if (firstEntry && entry->mOffset == 0) {
            firstEntry = false;
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        size_t sizeRemaining = size - sizeCopied;
        if (copy > sizeRemaining) {
            copy = sizeRemaining;
        }

        memcpy((char *)buffer + sizeCopied,
               entry->mBuffer->data() + entry->mOffset,
               copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }
        sizeCopied += copy;
        notifyIfMediaRenderingStarted();
    }

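    // For offloaded audio, re-anchor at the first audio timestamp; its real
    // time is when that first sample started playing, i.e. now minus the
    // audio that has already been played out.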
    if (mAudioFirstAnchorTimeMediaUs >= 0) {
        int64_t nowUs = ALooper::GetNowUs();
        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
    }

    // we don't know how much data we are queueing for offloaded tracks
    mAnchorMaxMediaUs = -1;

    if (hasEOS) {
        (new AMessage(kWhatStopAudioSink, id()))->post();
    }

    return sizeCopied;
}

bool NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
        return false;
    }

    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

#if 0
    if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
        ALOGI("audio sink underrun");
    } else {
        ALOGV("audio queue has %d frames left to play",
             mAudioSink->frameCount() - numFramesAvailableToWrite);
    }
#endif

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
        QueueEntry *entry = &*mAudioQueue.begin();

        mLastAudioBufferDrained = entry->mBufferOrdinal;

        if (entry->mBuffer == NULL) {
            // EOS
            int64_t postEOSDelayUs = 0;
            if (mAudioSink->needsTrailingPadding()) {
                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
            }
            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            if (mAudioSink->needsTrailingPadding()) {
                // If we're not in gapless playback (i.e. through setNextPlayer), we
                // need to stop the track here, because that will play out the last
                // little bit at the end of the file. Otherwise short files won't play.
                mAudioSink->stop();
                mNumFramesWritten = 0;
            }
            return false;
        }

        if (entry->mOffset == 0) {
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
            onNewAudioMediaTime(mediaTimeUs);
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
        if (written < 0) {
            // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
            ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
            break;
        }

        entry->mOffset += written;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        numBytesAvailableToWrite -= written;
        size_t copiedFrames = written / mAudioSink->frameSize();
        mNumFramesWritten += copiedFrames;

        notifyIfMediaRenderingStarted();

        if (written != (ssize_t)copy) {
            // A short count was received from AudioSink::write()
            //
            // AudioSink write should block until exactly the requested number of bytes is delivered.
            // But it may return with a short count (without an error) when:
            //
            // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.

            // (Case 1)
            // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
            // needs to fail, as we should not carry over fractional frames between calls.
            CHECK_EQ(copy % mAudioSink->frameSize(), 0);

            // (Case 2)
            // Return early to the caller.
            // Beware of calling immediately again as this may busy-loop if you are not careful.
            ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
            break;
        }
    }
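    // Estimate the maximum queued media time: the anchor's media time plus the
    // duration of the frames written to the sink since the anchor was taken.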
    mAnchorMaxMediaUs =
        mAnchorTimeMediaUs +
                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
                        * 1000LL * mAudioSink->msecsPerFrame());

    return !mAudioQueue.empty();
}

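// Duration of audio that has been written to the sink but not yet played out,
// i.e. the total written duration minus the played-out duration reported by the sink.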
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
    int64_t writtenAudioDurationUs =
        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
    return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
}

int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
    int64_t currentPositionUs;
    if (mPaused || getCurrentPositionOnLooper(
            &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
        // If we failed to get the current position, e.g. because the audio clock
        // is not ready, just play out video immediately without delay.
        return nowUs;
    }
    return (mediaTimeUs - currentPositionUs) + nowUs;
}

void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
    // TRICKY: vorbis decoder generates multiple frames with the same
    // timestamp, so only update on the first frame with a given timestamp
    if (mediaTimeUs == mAnchorTimeMediaUs) {
        return;
    }
    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
    int64_t nowUs = ALooper::GetNowUs();
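    // Anchor this media time at the real time it will actually be heard:
    // now plus the audio already written to the sink but not yet played out.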
    setAnchorTime(
            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
}

void NuPlayer::Renderer::postDrainVideoQueue_l() {
    if (mDrainVideoQueuePending
            || mSyncQueues
            || (mPaused && mVideoSampleReceived)) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        msg->post();
        mDrainVideoQueuePending = true;
        return;
    }

    int64_t delayUs;
    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
        realTimeUs = mediaTimeUs;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            setAnchorTime(mediaTimeUs, nowUs);
            mPausePositionMediaTimeUs = mediaTimeUs;
            mAnchorMaxMediaUs = mediaTimeUs;
            realTimeUs = nowUs;
        } else {
            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
        }
        if (!mHasAudio) {
            mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
        }

        // Heuristics to handle the situation where the media time changed without a
        // discontinuity. If we have not yet drained an audio buffer that was
        // received after this buffer, repost in 10 msec. Otherwise repost
        // in 500 msec.
        delayUs = realTimeUs - nowUs;
        if (delayUs > 500000) {
            int64_t postDelayUs = 500000;
            if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
                postDelayUs = 10000;
            }
            msg->setWhat(kWhatPostDrainVideoQueue);
            msg->post(postDelayUs);
            mVideoScheduler->restart();
            ALOGI("possible video time jump of %dms, retrying in %dms",
                    (int)(delayUs / 1000), (int)(postDelayUs / 1000));
            mDrainVideoQueuePending = true;
            return;
        }
    }

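    // Snap the target render time onto the display's vsync grid via the frame
    // scheduler (working in nanoseconds) before computing the post delay.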
    realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
    int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);

    delayUs = realTimeUs - nowUs;

    ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
    // post 2 display refreshes before rendering is due
    msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);

    mDrainVideoQueuePending = true;
}

void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */, entry->mFinalResult);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;

        setVideoLateByUs(0);
        return;
    }

    int64_t nowUs = -1;
    int64_t realTimeUs;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        int64_t mediaTimeUs;
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        nowUs = ALooper::GetNowUs();
        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    }

    bool tooLate = false;

    if (!mPaused) {
        if (nowUs == -1) {
            nowUs = ALooper::GetNowUs();
        }
        setVideoLateByUs(nowUs - realTimeUs);
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
        }
    } else {
        setVideoLateByUs(0);
        if (!mVideoSampleReceived && !mHasAudio) {
            // This will ensure that the first frame after a flush won't be used as anchor
            // when renderer is in paused state, because resume can happen any time after seek.
            setAnchorTime(-1, -1);
        }
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart();
        }
        notifyIfMediaRenderingStarted();
    }
}

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}

void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->setInt32("finalResult", finalResult);
    notify->post(delayUs);
}

void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
    (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}

void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    setHasMedia(audio);

    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler();
            mVideoScheduler->init();
        }
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

    Mutex::Autolock autoLock(mLock);
    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue_l();
    }

    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue.
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video.
        // Drop some audio.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}

void NuPlayer::Renderer::syncQueuesDone_l() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue_l();
    }
}

void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    Mutex::Autolock autoLock(mLock);
    if (audio) {
        if (mAudioQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        if (mVideoQueue.empty() && mSyncQueues) {
            syncQueuesDone_l();
        }
        mVideoQueue.push_back(entry);
        postDrainVideoQueue_l();
    }
}

void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio, notifyComplete;
    CHECK(msg->findInt32("audio", &audio));

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            mFlushingAudio = false;
            notifyComplete = mNotifyCompleteAudio;
            mNotifyCompleteAudio = false;
        } else {
            mFlushingVideo = false;
            notifyComplete = mNotifyCompleteVideo;
            mNotifyCompleteVideo = false;
        }
    }

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    {
         Mutex::Autolock autoLock(mLock);
         syncQueuesDone_l();
         setPauseStartedTimeRealUs(-1);
         setAnchorTime(-1, -1);
    }

    ALOGV("flushing %s", audio ? "audio" : "video");
    if (audio) {
        {
            Mutex::Autolock autoLock(mLock);
            flushQueue(&mAudioQueue);

            ++mAudioQueueGeneration;
            prepareForMediaRenderingStart();

            if (offloadingAudio()) {
                setAudioFirstAnchorTime(-1);
            }
        }

        mDrainAudioQueuePending = false;

        if (offloadingAudio()) {
            mAudioSink->pause();
            mAudioSink->flush();
            mAudioSink->start();
        }
    } else {
        flushQueue(&mVideoQueue);

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;

        if (mVideoScheduler != NULL) {
            mVideoScheduler->restart();
        }

        prepareForMediaRenderingStart();
    }

    mVideoSampleReceived = false;

    if (notifyComplete) {
        notifyFlushComplete(audio);
    }
}

void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

void NuPlayer::Renderer::onAudioSinkChanged() {
    if (offloadingAudio()) {
        return;
    }
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
    mAnchorNumFramesWritten = -1;
    uint32_t written;
    if (mAudioSink->getFramesWritten(&written) == OK) {
        mNumFramesWritten = written;
    }
}

void NuPlayer::Renderer::onDisableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags &= ~FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::onEnableOffloadAudio() {
    Mutex::Autolock autoLock(mLock);
    mFlags |= FLAG_OFFLOAD_AUDIO;
    ++mAudioQueueGeneration;
}

void NuPlayer::Renderer::onPause() {
    if (mPaused) {
        ALOGW("Renderer::onPause() called while already paused!");
        return;
    }
    int64_t currentPositionUs;
    int64_t pausePositionMediaTimeUs;
    if (getCurrentPositionFromAnchor(
            &currentPositionUs, ALooper::GetNowUs()) == OK) {
        pausePositionMediaTimeUs = currentPositionUs;
    } else {
        // Set the paused position to -1 (unavailable) if we don't have an anchor time.
        // This can happen if the client does a seekTo() immediately followed by
        // pause(). The renderer will be flushed with the anchor time cleared. We don't
        // want to leave a stale value in mPausePositionMediaTimeUs.
        pausePositionMediaTimeUs = -1;
    }
    {
        Mutex::Autolock autoLock(mLock);
        mPausePositionMediaTimeUs = pausePositionMediaTimeUs;
        ++mAudioQueueGeneration;
        ++mVideoQueueGeneration;
        prepareForMediaRenderingStart();
        mPaused = true;
        setPauseStartedTimeRealUs(ALooper::GetNowUs());
    }

    mDrainAudioQueuePending = false;
    mDrainVideoQueuePending = false;

    if (mHasAudio) {
        mAudioSink->pause();
        startAudioOffloadPauseTimeout();
    }

    ALOGV("now paused audio queue has %d entries, video has %d entries",
          mAudioQueue.size(), mVideoQueue.size());
}

void NuPlayer::Renderer::onResume() {
    if (!mPaused) {
        return;
    }

    if (mHasAudio) {
        cancelAudioOffloadPauseTimeout();
        mAudioSink->start();
    }

    Mutex::Autolock autoLock(mLock);
    mPaused = false;
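    // Shift the anchor's real time forward by the time spent paused so that
    // position extrapolation resumes from where it left off.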
    if (mPauseStartedTimeRealUs != -1) {
        int64_t newAnchorRealUs =
            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
        setAnchorTime(
                mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
    }

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue_l();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue_l();
    }
}

void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
    if (mVideoScheduler == NULL) {
        mVideoScheduler = new VideoFrameScheduler();
    }
    mVideoScheduler->init(fps);
}

// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
// Some calls could conceivably retrieve extrapolated data instead of
// accessing getTimestamp() or getPosition() every time a data buffer with
// a media time is received.
//
int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
    uint32_t numFramesPlayed;
    int64_t numFramesPlayedAt;
    AudioTimestamp ts;
    static const int64_t kStaleTimestamp100ms = 100000;

    status_t res = mAudioSink->getTimestamp(ts);
    if (res == OK) {                 // case 1: mixing audio tracks and offloaded tracks.
        numFramesPlayed = ts.mPosition;
        numFramesPlayedAt =
            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
        const int64_t timestampAge = nowUs - numFramesPlayedAt;
        if (timestampAge > kStaleTimestamp100ms) {
            // This is an audio FIXME.
            // getTimestamp returns a timestamp which may come from audio mixing threads.
            // After pausing, the MixerThread may go idle, thus the mTime estimate may
            // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
            // the max latency should be about 25ms with an average around 12ms (to be verified).
            // For safety we use 100ms.
            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
                    (long long)nowUs, (long long)numFramesPlayedAt);
            numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
        }
        //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
    } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
        numFramesPlayed = 0;
        numFramesPlayedAt = nowUs;
        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
        //        numFramesPlayed, (long long)numFramesPlayedAt);
    } else {                         // case 3: transitory at new track or audio fast tracks.
        res = mAudioSink->getPosition(&numFramesPlayed);
        CHECK_EQ(res, (status_t)OK);
        numFramesPlayedAt = nowUs;
        numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
    }

    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
    //CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test
    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
            + nowUs - numFramesPlayedAt;
    if (durationUs < 0) {
        // Occurs when numFramesPlayed position is very small and the following:
        // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
        //     numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
        // (2) In case 3, using getPosition and adding mAudioSink->latency() to
        //     numFramesPlayedAt, by a time amount greater than numFramesPlayed.
        //
        // Both of these are transitory conditions.
        ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
        durationUs = 0;
    }
    ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
            (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
    return durationUs;
}

void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
    if (mAudioOffloadTornDown) {
        return;
    }
    mAudioOffloadTornDown = true;

    int64_t currentPositionUs;
    if (getCurrentPositionOnLooper(&currentPositionUs) != OK) {
        currentPositionUs = 0;
    }

    mAudioSink->stop();
    mAudioSink->flush();

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatAudioOffloadTearDown);
    notify->setInt64("positionUs", currentPositionUs);
    notify->setInt32("reason", reason);
    notify->post();
}

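// While paused in offload mode, hold a wake lock and arm a timeout message;
// if it fires kOffloadPauseMaxUs later, the offloaded sink is torn down so the
// DSP can power down. Resuming cancels the timeout by bumping the generation.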
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        mWakeLock->acquire();
        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
        msg->post(kOffloadPauseMaxUs);
    }
}

void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
    if (offloadingAudio()) {
        mWakeLock->release(true);
        ++mAudioOffloadPauseTimeoutGeneration;
    }
}

status_t NuPlayer::Renderer::onOpenAudioSink(
        const sp<AMessage> &format,
        bool offloadOnly,
        bool hasVideo,
        uint32_t flags) {
    ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
            offloadOnly, offloadingAudio());
    bool audioSinkChanged = false;

    int32_t numChannels;
    CHECK(format->findInt32("channel-count", &numChannels));

    int32_t channelMask;
    if (!format->findInt32("channel-mask", &channelMask)) {
        // signal to the AudioSink to derive the mask from count.
        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
    }

    int32_t sampleRate;
    CHECK(format->findInt32("sample-rate", &sampleRate));

    if (offloadingAudio()) {
        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
        AString mime;
        CHECK(format->findString("mime", &mime));
        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());

        if (err != OK) {
            ALOGE("Couldn't map mime \"%s\" to a valid "
                    "audio_format", mime.c_str());
            onDisableOffloadAudio();
        } else {
            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                    mime.c_str(), audioFormat);

            int avgBitRate = -1;
            format->findInt32("bit-rate", &avgBitRate);

            int32_t aacProfile = -1;
            if (audioFormat == AUDIO_FORMAT_AAC
                    && format->findInt32("aac-profile", &aacProfile)) {
                // Redefine AAC format as per aac profile
                mapAACProfileToAudioFormat(
                        audioFormat,
                        aacProfile);
            }

            audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
            offloadInfo.duration_us = -1;
            format->findInt64(
                    "durationUs", &offloadInfo.duration_us);
            offloadInfo.sample_rate = sampleRate;
            offloadInfo.channel_mask = channelMask;
            offloadInfo.format = audioFormat;
            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
            offloadInfo.bit_rate = avgBitRate;
            offloadInfo.has_video = hasVideo;
            offloadInfo.is_streaming = true;

            if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                ALOGV("openAudioSink: no change in offload mode");
                // no change from previous configuration, everything ok.
                return OK;
            }
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;

            ALOGV("openAudioSink: try to open AudioSink in offload mode");
            uint32_t offloadFlags = flags;
            offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
            offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
            audioSinkChanged = true;
            mAudioSink->close();
            err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    audioFormat,
                    8 /* bufferCount */,
                    &NuPlayer::Renderer::AudioSinkCallback,
                    this,
                    (audio_output_flags_t)offloadFlags,
                    &offloadInfo);

            if (err == OK) {
                // If the playback is offloaded to h/w, we pass
                // the HAL some metadata information.
                // We don't want to do this for PCM because it
                // will be going through the AudioFlinger mixer
                // before reaching the hardware.
                // TODO
                mCurrentOffloadInfo = offloadInfo;
                err = mAudioSink->start();
                ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
            }
            if (err != OK) {
                // Clean up, fall back to non offload mode.
                mAudioSink->close();
                onDisableOffloadAudio();
                mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
                ALOGV("openAudioSink: offload failed");
            }
        }
    }
    if (!offloadOnly && !offloadingAudio()) {
        ALOGV("openAudioSink: open AudioSink in NON-offload mode");
        uint32_t pcmFlags = flags;
        pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;

        const PcmInfo info = {
                (audio_channel_mask_t)channelMask,
                (audio_output_flags_t)pcmFlags,
                AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
                numChannels,
                sampleRate
        };
        if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
            ALOGV("openAudioSink: no change in pcm mode");
            // no change from previous configuration, everything ok.
            return OK;
        }

        audioSinkChanged = true;
        mAudioSink->close();
        mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
        status_t err = mAudioSink->open(
                    sampleRate,
                    numChannels,
                    (audio_channel_mask_t)channelMask,
                    AUDIO_FORMAT_PCM_16_BIT,
                    8 /* bufferCount */,
                    NULL,
                    NULL,
                    (audio_output_flags_t)pcmFlags);
        if (err != OK) {
            ALOGW("openAudioSink: non offloaded open failed status: %d", err);
            mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
            return err;
        }
        mCurrentPcmInfo = info;
        mAudioSink->start();
    }
    if (audioSinkChanged) {
        onAudioSinkChanged();
    }
    if (offloadingAudio()) {
        mAudioOffloadTornDown = false;
    }
    return OK;
}

void NuPlayer::Renderer::onCloseAudioSink() {
    mAudioSink->close();
    mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
    mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
}

}  // namespace android