1 /*
2 * Copyright 2014, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "MediaCodecSource"
19 #define DEBUG_DRIFT_TIME 0
20
21 #include <inttypes.h>
22
23 #include <gui/IGraphicBufferProducer.h>
24 #include <gui/Surface.h>
25 #include <media/ICrypto.h>
26 #include <media/MediaBufferHolder.h>
27 #include <media/MediaCodecBuffer.h>
28 #include <media/MediaSource.h>
29 #include <media/stagefright/foundation/ABuffer.h>
30 #include <media/stagefright/foundation/ADebug.h>
31 #include <media/stagefright/foundation/ALooper.h>
32 #include <media/stagefright/foundation/AMessage.h>
33 #include <media/stagefright/MediaBuffer.h>
34 #include <media/stagefright/MediaCodec.h>
35 #include <media/stagefright/MediaCodecList.h>
36 #include <media/stagefright/MediaCodecSource.h>
37 #include <media/stagefright/MediaErrors.h>
38 #include <media/stagefright/MetaData.h>
39 #include <media/stagefright/Utils.h>
40
41 namespace android {
42
43 const int32_t kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
44 const int32_t kDefaultHwVideoEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
45 const int32_t kDefaultVideoEncoderDataSpace = HAL_DATASPACE_V0_BT709;
46
const int kStopTimeoutUs = 300000; // allow 300 ms for shutting down encoder
// allow maximum 1 sec for stop time offset. This limits the delay in the
// input source.
50 const int kMaxStopTimeOffsetUs = 1000000;
51
// Puller pumps buffers from a push-agnostic MediaSource on a dedicated looper
// thread: it repeatedly calls mSource->read() (which may block) and queues the
// results for the encoder side to drain via readBuffer().
struct MediaCodecSource::Puller : public AHandler {
    explicit Puller(const sp<MediaSource> &source);

    // Calls mSource->stop() directly from the caller's thread; used to break
    // a read() the looper thread is blocked in.
    void interruptSource();
    status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
    // Stops pulling and flushes already-pulled buffers; does not stop mSource.
    void stop();
    // Synchronously stops mSource on the looper thread.
    void stopSource();
    void pause();
    void resume();
    status_t setStopTimeUs(int64_t stopTimeUs);
    bool readBuffer(MediaBufferBase **buffer);

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg);
    virtual ~Puller();

private:
    enum {
        kWhatStart = 'msta',
        kWhatStop,
        kWhatPull,
        kWhatSetStopTimeUs,
    };

    sp<MediaSource> mSource;
    sp<AMessage> mNotify;
    sp<ALooper> mLooper;
    bool mIsAudio;

    // State shared between the looper thread and external callers; access is
    // serialized through Mutexed<Queue> below.
    struct Queue {
        Queue()
            : mReadPendingSince(0),
              mPaused(false),
              mPulling(false) { }
        int64_t mReadPendingSince;  // when the current source read started; 0 if none pending
        bool mPaused;               // pulled buffers are dropped while paused
        bool mPulling;              // cleared to ask the pull loop to wind down
        Vector<MediaBufferBase *> mReadBuffers;

        // release every queued buffer
        void flush();
        // if queue is empty, return false and set *|buffer| to NULL . Otherwise, pop
        // buffer from front of the queue, place it into *|buffer| and return true.
        bool readBuffer(MediaBufferBase **buffer);
        // add a buffer to the back of the queue
        void pushBuffer(MediaBufferBase *mbuf);
    };
    Mutexed<Queue> mQueue;

    status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
    void schedulePull();
    void handleEOS();

    DISALLOW_EVIL_CONSTRUCTORS(Puller);
};
106
Puller(const sp<MediaSource> & source)107 MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
108 : mSource(source),
109 mLooper(new ALooper()),
110 mIsAudio(false)
111 {
112 sp<MetaData> meta = source->getFormat();
113 const char *mime;
114 CHECK(meta->findCString(kKeyMIMEType, &mime));
115
116 mIsAudio = !strncasecmp(mime, "audio/", 6);
117
118 mLooper->setName("pull_looper");
119 }
120
MediaCodecSource::Puller::~Puller() {
    // Detach from and shut down the private looper before members go away.
    mLooper->unregisterHandler(id());
    mLooper->stop();
}
125
// Append a buffer to the back of the queue. Caller must hold the
// Mutexed<Queue> lock.
void MediaCodecSource::Puller::Queue::pushBuffer(MediaBufferBase *mbuf) {
    mReadBuffers.push_back(mbuf);
}
129
readBuffer(MediaBufferBase ** mbuf)130 bool MediaCodecSource::Puller::Queue::readBuffer(MediaBufferBase **mbuf) {
131 if (mReadBuffers.empty()) {
132 *mbuf = NULL;
133 return false;
134 }
135 *mbuf = *mReadBuffers.begin();
136 mReadBuffers.erase(mReadBuffers.begin());
137 return true;
138 }
139
flush()140 void MediaCodecSource::Puller::Queue::flush() {
141 MediaBufferBase *mbuf;
142 while (readBuffer(&mbuf)) {
143 // there are no null buffers in the queue
144 mbuf->release();
145 }
146 }
147
// Thread-safe wrapper: pop the next pulled buffer under the queue lock.
bool MediaCodecSource::Puller::readBuffer(MediaBufferBase **mbuf) {
    Mutexed<Queue>::Locked queue(mQueue);
    return queue->readBuffer(mbuf);
}
152
postSynchronouslyAndReturnError(const sp<AMessage> & msg)153 status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
154 const sp<AMessage> &msg) {
155 sp<AMessage> response;
156 status_t err = msg->postAndAwaitResponse(&response);
157
158 if (err != OK) {
159 return err;
160 }
161
162 if (!response->findInt32("err", &err)) {
163 err = OK;
164 }
165
166 return err;
167 }
168
setStopTimeUs(int64_t stopTimeUs)169 status_t MediaCodecSource::Puller::setStopTimeUs(int64_t stopTimeUs) {
170 sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, this);
171 msg->setInt64("stop-time-us", stopTimeUs);
172 return postSynchronouslyAndReturnError(msg);
173 }
174
// Start the pull loop: spin up the dedicated looper thread (reads happen
// there so a blocking source cannot stall the caller), then synchronously
// start the source and schedule the first pull.
status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, const sp<AMessage> &notify) {
    ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
    mLooper->start(
            false /* runOnCallingThread */,
            false /* canCallJava */,
            PRIORITY_AUDIO);
    mLooper->registerHandler(this);
    mNotify = notify;

    sp<AMessage> msg = new AMessage(kWhatStart, this);
    msg->setObject("meta", meta);
    return postSynchronouslyAndReturnError(msg);
}
188
void MediaCodecSource::Puller::stop() {
    bool interrupt = false;
    {
        // mark stopping before actually reaching kWhatStop on the looper, so the pulling will
        // stop.
        Mutexed<Queue>::Locked queue(mQueue);
        queue->mPulling = false;
        // A read pending for over a second means the source is likely blocked;
        // it has to be interrupted from outside the looper thread.
        interrupt = queue->mReadPendingSince && (queue->mReadPendingSince < ALooper::GetNowUs() - 1000000);
        queue->flush(); // flush any unprocessed pulled buffers
    }

    // Must be done after dropping the queue lock; see interruptSource().
    if (interrupt) {
        interruptSource();
    }
}
204
void MediaCodecSource::Puller::interruptSource() {
    // call source->stop if read has been pending for over a second
    // We have to call this outside the looper as looper is pending on the read.
    mSource->stop();
}
210
stopSource()211 void MediaCodecSource::Puller::stopSource() {
212 sp<AMessage> msg = new AMessage(kWhatStop, this);
213 (void)postSynchronouslyAndReturnError(msg);
214 }
215
// While paused, buffers read from the source are dropped (see kWhatPull).
void MediaCodecSource::Puller::pause() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = true;
}
220
// Resume delivering pulled buffers after pause().
void MediaCodecSource::Puller::resume() {
    Mutexed<Queue>::Locked queue(mQueue);
    queue->mPaused = false;
}
225
// Queue the next source read on our own looper thread.
void MediaCodecSource::Puller::schedulePull() {
    (new AMessage(kWhatPull, this))->post();
}
229
handleEOS()230 void MediaCodecSource::Puller::handleEOS() {
231 ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
232 sp<AMessage> msg = mNotify->dup();
233 msg->setInt32("eos", 1);
234 msg->post();
235 }
236
void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
        {
            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));

            {
                Mutexed<Queue>::Locked queue(mQueue);
                queue->mPulling = true;
            }

            status_t err = mSource->start(static_cast<MetaData *>(obj.get()));

            if (err == OK) {
                // Kick off the first pull; subsequent pulls self-schedule.
                schedulePull();
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatSetStopTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            int64_t stopTimeUs;
            CHECK(msg->findInt64("stop-time-us", &stopTimeUs));
            status_t err = mSource->setStopTimeUs(stopTimeUs);

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case kWhatStop:
        {
            mSource->stop();

            sp<AMessage> response = new AMessage;
            response->setInt32("err", OK);

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            response->postReply(replyID);
            break;
        }

        case kWhatPull:
        {
            Mutexed<Queue>::Locked queue(mQueue);
            // Record when the read began so stop() can detect a stuck source.
            queue->mReadPendingSince = ALooper::GetNowUs();
            if (!queue->mPulling) {
                handleEOS();
                break;
            }

            // Drop the lock across the (potentially blocking) source read so
            // stop()/pause() aren't blocked behind it.
            queue.unlock();
            MediaBufferBase *mbuf = NULL;
            status_t err = mSource->read(&mbuf);
            queue.lock();

            queue->mReadPendingSince = 0;
            // if we need to discard buffer
            if (!queue->mPulling || queue->mPaused || err != OK) {
                if (mbuf != NULL) {
                    mbuf->release();
                    mbuf = NULL;
                }
                if (queue->mPulling && err == OK) {
                    msg->post(); // if simply paused, keep pulling source
                    break;
                } else if (err == ERROR_END_OF_STREAM) {
                    ALOGV("stream ended, mbuf %p", mbuf);
                } else if (err != OK) {
                    ALOGE("error %d reading stream.", err);
                }
            }

            if (mbuf != NULL) {
                queue->pushBuffer(mbuf);
            }

            // Notify outside the lock; the consumer calls back into
            // readBuffer() which takes the same lock.
            queue.unlock();

            if (mbuf != NULL) {
                mNotify->post();
                msg->post();
            } else {
                handleEOS();
            }
            break;
        }

        default:
            TRESPASS();
    }
}
341
// Output starts in a clean state: no error and EOS not yet reached.
MediaCodecSource::Output::Output()
    : mEncoderReachedEOS(false),
      mErrorCode(OK) {
}
346
347 // static
Create(const sp<ALooper> & looper,const sp<AMessage> & format,const sp<MediaSource> & source,const sp<PersistentSurface> & persistentSurface,uint32_t flags)348 sp<MediaCodecSource> MediaCodecSource::Create(
349 const sp<ALooper> &looper,
350 const sp<AMessage> &format,
351 const sp<MediaSource> &source,
352 const sp<PersistentSurface> &persistentSurface,
353 uint32_t flags) {
354 sp<MediaCodecSource> mediaSource = new MediaCodecSource(
355 looper, format, source, persistentSurface, flags);
356
357 if (mediaSource->init() == OK) {
358 return mediaSource;
359 }
360 return NULL;
361 }
362
setInputBufferTimeOffset(int64_t timeOffsetUs)363 status_t MediaCodecSource::setInputBufferTimeOffset(int64_t timeOffsetUs) {
364 sp<AMessage> msg = new AMessage(kWhatSetInputBufferTimeOffset, mReflector);
365 msg->setInt64("time-offset-us", timeOffsetUs);
366 return postSynchronouslyAndReturnError(msg);
367 }
368
getFirstSampleSystemTimeUs()369 int64_t MediaCodecSource::getFirstSampleSystemTimeUs() {
370 sp<AMessage> msg = new AMessage(kWhatGetFirstSampleSystemTimeUs, mReflector);
371 sp<AMessage> response;
372 msg->postAndAwaitResponse(&response);
373 int64_t timeUs;
374 if (!response->findInt64("time-us", &timeUs)) {
375 timeUs = -1ll;
376 }
377 return timeUs;
378 }
379
start(MetaData * params)380 status_t MediaCodecSource::start(MetaData* params) {
381 sp<AMessage> msg = new AMessage(kWhatStart, mReflector);
382 msg->setObject("meta", params);
383 return postSynchronouslyAndReturnError(msg);
384 }
385
// Synchronous stop: blocks until the encoder has fully stopped (the reply is
// posted from signalEOS() once EOS has propagated).
status_t MediaCodecSource::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);
    return postSynchronouslyAndReturnError(msg);
}
390
391
setStopTimeUs(int64_t stopTimeUs)392 status_t MediaCodecSource::setStopTimeUs(int64_t stopTimeUs) {
393 sp<AMessage> msg = new AMessage(kWhatSetStopTimeUs, mReflector);
394 msg->setInt64("stop-time-us", stopTimeUs);
395 return postSynchronouslyAndReturnError(msg);
396 }
397
pause(MetaData * params)398 status_t MediaCodecSource::pause(MetaData* params) {
399 sp<AMessage> msg = new AMessage(kWhatPause, mReflector);
400 msg->setObject("meta", params);
401 msg->post();
402 return OK;
403 }
404
// Return a snapshot of the current output format, taken under the meta lock.
sp<MetaData> MediaCodecSource::getFormat() {
    Mutexed<sp<MetaData>>::Locked meta(mMeta);
    return *meta;
}
409
// Only valid in surface-input mode; the producer is created in initEncoder().
sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
    CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
    return mGraphicBufferProducer;
}
414
// Block until an encoded buffer is available or the encoder signals EOS.
// Returns OK with *|buffer| set on success; at EOS returns the stored error
// code (which distinguishes a clean EOS from a failure) with *|buffer| NULL.
status_t MediaCodecSource::read(
        MediaBufferBase** buffer, const ReadOptions* /* options */) {
    Mutexed<Output>::Locked output(mOutput);

    *buffer = NULL;
    while (output->mBufferQueue.size() == 0 && !output->mEncoderReachedEOS) {
        output.waitForCondition(output->mCond);
    }
    if (!output->mEncoderReachedEOS) {
        // Hand out the oldest encoded buffer.
        *buffer = *output->mBufferQueue.begin();
        output->mBufferQueue.erase(output->mBufferQueue.begin());
        return OK;
    }
    return output->mErrorCode;
}
430
// The downstream consumer is done with the buffer; detach ourselves as
// observer and release it.
void MediaCodecSource::signalBufferReturned(MediaBufferBase *buffer) {
    buffer->setObserver(0);
    buffer->release();
}
435
MediaCodecSource::MediaCodecSource(
        const sp<ALooper> &looper,
        const sp<AMessage> &outputFormat,
        const sp<MediaSource> &source,
        const sp<PersistentSurface> &persistentSurface,
        uint32_t flags)
    : mLooper(looper),
      mOutputFormat(outputFormat),
      mMeta(new MetaData),
      mFlags(flags),
      mIsVideo(false),
      mStarted(false),
      mStopping(false),
      mDoMoreWorkPending(false),
      mSetEncoderFormat(false),
      mEncoderFormat(0),
      mEncoderDataSpace(0),
      mPersistentSurface(persistentSurface),
      mInputBufferTimeOffsetUs(0),
      mFirstSampleSystemTimeUs(-1ll),
      mPausePending(false),
      mFirstSampleTimeUs(-1ll),
      mGeneration(0) {
    CHECK(mLooper != NULL);

    // A Puller is only needed when the encoder is fed buffers; in surface
    // mode frames arrive through the input surface instead.
    if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
        mPuller = new Puller(source);
    }
}
465
MediaCodecSource::~MediaCodecSource() {
    releaseEncoder();

    // Shut down the codec's private looper and detach our handler from the
    // caller-supplied looper.
    mCodecLooper->stop();
    mLooper->unregisterHandler(mReflector->id());
}
472
init()473 status_t MediaCodecSource::init() {
474 status_t err = initEncoder();
475
476 if (err != OK) {
477 releaseEncoder();
478 }
479
480 return err;
481 }
482
// Create and configure the encoder: register the message reflector, pick a
// matching codec component, configure it for encoding, set up surface input
// if requested, derive buffer-format hints for the source, and start the
// codec. Returns the first error encountered.
status_t MediaCodecSource::initEncoder() {

    // Reflector routes AMessages posted to mLooper back to this object.
    mReflector = new AHandlerReflector<MediaCodecSource>(this);
    mLooper->registerHandler(mReflector);

    // The codec gets its own looper so its callbacks don't contend with ours.
    mCodecLooper = new ALooper;
    mCodecLooper->setName("codec_looper");
    mCodecLooper->start();

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        // Surface input starts suspended; frames are enabled in onStart().
        mOutputFormat->setInt32("create-input-buffers-suspended", 1);
    }

    AString outputMIME;
    CHECK(mOutputFormat->findString("mime", &outputMIME));
    mIsVideo = outputMIME.startsWithIgnoreCase("video/");

    AString name;
    status_t err = NO_INIT;
    if (mOutputFormat->findString("testing-name", &name)) {
        // Test hook: force a specific component instead of list matching.
        mEncoder = MediaCodec::CreateByComponentName(mCodecLooper, name);

        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
        mEncoder->setCallback(mEncoderActivityNotify);

        err = mEncoder->configure(
                    mOutputFormat,
                    NULL /* nativeWindow */,
                    NULL /* crypto */,
                    MediaCodec::CONFIGURE_FLAG_ENCODE);
    } else {
        Vector<AString> matchingCodecs;
        MediaCodecList::findMatchingCodecs(
                outputMIME.c_str(), true /* encoder */,
                ((mFlags & FLAG_PREFER_SOFTWARE_CODEC) ? MediaCodecList::kPreferSoftwareCodecs : 0),
                &matchingCodecs);

        // Try each matching component until one configures successfully.
        for (size_t ix = 0; ix < matchingCodecs.size(); ++ix) {
            mEncoder = MediaCodec::CreateByComponentName(
                    mCodecLooper, matchingCodecs[ix]);

            if (mEncoder == NULL) {
                continue;
            }

            ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());

            mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector);
            mEncoder->setCallback(mEncoderActivityNotify);

            err = mEncoder->configure(
                        mOutputFormat,
                        NULL /* nativeWindow */,
                        NULL /* crypto */,
                        MediaCodec::CONFIGURE_FLAG_ENCODE);

            if (err == OK) {
                break;
            }
            // Configuration failed: drop this component and try the next one.
            mEncoder->release();
            mEncoder = NULL;
        }
    }

    if (err != OK) {
        return err;
    }

    // Publish the (possibly codec-amended) output format as our MetaData.
    mEncoder->getOutputFormat(&mOutputFormat);
    sp<MetaData> meta = new MetaData;
    convertMessageToMetaData(mOutputFormat, meta);
    mMeta.lock().set(meta);

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        CHECK(mIsVideo);

        if (mPersistentSurface != NULL) {
            // When using persistent surface, we are only interested in the
            // consumer, but have to use PersistentSurface as a wrapper to
            // pass consumer over messages (similar to BufferProducerWrapper)
            err = mEncoder->setInputSurface(mPersistentSurface);
        } else {
            err = mEncoder->createInputSurface(&mGraphicBufferProducer);
        }

        if (err != OK) {
            return err;
        }
    }

    // Derive the pixel format / dataspace the source should use when
    // allocating buffers for this encoder (applied in onStart()).
    sp<AMessage> inputFormat;
    int32_t usingSwReadOften;
    mSetEncoderFormat = false;
    if (mEncoder->getInputFormat(&inputFormat) == OK) {
        mSetEncoderFormat = true;
        if (inputFormat->findInt32("using-sw-read-often", &usingSwReadOften)
                && usingSwReadOften) {
            // this is a SW encoder; signal source to allocate SW readable buffers
            mEncoderFormat = kDefaultSwVideoEncoderFormat;
        } else {
            mEncoderFormat = kDefaultHwVideoEncoderFormat;
        }
        if (!inputFormat->findInt32("android._dataspace", &mEncoderDataSpace)) {
            mEncoderDataSpace = kDefaultVideoEncoderDataSpace;
        }
        ALOGV("setting dataspace %#x, format %#x", mEncoderDataSpace, mEncoderFormat);
    }

    err = mEncoder->start();

    if (err != OK) {
        return err;
    }

    {
        // Reset output state for a fresh encoding session.
        Mutexed<Output>::Locked output(mOutput);
        output->mEncoderReachedEOS = false;
        output->mErrorCode = OK;
    }

    return OK;
}
605
releaseEncoder()606 void MediaCodecSource::releaseEncoder() {
607 if (mEncoder == NULL) {
608 return;
609 }
610
611 mEncoder->release();
612 mEncoder.clear();
613 }
614
postSynchronouslyAndReturnError(const sp<AMessage> & msg)615 status_t MediaCodecSource::postSynchronouslyAndReturnError(
616 const sp<AMessage> &msg) {
617 sp<AMessage> response;
618 status_t err = msg->postAndAwaitResponse(&response);
619
620 if (err != OK) {
621 return err;
622 }
623
624 if (!response->findInt32("err", &err)) {
625 err = OK;
626 }
627
628 return err;
629 }
630
// Mark the encoder as having reached EOS (recording |err| for read() to
// return), release the encoder, and — if a stop is in flight — stop the
// source and wake every caller waiting in kWhatStop.
void MediaCodecSource::signalEOS(status_t err) {
    bool reachedEOS = false;
    {
        Mutexed<Output>::Locked output(mOutput);
        reachedEOS = output->mEncoderReachedEOS;
        if (!reachedEOS) {
            ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
            // release all unread media buffers
            for (List<MediaBufferBase*>::iterator it = output->mBufferQueue.begin();
                    it != output->mBufferQueue.end(); it++) {
                (*it)->release();
            }
            output->mBufferQueue.clear();
            output->mEncoderReachedEOS = true;
            output->mErrorCode = err;
            // Wake any thread blocked in read().
            output->mCond.signal();

            reachedEOS = true;
            // Drop the lock before releasing the encoder so the (possibly
            // slow) codec teardown doesn't block readers.
            output.unlock();
            releaseEncoder();
        }
    }

    if (mStopping && reachedEOS) {
        ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
        if (mPuller != NULL) {
            mPuller->stopSource();
        }
        ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
        // posting reply to everyone that's waiting
        List<sp<AReplyToken>>::iterator it;
        for (it = mStopReplyIDQueue.begin();
                it != mStopReplyIDQueue.end(); it++) {
            (new AMessage)->postReply(*it);
        }
        mStopReplyIDQueue.clear();
        mStopping = false;
        // Invalidate any pending kWhatStopStalled timeout messages.
        ++mGeneration;
    }
}
671
resume(int64_t resumeStartTimeUs)672 void MediaCodecSource::resume(int64_t resumeStartTimeUs) {
673 CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
674 if (mEncoder != NULL) {
675 sp<AMessage> params = new AMessage;
676 params->setInt32("drop-input-frames", false);
677 if (resumeStartTimeUs > 0) {
678 params->setInt64("drop-start-time-us", resumeStartTimeUs);
679 }
680 mEncoder->setParameters(params);
681 }
682 }
683
// Pair available encoder input slots with buffers pulled from the source and
// queue them into the encoder. A NULL pulled buffer signals end of stream.
// Returns the first queueInputBuffer error, otherwise OK.
status_t MediaCodecSource::feedEncoderInputBuffers() {
    MediaBufferBase* mbuf = NULL;
    while (!mAvailEncoderInputIndices.empty() && mPuller->readBuffer(&mbuf)) {
        size_t bufferIndex = *mAvailEncoderInputIndices.begin();
        mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());

        int64_t timeUs = 0ll;
        uint32_t flags = 0;
        size_t size = 0;

        if (mbuf != NULL) {
            CHECK(mbuf->meta_data().findInt64(kKeyTime, &timeUs));
            if (mFirstSampleSystemTimeUs < 0ll) {
                mFirstSampleSystemTimeUs = systemTime() / 1000;
                if (mPausePending) {
                    // A pause arrived before the first sample; honor it now,
                    // discard the buffer, and return the input slot unused.
                    mPausePending = false;
                    onPause(mFirstSampleSystemTimeUs);
                    mbuf->release();
                    mAvailEncoderInputIndices.push_back(bufferIndex);
                    return OK;
                }
            }

            timeUs += mInputBufferTimeOffsetUs;

            // push decoding time for video, or drift time for audio
            if (mIsVideo) {
                mDecodingTimeQueue.push_back(timeUs);
            } else {
#if DEBUG_DRIFT_TIME
                if (mFirstSampleTimeUs < 0ll) {
                    mFirstSampleTimeUs = timeUs;
                }
                int64_t driftTimeUs = 0;
                if (mbuf->meta_data().findInt64(kKeyDriftTime, &driftTimeUs)
                        && driftTimeUs) {
                    driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
                }
                mDriftTimeQueue.push_back(driftTimeUs);
#endif // DEBUG_DRIFT_TIME
            }

            sp<MediaCodecBuffer> inbuf;
            status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);

            if (err != OK || inbuf == NULL || inbuf->data() == NULL
                    || mbuf->data() == NULL || mbuf->size() == 0) {
                // Unusable input slot or empty source buffer: give up and
                // signal EOS.
                mbuf->release();
                signalEOS();
                break;
            }

            size = mbuf->size();

            memcpy(inbuf->data(), mbuf->data(), size);

            if (mIsVideo) {
                // video encoder will release MediaBuffer when done
                // with underlying data.
                inbuf->meta()->setObject("mediaBufferHolder", new MediaBufferHolder(mbuf));
                mbuf->release();
            } else {
                mbuf->release();
            }
        } else {
            // No buffer means the puller hit end of stream; queue an empty
            // EOS-flagged input buffer.
            flags = MediaCodec::BUFFER_FLAG_EOS;
        }

        status_t err = mEncoder->queueInputBuffer(
                bufferIndex, 0, size, timeUs, flags);

        if (err != OK) {
            return err;
        }
    }

    return OK;
}
762
// Handle kWhatStart on the looper: either resume from a pause (if already
// started) or perform the initial start of the puller / surface input.
// |params| may carry kKeyTime as the start/resume time.
status_t MediaCodecSource::onStart(MetaData *params) {
    if (mStopping) {
        ALOGE("Failed to start while we're stopping");
        return INVALID_OPERATION;
    }
    int64_t startTimeUs;
    if (params == NULL || !params->findInt64(kKeyTime, &startTimeUs)) {
        startTimeUs = -1ll;
    }

    if (mStarted) {
        // Already started: treat this as a resume from pause.
        ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
        if (mPausePending) {
            // The pause never took effect (no sample seen yet); just cancel it.
            mPausePending = false;
            return OK;
        }
        if (mIsVideo) {
            // Request a sync frame so the resumed stream starts decodable.
            mEncoder->requestIDRFrame();
        }
        if (mFlags & FLAG_USE_SURFACE_INPUT) {
            resume(startTimeUs);
        } else {
            CHECK(mPuller != NULL);
            mPuller->resume();
        }
        return OK;
    }

    ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");

    status_t err = OK;

    if (mFlags & FLAG_USE_SURFACE_INPUT) {
        if (mEncoder != NULL) {
            // Un-suspend surface input; drop frames queued before start time.
            sp<AMessage> params = new AMessage;
            params->setInt32("drop-input-frames", false);
            if (startTimeUs >= 0) {
                params->setInt64("skip-frames-before", startTimeUs);
            }
            mEncoder->setParameters(params);
        }
    } else {
        CHECK(mPuller != NULL);
        sp<MetaData> meta = params;
        if (mSetEncoderFormat) {
            // Tell the source which pixel format / dataspace the encoder
            // wants (derived in initEncoder()).
            if (meta == NULL) {
                meta = new MetaData;
            }
            meta->setInt32(kKeyPixelFormat, mEncoderFormat);
            meta->setInt32(kKeyColorSpace, mEncoderDataSpace);
        }

        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector);
        err = mPuller->start(meta.get(), notify);
        if (err != OK) {
            return err;
        }
    }

    ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");

    mStarted = true;
    return OK;
}
827
onPause(int64_t pauseStartTimeUs)828 void MediaCodecSource::onPause(int64_t pauseStartTimeUs) {
829 if ((mFlags & FLAG_USE_SURFACE_INPUT) && (mEncoder != NULL)) {
830 sp<AMessage> params = new AMessage;
831 params->setInt32("drop-input-frames", true);
832 params->setInt64("drop-start-time-us", pauseStartTimeUs);
833 mEncoder->setParameters(params);
834 } else {
835 CHECK(mPuller != NULL);
836 mPuller->pause();
837 }
838 }
839
// Central state machine on the reflector's looper: puller notifications,
// encoder callbacks, and the start/stop/pause/parameter requests posted by
// the public API.
void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatPullerNotify:
        {
            int32_t eos = 0;
            if (msg->findInt32("eos", &eos) && eos) {
                ALOGV("puller (%s) reached EOS", mIsVideo ? "video" : "audio");
                signalEOS();
                break;
            }

            if (mEncoder == NULL) {
                // Late notification after teardown; nothing to feed.
                ALOGV("got msg '%s' after encoder shutdown.", msg->debugString().c_str());
                break;
            }

            feedEncoderInputBuffers();
            break;
        }
        case kWhatEncoderActivity:
        {
            if (mEncoder == NULL) {
                break;
            }

            int32_t cbID;
            CHECK(msg->findInt32("callbackID", &cbID));
            if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
                // A new input slot opened up; try to fill it from the puller.
                int32_t index;
                CHECK(msg->findInt32("index", &index));

                mAvailEncoderInputIndices.push_back(index);
                feedEncoderInputBuffers();
            } else if (cbID == MediaCodec::CB_OUTPUT_FORMAT_CHANGED) {
                // Re-derive our advertised MetaData from the new format.
                status_t err = mEncoder->getOutputFormat(&mOutputFormat);
                if (err != OK) {
                    signalEOS(err);
                    break;
                }
                sp<MetaData> meta = new MetaData;
                convertMessageToMetaData(mOutputFormat, meta);
                mMeta.lock().set(meta);
            } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
                int32_t index;
                size_t offset;
                size_t size;
                int64_t timeUs;
                int32_t flags;

                CHECK(msg->findInt32("index", &index));
                CHECK(msg->findSize("offset", &offset));
                CHECK(msg->findSize("size", &size));
                CHECK(msg->findInt64("timeUs", &timeUs));
                CHECK(msg->findInt32("flags", &flags));

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    mEncoder->releaseOutputBuffer(index);
                    signalEOS();
                    break;
                }

                sp<MediaCodecBuffer> outbuf;
                status_t err = mEncoder->getOutputBuffer(index, &outbuf);
                if (err != OK || outbuf == NULL || outbuf->data() == NULL
                        || outbuf->size() == 0) {
                    signalEOS();
                    break;
                }

                // Copy the encoded data into a MediaBuffer we own; the codec
                // buffer itself is released below.
                MediaBufferBase *mbuf = new MediaBuffer(outbuf->size());
                mbuf->setObserver(this);
                mbuf->add_ref();

                if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
                    if (mIsVideo) {
                        int64_t decodingTimeUs;
                        if (mFlags & FLAG_USE_SURFACE_INPUT) {
                            if (mFirstSampleSystemTimeUs < 0ll) {
                                mFirstSampleSystemTimeUs = systemTime() / 1000;
                                if (mPausePending) {
                                    // Deferred pause takes effect at the first
                                    // sample; drop this buffer.
                                    mPausePending = false;
                                    onPause(mFirstSampleSystemTimeUs);
                                    mbuf->release();
                                    break;
                                }
                            }
                            // Timestamp offset is already adjusted in GraphicBufferSource.
                            // GraphicBufferSource is supposed to discard samples
                            // queued before start, and offset timeUs by start time
                            CHECK_GE(timeUs, 0ll);
                            // TODO:
                            // Decoding time for surface source is unavailable,
                            // use presentation time for now. May need to move
                            // this logic into MediaCodec.
                            decodingTimeUs = timeUs;
                        } else {
                            // Buffer-fed path: decoding times were queued in
                            // feedEncoderInputBuffers(), in order.
                            CHECK(!mDecodingTimeQueue.empty());
                            decodingTimeUs = *(mDecodingTimeQueue.begin());
                            mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
                        }
                        mbuf->meta_data().setInt64(kKeyDecodingTime, decodingTimeUs);

                        ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
                                timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
                    } else {
                        int64_t driftTimeUs = 0;
#if DEBUG_DRIFT_TIME
                        CHECK(!mDriftTimeQueue.empty());
                        driftTimeUs = *(mDriftTimeQueue.begin());
                        mDriftTimeQueue.erase(mDriftTimeQueue.begin());
                        mbuf->meta_data().setInt64(kKeyDriftTime, driftTimeUs);
#endif // DEBUG_DRIFT_TIME
                        ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
                                timeUs, timeUs / 1E6, driftTimeUs);
                    }
                    mbuf->meta_data().setInt64(kKeyTime, timeUs);
                } else {
                    // Codec config data carries no presentation time.
                    mbuf->meta_data().setInt64(kKeyTime, 0ll);
                    mbuf->meta_data().setInt32(kKeyIsCodecConfig, true);
                }
                if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
                    mbuf->meta_data().setInt32(kKeyIsSyncFrame, true);
                }
                memcpy(mbuf->data(), outbuf->data(), outbuf->size());

                {
                    // Publish the buffer and wake any thread blocked in read().
                    Mutexed<Output>::Locked output(mOutput);
                    output->mBufferQueue.push_back(mbuf);
                    output->mCond.signal();
                }

                mEncoder->releaseOutputBuffer(index);
            } else if (cbID == MediaCodec::CB_ERROR) {
                status_t err;
                CHECK(msg->findInt32("err", &err));
                ALOGE("Encoder (%s) reported error : 0x%x",
                        mIsVideo ? "video" : "audio", err);
                if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
                    mStopping = true;
                    mPuller->stop();
                }
                signalEOS();
            }
            break;
        }
        case kWhatStart:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("meta", &obj));
            MetaData *params = static_cast<MetaData *>(obj.get());

            sp<AMessage> response = new AMessage;
            response->setInt32("err", onStart(params));
            response->postReply(replyID);
            break;
        }
        case kWhatStop:
        {
            ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");

            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            if (mOutput.lock()->mEncoderReachedEOS) {
                // if we already reached EOS, reply and return now
                ALOGI("encoder (%s) already stopped",
                        mIsVideo ? "video" : "audio");
                (new AMessage)->postReply(replyID);
                break;
            }

            // Reply is deferred until EOS propagates; signalEOS() answers
            // every waiter queued here.
            mStopReplyIDQueue.push_back(replyID);
            if (mStopping) {
                // nothing to do if we're already stopping, reply will be posted
                // to all when we're stopped.
                break;
            }

            mStopping = true;

            int64_t timeoutUs = kStopTimeoutUs;
            // if using surface, signal source EOS and wait for EOS to come back.
            // otherwise, stop puller (which also clears the input buffer queue)
            // and wait for the EOS message. We cannot call source->stop() because
            // the encoder may still be processing input buffers.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                mEncoder->signalEndOfInputStream();
                // Increase the timeout if there is delay in the GraphicBufferSource
                sp<AMessage> inputFormat;
                int64_t stopTimeOffsetUs;
                if (mEncoder->getInputFormat(&inputFormat) == OK &&
                        inputFormat->findInt64("android._stop-time-offset-us", &stopTimeOffsetUs) &&
                        stopTimeOffsetUs > 0) {
                    if (stopTimeOffsetUs > kMaxStopTimeOffsetUs) {
                        ALOGW("Source stopTimeOffsetUs %lld too large, limit at %lld us",
                                (long long)stopTimeOffsetUs, (long long)kMaxStopTimeOffsetUs);
                        stopTimeOffsetUs = kMaxStopTimeOffsetUs;
                    }
                    timeoutUs += stopTimeOffsetUs;
                } else {
                    // Use kMaxStopTimeOffsetUs if stop time offset is not provided by input source
                    timeoutUs = kMaxStopTimeOffsetUs;
                }
            } else {
                mPuller->stop();
            }

            // complete stop even if encoder/puller stalled
            sp<AMessage> timeoutMsg = new AMessage(kWhatStopStalled, mReflector);
            timeoutMsg->setInt32("generation", mGeneration);
            timeoutMsg->post(timeoutUs);
            break;
        }

        case kWhatStopStalled:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mGeneration) {
                // Stop already completed; this timeout is stale.
                break;
            }

            if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
                ALOGV("source (%s) stopping", mIsVideo ? "video" : "audio");
                mPuller->interruptSource();
                ALOGV("source (%s) stopped", mIsVideo ? "video" : "audio");
            }
            signalEOS();
            break;
        }

        case kWhatPause:
        {
            if (mFirstSampleSystemTimeUs < 0) {
                // No sample processed yet; defer the pause to the first one.
                mPausePending = true;
            } else {
                sp<RefBase> obj;
                CHECK(msg->findObject("meta", &obj));
                MetaData *params = static_cast<MetaData *>(obj.get());
                int64_t pauseStartTimeUs = -1;
                if (params == NULL || !params->findInt64(kKeyTime, &pauseStartTimeUs)) {
                    pauseStartTimeUs = -1ll;
                }
                onPause(pauseStartTimeUs);
            }
            break;
        }
        case kWhatSetInputBufferTimeOffset:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            status_t err = OK;
            CHECK(msg->findInt64("time-offset-us", &mInputBufferTimeOffsetUs));

            // Propagate the timestamp offset to GraphicBufferSource.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                sp<AMessage> params = new AMessage;
                params->setInt64("time-offset-us", mInputBufferTimeOffsetUs);
                err = mEncoder->setParameters(params);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }
        case kWhatSetStopTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));
            status_t err = OK;
            int64_t stopTimeUs;
            CHECK(msg->findInt64("stop-time-us", &stopTimeUs));

            // Propagate the stop time to GraphicBufferSource.
            if (mFlags & FLAG_USE_SURFACE_INPUT) {
                sp<AMessage> params = new AMessage;
                params->setInt64("stop-time-us", stopTimeUs);
                err = mEncoder->setParameters(params);
            } else {
                err = mPuller->setStopTimeUs(stopTimeUs);
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }
        case kWhatGetFirstSampleSystemTimeUs:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<AMessage> response = new AMessage;
            response->setInt64("time-us", mFirstSampleSystemTimeUs);
            response->postReply(replyID);
            break;
        }
        default:
            TRESPASS();
    }
}
1145
1146 } // namespace android
1147