/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>
#include <fcntl.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/stat.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "stagefright"
#include <media/stagefright/foundation/ADebug.h>

#include "jpeg.h"
#include "SineSource.h"

#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <media/DataSource.h>
#include <media/MediaSource.h>
#include <media/ICrypto.h>
#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>
#include "include/NuCachedSource2.h"
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/DataSourceFactory.h>
#include <media/stagefright/JPEGSource.h>
#include <media/stagefright/InterfaceUtils.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaExtractorFactory.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/SimpleDecodingSource.h>
#include <media/stagefright/Utils.h>
#include <media/mediametadataretriever.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>

#include <private/media/VideoFrame.h>

#include <gui/GLConsumer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>

#include <android/hardware/media/omx/1.0/IOmx.h>

using namespace android;

static long gNumRepetitions;
static long gMaxNumFrames;  // 0 means decode all available.
static long gReproduceBug;  // if not -1.
static bool gPreferSoftwareCodec;
static bool gForceToUseHardwareCodec;
static bool gPlaybackAudio;
static bool gWriteMP4;
static bool gDisplayHistogram;
static bool gVerbose = false;
static bool showProgress = true;
static String8 gWriteMP4Filename;
static String8 gComponentNameOverride;

static sp<ANativeWindow> gSurface;

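// Returns the current time in microseconds, as reported by gettimeofday().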
static int64_t getNowUs() {
    struct timeval tv;
    gettimeofday(&tv, NULL);

    return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
}

static int CompareIncreasing(const int64_t *a, const int64_t *b) {
    return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
}

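// Sorts the recorded per-buffer decode times and prints a 100-bucket
// histogram over the [min, max] range, labeling each bucket with the frame
// rate its decode time corresponds to.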
static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
    printf("decode times:\n");

    decodeTimesUs->sort(CompareIncreasing);

    size_t n = decodeTimesUs->size();
    int64_t minUs = decodeTimesUs->itemAt(0);
    int64_t maxUs = decodeTimesUs->itemAt(n - 1);

    printf("min decode time %" PRId64 " us (%.2f secs)\n", minUs, minUs / 1E6);
    printf("max decode time %" PRId64 " us (%.2f secs)\n", maxUs, maxUs / 1E6);

    size_t counts[100];
    for (size_t i = 0; i < 100; ++i) {
        counts[i] = 0;
    }

    for (size_t i = 0; i < n; ++i) {
        int64_t x = decodeTimesUs->itemAt(i);

        size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
        if (slot == 100) { slot = 99; }

        ++counts[slot];
    }

    for (size_t i = 0; i < 100; ++i) {
        int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);

        double fps = 1E6 / slotUs;
        printf("[%.2f fps]: %zu\n", fps, counts[i]);
    }
}

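// If the track metadata carries an AVC codec-specific config (avcC) blob,
// prints the profile and level bytes it advertises.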
static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
    uint32_t type;
    const void *data;
    size_t size;
    if (meta->findData(kKeyAVCC, &type, &data, &size)) {
        const uint8_t *ptr = (const uint8_t *)data;
        CHECK(size >= 7);
        CHECK(ptr[0] == 1);  // configurationVersion == 1
        uint8_t profile = ptr[1];
        uint8_t level = ptr[3];
        fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
    }
}

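// Reads every buffer from the source and appends its payload to the given
// file, ignoring format changes; used by the -d/-D dump options.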
static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
    FILE *out = fopen(filename.string(), "wb");

    CHECK_EQ((status_t)OK, source->start());

    status_t err;
    for (;;) {
        MediaBufferBase *mbuf;
        err = source->read(&mbuf);

        if (err == INFO_FORMAT_CHANGED) {
            continue;
        } else if (err != OK) {
            break;
        }

        if (gVerbose) {
            MetaDataBase &meta = mbuf->meta_data();
            fprintf(stdout, "sample format: %s\n", meta.toString().c_str());
        }

        CHECK_EQ(
                fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(),
                       1,
                       mbuf->range_length(),
                       out),
                mbuf->range_length());

        mbuf->release();
        mbuf = NULL;
    }

    CHECK_EQ((status_t)OK, source->stop());

    fclose(out);
    out = NULL;
}

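// Decodes (and optionally plays back) a single track. Unless the track is
// already raw audio, it is wrapped in a SimpleDecodingSource; depending on
// the global flags this either plays audio through AudioPlayer, exercises
// one of the seek-related bug-reproduction modes, or just decodes in a loop
// and reports throughput statistics.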
static void playSource(sp<MediaSource> &source) {
    sp<MetaData> meta = source->getFormat();

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));

    sp<MediaSource> rawSource;
    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
        rawSource = source;
    } else {
        int flags = 0;
        if (gPreferSoftwareCodec) {
            flags |= MediaCodecList::kPreferSoftwareCodecs;
        }
        if (gForceToUseHardwareCodec) {
            CHECK(!gPreferSoftwareCodec);
            flags |= MediaCodecList::kHardwareCodecsOnly;
        }
        rawSource = SimpleDecodingSource::Create(
                source, flags, gSurface,
                gComponentNameOverride.isEmpty() ? nullptr : gComponentNameOverride.c_str(),
                !gComponentNameOverride.isEmpty());
        if (rawSource == NULL) {
            return;
        }
        displayAVCProfileLevelIfPossible(meta);
    }

    source.clear();

    status_t err = rawSource->start();

    if (err != OK) {
        fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
        return;
    }

    if (gPlaybackAudio) {
        AudioPlayer *player = new AudioPlayer(NULL);
        player->setSource(rawSource);
        rawSource.clear();

        err = player->start(true /* sourceAlreadyStarted */);

        if (err == OK) {
            status_t finalStatus;
            while (!player->reachedEOS(&finalStatus)) {
                usleep(100000ll);
            }
        } else {
            fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
        }

        delete player;
        player = NULL;

        return;
    } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
        int64_t durationUs;
        CHECK(meta->findInt64(kKeyDuration, &durationUs));

        status_t err;
        MediaBufferBase *buffer;
        MediaSource::ReadOptions options;
        int64_t seekTimeUs = -1;
        for (;;) {
            err = rawSource->read(&buffer, &options);
            options.clearSeekTo();

            bool shouldSeek = false;
            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);

                printf("format changed.\n");
                continue;
            } else if (err != OK) {
                printf("reached EOF.\n");

                shouldSeek = true;
            } else {
                int64_t timestampUs;
                CHECK(buffer->meta_data().findInt64(kKeyTime, &timestampUs));

                bool failed = false;

                if (seekTimeUs >= 0) {
                    int64_t diff = timestampUs - seekTimeUs;

                    if (diff < 0) {
                        diff = -diff;
                    }

                    if ((gReproduceBug == 4 && diff > 500000)
                        || (gReproduceBug == 5 && timestampUs < 0)) {
                        printf("wanted: %.2f secs, got: %.2f secs\n",
                               seekTimeUs / 1E6, timestampUs / 1E6);

                        printf("ERROR: ");
                        failed = true;
                    }
                }

                printf("buffer has timestamp %" PRId64 " us (%.2f secs)\n",
                       timestampUs, timestampUs / 1E6);

                buffer->release();
                buffer = NULL;

                if (failed) {
                    break;
                }

                shouldSeek = ((double)rand() / RAND_MAX) < 0.1;

                if (gReproduceBug == 3) {
                    shouldSeek = false;
                }
            }

            seekTimeUs = -1;

            if (shouldSeek) {
                seekTimeUs = (rand() * (float)durationUs) / RAND_MAX;
                options.setSeekTo(seekTimeUs);

                printf("seeking to %" PRId64 " us (%.2f secs)\n",
                       seekTimeUs, seekTimeUs / 1E6);
            }
        }

        rawSource->stop();

        return;
    }

    int n = 0;
    int64_t startTime = getNowUs();

    long numIterationsLeft = gNumRepetitions;
    MediaSource::ReadOptions options;

    int64_t sumDecodeUs = 0;
    int64_t totalBytes = 0;

    Vector<int64_t> decodeTimesUs;

    while (numIterationsLeft-- > 0) {
        long numFrames = 0;

        MediaBufferBase *buffer;

        for (;;) {
            int64_t startDecodeUs = getNowUs();
            status_t err = rawSource->read(&buffer, &options);
            int64_t delayDecodeUs = getNowUs() - startDecodeUs;

            options.clearSeekTo();

            if (err != OK) {
                CHECK(buffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    printf("format changed.\n");
                    continue;
                }

                break;
            }

            if (buffer->range_length() > 0) {
                if (gDisplayHistogram && n > 0) {
                    // Ignore the first time since it includes some setup
                    // cost.
                    decodeTimesUs.push(delayDecodeUs);
                }

                if (gVerbose) {
                    MetaDataBase &meta = buffer->meta_data();
                    fprintf(stdout, "%ld sample format: %s\n", numFrames, meta.toString().c_str());
                } else if (showProgress && (n++ % 16) == 0) {
                    printf(".");
                    fflush(stdout);
                }
            }

            sumDecodeUs += delayDecodeUs;
            totalBytes += buffer->range_length();

            buffer->release();
            buffer = NULL;

            ++numFrames;
            if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
                break;
            }

            if (gReproduceBug == 1 && numFrames == 40) {
                printf("seeking past the end now.");
                options.setSeekTo(0x7fffffffL);
            } else if (gReproduceBug == 2 && numFrames == 40) {
                printf("seeking to 5 secs.");
                options.setSeekTo(5000000);
            }
        }

        if (showProgress) {
            printf("$");
            fflush(stdout);
        }

        options.setSeekTo(0);
    }

    rawSource->stop();
    printf("\n");

    int64_t delay = getNowUs() - startTime;
    if (!strncasecmp("video/", mime, 6)) {
        printf("avg. %.2f fps\n", n * 1E6 / delay);

        printf("avg. time to decode one buffer %.2f usecs\n",
               (double)sumDecodeUs / n);

        printf("decoded a total of %d frame(s).\n", n);

        if (gDisplayHistogram) {
            displayDecodeHistogram(&decodeTimesUs);
        }
    } else if (!strncasecmp("audio/", mime, 6)) {
        // Frame count makes less sense for audio, as the output buffer
        // sizes may be different across decoders.
        printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);

        printf("decoded a total of %" PRId64 " bytes\n", totalBytes);
    }
}

////////////////////////////////////////////////////////////////////////////////

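// MediaSource wrapper that marks sync (key) frames for writers that need
// kKeyIsSyncFrame. For AVC it scans each buffer for IDR NAL units and drops
// everything before the first IDR frame; sync-frame detection for MPEG-4 and
// H.263 is not implemented.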
struct DetectSyncSource : public MediaSource {
    explicit DetectSyncSource(const sp<MediaSource> &source);

    virtual status_t start(MetaData *params = NULL);
    virtual status_t stop();
    virtual sp<MetaData> getFormat();

    virtual status_t read(
            MediaBufferBase **buffer, const ReadOptions *options);

private:
    enum StreamType {
        AVC,
        MPEG4,
        H263,
        OTHER,
    };

    sp<MediaSource> mSource;
    StreamType mStreamType;
    bool mSawFirstIDRFrame;

    DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
};

DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
    : mSource(source),
      mStreamType(OTHER),
      mSawFirstIDRFrame(false) {
    const char *mime;
    CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));

    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        mStreamType = AVC;
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
        mStreamType = MPEG4;
        CHECK(!"sync frame detection not implemented yet for MPEG4");
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
        mStreamType = H263;
        CHECK(!"sync frame detection not implemented yet for H.263");
    }
}

status_t DetectSyncSource::start(MetaData *params) {
    mSawFirstIDRFrame = false;

    return mSource->start(params);
}

status_t DetectSyncSource::stop() {
    return mSource->stop();
}

sp<MetaData> DetectSyncSource::getFormat() {
    return mSource->getFormat();
}

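// Scans an Annex-B style buffer for a 00 00 01 start code followed by a NAL
// unit of type 5 (IDR slice); returns true if one is found.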
static bool isIDRFrame(MediaBufferBase *buffer) {
    const uint8_t *data =
        (const uint8_t *)buffer->data() + buffer->range_offset();
    size_t size = buffer->range_length();
    for (size_t i = 0; i + 3 < size; ++i) {
        if (!memcmp("\x00\x00\x01", &data[i], 3)) {
            uint8_t nalType = data[i + 3] & 0x1f;
            if (nalType == 5) {
                return true;
            }
        }
    }

    return false;
}

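// Passes buffers through from the wrapped source, tagging each one with
// kKeyIsSyncFrame. For AVC streams, buffers preceding the first IDR frame
// are released and skipped.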
status_t DetectSyncSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    for (;;) {
        status_t err = mSource->read(buffer, options);

        if (err != OK) {
            return err;
        }

        if (mStreamType == AVC) {
            bool isIDR = isIDRFrame(*buffer);
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, isIDR);
            if (isIDR) {
                mSawFirstIDRFrame = true;
            }
        } else {
            (*buffer)->meta_data().setInt32(kKeyIsSyncFrame, true);
        }

        if (mStreamType != AVC || mSawFirstIDRFrame) {
            break;
        }

        // Ignore everything up to the first IDR frame.
        (*buffer)->release();
        *buffer = NULL;
    }

    return OK;
}

////////////////////////////////////////////////////////////////////////////////

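// Muxes the selected tracks into an output file for the -w option. Note that
// despite the function name, the active #else branch writes an MPEG-2
// transport stream via MPEG2TSWriter; the MPEG4Writer path is compiled out.
// Sources without sync-frame info are wrapped in DetectSyncSource first.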
static void writeSourcesToMP4(
        Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
#if 0
    sp<MPEG4Writer> writer =
        new MPEG4Writer(gWriteMP4Filename.string());
#else
    int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
    if (fd < 0) {
        fprintf(stderr, "couldn't open file");
        return;
    }
    sp<MPEG2TSWriter> writer =
        new MPEG2TSWriter(fd);
#endif

    // at most one minute.
    writer->setMaxFileDuration(60000000ll);

    for (size_t i = 0; i < sources.size(); ++i) {
        sp<MediaSource> source = sources.editItemAt(i);

        CHECK_EQ(writer->addSource(
                    syncInfoPresent ? source : new DetectSyncSource(source)),
                (status_t)OK);
    }

    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyRealTimeRecording, false);
    CHECK_EQ(writer->start(params.get()), (status_t)OK);

    while (!writer->reachedEOS()) {
        usleep(100000);
    }
    writer->stop();
}

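// Seek-accuracy test for the -k option: seeks to the previous sync frame at
// 60 ms intervals across the track's duration and prints the requested time,
// the returned buffer's timestamp, and the difference between them.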
static void performSeekTest(const sp<MediaSource> &source) {
    CHECK_EQ((status_t)OK, source->start());

    int64_t durationUs;
    CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));

    for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
            seekTimeUs += 60000ll) {
        MediaSource::ReadOptions options;
        options.setSeekTo(
                seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);

        MediaBufferBase *buffer;
        status_t err;
        for (;;) {
            err = source->read(&buffer, &options);

            options.clearSeekTo();

            if (err == INFO_FORMAT_CHANGED) {
                CHECK(buffer == NULL);
                continue;
            }

            if (err != OK) {
                CHECK(buffer == NULL);
                break;
            }

            CHECK(buffer != NULL);

            if (buffer->range_length() > 0) {
                break;
            }

            buffer->release();
            buffer = NULL;
        }

        if (err == OK) {
            int64_t timeUs;
            CHECK(buffer->meta_data().findInt64(kKeyTime, &timeUs));

            printf("%" PRId64 "\t%" PRId64 "\t%" PRId64 "\n",
                   seekTimeUs, timeUs, seekTimeUs - timeUs);

            buffer->release();
            buffer = NULL;
        } else {
            printf("ERROR\n");
            break;
        }
    }

    CHECK_EQ((status_t)OK, source->stop());
}

static void usage(const char *me) {
    fprintf(stderr, "usage: %s [options] [input_filename]\n", me);
    fprintf(stderr, "       -h(elp)\n");
    fprintf(stderr, "       -a(udio)\n");
    fprintf(stderr, "       -n repetitions\n");
    fprintf(stderr, "       -l(ist) components\n");
    fprintf(stderr, "       -m max-number-of-frames-to-decode in each pass\n");
    fprintf(stderr, "       -b bug to reproduce\n");
    fprintf(stderr, "       -i(nfo) dump codec info (profiles and color formats supported, details)\n");
    fprintf(stderr, "       -t(humbnail) extract video thumbnail or album art\n");
    fprintf(stderr, "       -s(oftware) prefer software codec\n");
    fprintf(stderr, "       -r(hardware) force to use hardware codec\n");
    fprintf(stderr, "       -o playback audio\n");
    fprintf(stderr, "       -w(rite) filename (write to .mp4 file)\n");
    fprintf(stderr, "       -k seek test\n");
    fprintf(stderr, "       -N(ame) of the component\n");
    fprintf(stderr, "       -x display a histogram of decoding times/fps "
                    "(video only)\n");
    fprintf(stderr, "       -q don't show progress indicator\n");
    fprintf(stderr, "       -S allocate buffers from a surface\n");
    fprintf(stderr, "       -T allocate buffers from a surface texture\n");
    fprintf(stderr, "       -d(ump) output_filename (raw stream data to a file)\n");
    fprintf(stderr, "       -D(ump) output_filename (decoded PCM data to a file)\n");
    fprintf(stderr, "       -v be more verbose\n");
}

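// For the -i option: walks the MediaCodecList, groups decoders (or encoders)
// by supported media type, and prints each codec's aliases, attributes,
// owner, rank, profile/levels, color formats, and capability details.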
static void dumpCodecDetails(bool queryDecoders) {
    const char *codecType = queryDecoders? "Decoder" : "Encoder";
    printf("\n%s infos by media types:\n"
           "=============================\n", codecType);

    sp<IMediaCodecList> list = MediaCodecList::getInstance();
    size_t numCodecs = list->countCodecs();

    // gather all media types supported by codec class, and link to codecs that support them
    KeyedVector<AString, Vector<sp<MediaCodecInfo>>> allMediaTypes;
    for (size_t codec_ix = 0; codec_ix < numCodecs; ++codec_ix) {
        sp<MediaCodecInfo> info = list->getCodecInfo(codec_ix);
        if (info->isEncoder() == !queryDecoders) {
            Vector<AString> supportedMediaTypes;
            info->getSupportedMediaTypes(&supportedMediaTypes);
            if (!supportedMediaTypes.size()) {
                printf("warning: %s does not support any media types\n",
                        info->getCodecName());
            } else {
                for (const AString &mediaType : supportedMediaTypes) {
                    if (allMediaTypes.indexOfKey(mediaType) < 0) {
                        allMediaTypes.add(mediaType, Vector<sp<MediaCodecInfo>>());
                    }
                    allMediaTypes.editValueFor(mediaType).add(info);
                }
            }
        }
    }

    KeyedVector<AString, bool> visitedCodecs;
    for (size_t type_ix = 0; type_ix < allMediaTypes.size(); ++type_ix) {
        const AString &mediaType = allMediaTypes.keyAt(type_ix);
        printf("\nMedia type '%s':\n", mediaType.c_str());

        for (const sp<MediaCodecInfo> &info : allMediaTypes.valueAt(type_ix)) {
            sp<MediaCodecInfo::Capabilities> caps = info->getCapabilitiesFor(mediaType.c_str());
            if (caps == NULL) {
                printf("warning: %s does not have capabilities for type %s\n",
                        info->getCodecName(), mediaType.c_str());
                continue;
            }
            printf("  %s \"%s\" supports\n",
                       codecType, info->getCodecName());

            auto printList = [](const char *type, const Vector<AString> &values){
                printf("    %s: [", type);
                for (size_t j = 0; j < values.size(); ++j) {
                    printf("\n      %s%s", values[j].c_str(),
                            j == values.size() - 1 ? " " : ",");
                }
                printf("]\n");
            };

            if (visitedCodecs.indexOfKey(info->getCodecName()) < 0) {
                visitedCodecs.add(info->getCodecName(), true);
                {
                    Vector<AString> aliases;
                    info->getAliases(&aliases);
                    // quote alias
                    for (AString &alias : aliases) {
                        alias.insert("\"", 1, 0);
                        alias.append('"');
                    }
                    printList("aliases", aliases);
                }
                {
                    uint32_t attrs = info->getAttributes();
                    Vector<AString> list;
                    list.add(AStringPrintf("encoder: %d", !!(attrs & MediaCodecInfo::kFlagIsEncoder)));
                    list.add(AStringPrintf("vendor: %d", !!(attrs & MediaCodecInfo::kFlagIsVendor)));
                    list.add(AStringPrintf("software-only: %d", !!(attrs & MediaCodecInfo::kFlagIsSoftwareOnly)));
                    list.add(AStringPrintf("hw-accelerated: %d", !!(attrs & MediaCodecInfo::kFlagIsHardwareAccelerated)));
                    printList(AStringPrintf("attributes: %#x", attrs).c_str(), list);
                }

                printf("    owner: \"%s\"\n", info->getOwnerName());
                printf("    rank: %u\n", info->getRank());
            } else {
                printf("    aliases, attributes, owner, rank: see above\n");
            }

            {
                Vector<AString> list;
                Vector<MediaCodecInfo::ProfileLevel> profileLevels;
                caps->getSupportedProfileLevels(&profileLevels);
                for (const MediaCodecInfo::ProfileLevel &pl : profileLevels) {
                    const char *niceProfile =
                        mediaType.equalsIgnoreCase(MIMETYPE_AUDIO_AAC)   ? asString_AACObject(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCProfile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Profile(pl.mProfile) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Profile(pl.mProfile) :"??";
                    const char *niceLevel =
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG2) ? asString_MPEG2Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_H263)  ? asString_H263Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_MPEG4) ? asString_MPEG4Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AVC)   ? asString_AVCLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP8)   ? asString_VP8Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_HEVC)  ? asString_HEVCTierLevel(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_VP9)   ? asString_VP9Level(pl.mLevel) :
                        mediaType.equalsIgnoreCase(MIMETYPE_VIDEO_AV1)   ? asString_AV1Level(pl.mLevel) :
                        "??";

                    list.add(AStringPrintf("% 5u/% 5u (%s/%s)",
                            pl.mProfile, pl.mLevel, niceProfile, niceLevel));
                }
                printList("profile/levels", list);
            }

            {
                Vector<AString> list;
                Vector<uint32_t> colors;
                caps->getSupportedColorFormats(&colors);
                for (uint32_t color : colors) {
                    list.add(AStringPrintf("%#x (%s)", color,
                            asString_ColorFormat((int32_t)color)));
                }
                printList("colors", list);
            }

            printf("    details: %s\n", caps->getDetails()->debugString(6).c_str());
        }
    }
}

int main(int argc, char **argv) {
    android::ProcessState::self()->startThreadPool();

    bool audioOnly = false;
    bool listComponents = false;
    bool dumpCodecInfo = false;
    bool extractThumbnail = false;
    bool seekTest = false;
    bool useSurfaceAlloc = false;
    bool useSurfaceTexAlloc = false;
    bool dumpStream = false;
    bool dumpPCMStream = false;
    String8 dumpStreamFilename;
    gNumRepetitions = 1;
    gMaxNumFrames = 0;
    gReproduceBug = -1;
    gPreferSoftwareCodec = false;
    gForceToUseHardwareCodec = false;
    gPlaybackAudio = false;
    gWriteMP4 = false;
    gDisplayHistogram = false;

    sp<android::ALooper> looper;

    int res;
    while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
        switch (res) {
            case 'a':
            {
                audioOnly = true;
                break;
            }

            case 'q':
            {
                showProgress = false;
                break;
            }

            case 'd':
            {
                dumpStream = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'D':
            {
                dumpPCMStream = true;
                audioOnly = true;
                dumpStreamFilename.setTo(optarg);
                break;
            }

            case 'N':
            {
                gComponentNameOverride.setTo(optarg);
                break;
            }

            case 'l':
            {
                listComponents = true;
                break;
            }

            case 'm':
            case 'n':
            case 'b':
            {
                char *end;
                long x = strtol(optarg, &end, 10);

                if (*end != '\0' || end == optarg || x <= 0) {
                    x = 1;
                }

                if (res == 'n') {
                    gNumRepetitions = x;
                } else if (res == 'm') {
                    gMaxNumFrames = x;
                } else {
                    CHECK_EQ(res, 'b');
                    gReproduceBug = x;
                }
                break;
            }

            case 'w':
            {
                gWriteMP4 = true;
                gWriteMP4Filename.setTo(optarg);
                break;
            }

            case 'i':
            {
                dumpCodecInfo = true;
                break;
            }

            case 't':
            {
                extractThumbnail = true;
                break;
            }

            case 's':
            {
                gPreferSoftwareCodec = true;
                break;
            }

            case 'r':
            {
                gForceToUseHardwareCodec = true;
                break;
            }

            case 'o':
            {
                gPlaybackAudio = true;
                break;
            }

            case 'k':
            {
                seekTest = true;
                break;
            }

            case 'x':
            {
                gDisplayHistogram = true;
                break;
            }

            case 'S':
            {
                useSurfaceAlloc = true;
                break;
            }

            case 'T':
            {
                useSurfaceTexAlloc = true;
                break;
            }

            case 'v':
            {
                gVerbose = true;
                break;
            }

            case '?':
            case 'h':
            default:
            {
                usage(argv[0]);
                exit(1);
                break;
            }
        }
    }

    if (gPlaybackAudio && !audioOnly) {
        // This doesn't make any sense if we're decoding the video track.
        gPlaybackAudio = false;
    }

    argc -= optind;
    argv += optind;

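    // -t: use the media.player service's metadata retriever to grab a video
    // frame (written to /sdcard/out.jpg) or embedded album art from each
    // input file, then exit.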
    if (extractThumbnail) {
        sp<IServiceManager> sm = defaultServiceManager();
        sp<IBinder> binder = sm->getService(String16("media.player"));
        sp<IMediaPlayerService> service =
            interface_cast<IMediaPlayerService>(binder);

        CHECK(service.get() != NULL);

        sp<IMediaMetadataRetriever> retriever =
            service->createMetadataRetriever();

        CHECK(retriever != NULL);

        for (int k = 0; k < argc; ++k) {
            const char *filename = argv[k];

            bool failed = true;

            int fd = open(filename, O_RDONLY | O_LARGEFILE);
            CHECK_GE(fd, 0);

            off64_t fileSize = lseek64(fd, 0, SEEK_END);
            CHECK_GE(fileSize, 0ll);

            CHECK_EQ(retriever->setDataSource(fd, 0, fileSize), (status_t)OK);

            close(fd);
            fd = -1;

            sp<IMemory> mem =
                    retriever->getFrameAtTime(-1,
                            MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
                            HAL_PIXEL_FORMAT_RGB_565,
                            false /*metaOnly*/);

            if (mem != NULL) {
                failed = false;
                printf("getFrameAtTime(%s) => OK\n", filename);

                VideoFrame *frame = (VideoFrame *)mem->pointer();

                CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
                            frame->getFlattenedData(),
                            frame->mWidth, frame->mHeight), 0);
            }

            {
                mem = retriever->extractAlbumArt();

                if (mem != NULL) {
                    failed = false;
                    printf("extractAlbumArt(%s) => OK\n", filename);
                }
            }

            if (failed) {
                printf("both getFrameAtTime and extractAlbumArt "
                    "failed on file '%s'.\n", filename);
            }
        }

        return 0;
    }

    if (dumpCodecInfo) {
        dumpCodecDetails(true /* queryDecoders */);
        dumpCodecDetails(false /* queryDecoders */);
    }

    if (listComponents) {
        using ::android::hardware::hidl_vec;
        using ::android::hardware::hidl_string;
        using namespace ::android::hardware::media::omx::V1_0;
        sp<IOmx> omx = IOmx::getService();
        CHECK(omx.get() != nullptr);

        hidl_vec<IOmx::ComponentInfo> nodeList;
        auto transStatus = omx->listNodes([](
                const auto& status, const auto& nodeList) {
                    CHECK(status == Status::OK);
                    for (const auto& info : nodeList) {
                        printf("%s\t Roles: ", info.mName.c_str());
                        for (const auto& role : info.mRoles) {
                            printf("%s\t", role.c_str());
                        }
                    }
                });
        CHECK(transStatus.isOk());
    }

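    // -S/-T: give the decoder an output surface, either a visible surface
    // created through SurfaceComposerClient or an offscreen GLConsumer-backed
    // surface texture; gSurface is later handed to SimpleDecodingSource::Create().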
    sp<SurfaceComposerClient> composerClient;
    sp<SurfaceControl> control;

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        if (useSurfaceAlloc) {
            composerClient = new SurfaceComposerClient;
            CHECK_EQ(composerClient->initCheck(), (status_t)OK);

            control = composerClient->createSurface(
                    String8("A Surface"),
                    1280,
                    800,
                    PIXEL_FORMAT_RGB_565,
                    0);

            CHECK(control != NULL);
            CHECK(control->isValid());

            SurfaceComposerClient::Transaction{}
                    .setLayer(control, INT_MAX)
                    .show(control)
                    .apply();

            gSurface = control->getSurface();
            CHECK(gSurface != NULL);
        } else {
            CHECK(useSurfaceTexAlloc);

            sp<IGraphicBufferProducer> producer;
            sp<IGraphicBufferConsumer> consumer;
            BufferQueue::createBufferQueue(&producer, &consumer);
            sp<GLConsumer> texture = new GLConsumer(consumer, 0 /* tex */,
                    GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
                    false /* isControlledByApp */);
            gSurface = new Surface(producer);
        }
    }

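    // Process each remaining argument: build a source (JPEG, "sine:<rate>"
    // test tone, or an extracted track), then dispatch to the MP4/TS writer,
    // stream dump, seek test, or decode/playback path.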
    status_t err = OK;

    for (int k = 0; k < argc && err == OK; ++k) {
        bool syncInfoPresent = true;

        const char *filename = argv[k];

        sp<DataSource> dataSource =
            DataSourceFactory::CreateFromURI(NULL /* httpService */, filename);

        if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
            fprintf(stderr, "Unable to create data source.\n");
            return 1;
        }

        bool isJPEG = false;

        size_t len = strlen(filename);
        if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
            isJPEG = true;
        }

        Vector<sp<MediaSource> > mediaSources;
        sp<MediaSource> mediaSource;

        if (isJPEG) {
            mediaSource = new JPEGSource(dataSource);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else if (!strncasecmp("sine:", filename, 5)) {
            char *end;
            long sampleRate = strtol(filename + 5, &end, 10);

            if (end == filename + 5) {
                sampleRate = 44100;
            }
            mediaSource = new SineSource(sampleRate, 1);
            if (gWriteMP4) {
                mediaSources.push(mediaSource);
            }
        } else {
            sp<IMediaExtractor> extractor = MediaExtractorFactory::Create(dataSource);

            if (extractor == NULL) {
                fprintf(stderr, "could not create extractor.\n");
                return -1;
            }

            sp<MetaData> meta = extractor->getMetaData();

            if (meta != NULL) {
                const char *mime;
                if (!meta->findCString(kKeyMIMEType, &mime)) {
                    fprintf(stderr, "extractor did not provide MIME type.\n");
                    return -1;
                }

                if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
                    syncInfoPresent = false;
                }
            }

            size_t numTracks = extractor->countTracks();

            if (gWriteMP4) {
                bool haveAudio = false;
                bool haveVideo = false;
                for (size_t i = 0; i < numTracks; ++i) {
                    sp<MediaSource> source = CreateMediaSourceFromIMediaSource(
                            extractor->getTrack(i));
                    if (source == nullptr) {
                        fprintf(stderr, "skip NULL track %zu, track count %zu.\n", i, numTracks);
                        continue;
                    }

                    const char *mime;
                    CHECK(source->getFormat()->findCString(
                                kKeyMIMEType, &mime));

                    bool useTrack = false;
                    if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
                        haveAudio = true;
                        useTrack = true;
                    } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
                        haveVideo = true;
                        useTrack = true;
                    }

                    if (useTrack) {
                        mediaSources.push(source);

                        if (haveAudio && haveVideo) {
                            break;
                        }
                    }
                }
            } else {
                sp<MetaData> meta;
                size_t i;
                for (i = 0; i < numTracks; ++i) {
                    meta = extractor->getTrackMetaData(
                            i, MediaExtractor::kIncludeExtensiveMetaData);

                    if (meta == NULL) {
                        continue;
                    }
                    const char *mime;
                    meta->findCString(kKeyMIMEType, &mime);

                    if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
                        break;
                    }

                    if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
                        break;
                    }

                    meta = NULL;
                }

                if (meta == NULL) {
                    fprintf(stderr,
                            "No suitable %s track found. The '-a' option will "
                            "target audio tracks only, the default is to target "
                            "video tracks only.\n",
                            audioOnly ? "audio" : "video");
                    return -1;
                }

                int64_t thumbTimeUs;
                if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
                    printf("thumbnailTime: %" PRId64 " us (%.2f secs)\n",
                           thumbTimeUs, thumbTimeUs / 1E6);
                }

                mediaSource = CreateMediaSourceFromIMediaSource(extractor->getTrack(i));
                if (mediaSource == nullptr) {
                    fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks);
                    return -1;
                }
            }
        }

        if (gWriteMP4) {
            writeSourcesToMP4(mediaSources, syncInfoPresent);
        } else if (dumpStream) {
            dumpSource(mediaSource, dumpStreamFilename);
        } else if (dumpPCMStream) {
            sp<MediaSource> decSource = SimpleDecodingSource::Create(mediaSource);
            dumpSource(decSource, dumpStreamFilename);
        } else if (seekTest) {
            performSeekTest(mediaSource);
        } else {
            playSource(mediaSource);
        }
    }

    if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
        gSurface.clear();

        if (useSurfaceAlloc) {
            composerClient->dispose();
        }
    }

    return 0;
}