/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.content.pm.PackageManager;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;
import android.view.Surface;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import android.opengl.GLES20;
import javax.microedition.khronos.opengles.GL10;

import java.io.IOException;
import java.lang.System;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Vector;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.zip.CRC32;

@MediaHeavyPresubmitTest
@AppModeFull
public class AdaptivePlaybackTest extends MediaPlayerTestBase {
    private static final String TAG = "AdaptivePlaybackTest";
    private boolean verify = false;
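    // When true (set by the verifyAll/verifySW/verifyHW entry points), the per-test loops
    // below take larger strides through their frame counts, so far fewer steps are run.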
    private static final int MIN_FRAMES_BEFORE_DRC = 2;

    private static boolean sIsAtLeastS = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.S);

    public Iterable<Codec> H264(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_AVC,
                "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                "video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                "bbb_s1_720x480_mp4_h264_mp3_2mbps_30fps_aac_lc_5ch_320kbps_48000hz.mp4");
    }

    public Iterable<Codec> HEVC(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_HEVC,
                "bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
                "bbb_s4_1280x720_mp4_hevc_mp31_4mbps_30fps_aac_he_stereo_80kbps_32000hz.mp4",
                "bbb_s1_352x288_mp4_hevc_mp2_600kbps_30fps_aac_he_stereo_96kbps_48000hz.mp4");
    }

    public Iterable<Codec> Mpeg2(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_MPEG2,
                "video_640x360_mp4_mpeg2_2000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
                "video_1280x720_mp4_mpeg2_3000kbps_30fps_aac_stereo_128kbps_48000hz.mp4");
    }

    public Iterable<Codec> H263(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_H263,
                "video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",
                "video_352x288_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp");
    }

    public Iterable<Codec> Mpeg4(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_MPEG4,
                "video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                "video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                "video_176x144_mp4_mpeg4_300kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
    }

    public Iterable<Codec> VP8(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_VP8,
                "video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
                "bbb_s3_1280x720_webm_vp8_8mbps_60fps_opus_6ch_384kbps_48000hz.webm",
                "bbb_s1_320x180_webm_vp8_800kbps_30fps_opus_5ch_320kbps_48000hz.webm");
    }

    public Iterable<Codec> VP9(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_VP9,
                "video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
                "bbb_s4_1280x720_webm_vp9_0p31_4mbps_30fps_opus_stereo_128kbps_48000hz.webm",
                "bbb_s1_320x180_webm_vp9_0p11_600kbps_30fps_vorbis_mono_64kbps_48000hz.webm");
    }

    public Iterable<Codec> AV1(CodecFactory factory) {
        return factory.createCodecList(
                MediaFormat.MIMETYPE_VIDEO_AV1,
                "video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                "video_1280x720_webm_av1_2000kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                "video_320x180_webm_av1_200kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
    }

    CodecFactory ALL = new CodecFactory();
    CodecFactory SW  = new SWCodecFactory();
    CodecFactory HW  = new HWCodecFactory();

    public Iterable<Codec> H264()  { return H264(ALL);  }
    public Iterable<Codec> HEVC()  { return HEVC(ALL);  }
    public Iterable<Codec> VP8()   { return VP8(ALL);   }
    public Iterable<Codec> VP9()   { return VP9(ALL);   }
    public Iterable<Codec> AV1()   { return AV1(ALL);   }
    public Iterable<Codec> Mpeg2() { return Mpeg2(ALL); }
    public Iterable<Codec> Mpeg4() { return Mpeg4(ALL); }
    public Iterable<Codec> H263()  { return H263(ALL);  }

    public Iterable<Codec> AllCodecs() {
        return chain(H264(ALL), HEVC(ALL), VP8(ALL), VP9(ALL), AV1(ALL), Mpeg2(ALL), Mpeg4(ALL), H263(ALL));
    }

    public Iterable<Codec> SWCodecs() {
        return chain(H264(SW), HEVC(SW), VP8(SW), VP9(SW), AV1(SW), Mpeg2(SW), Mpeg4(SW), H263(SW));
    }

    public Iterable<Codec> HWCodecs() {
        return chain(H264(HW), HEVC(HW), VP8(HW), VP9(HW), AV1(HW), Mpeg2(HW), Mpeg4(HW), H263(HW));
    }

    /* tests for adaptive codecs */
    Test adaptiveEarlyEos     = new EarlyEosTest().adaptive();
    Test adaptiveEosFlushSeek = new EosFlushSeekTest().adaptive();
    Test adaptiveSkipAhead    = new AdaptiveSkipTest(true /* forward */);
    Test adaptiveSkipBack     = new AdaptiveSkipTest(false /* forward */);

    /* DRC tests for adaptive codecs */
    Test adaptiveReconfigDrc      = new ReconfigDrcTest().adaptive();
    Test adaptiveSmallReconfigDrc = new ReconfigDrcTest().adaptiveSmall();
    Test adaptiveDrc      = new AdaptiveDrcTest(); /* adaptive */
    Test adaptiveSmallDrc = new AdaptiveDrcTest().adaptiveSmall();

    /* tests for regular codecs */
    Test earlyEos          = new EarlyEosTest();
    Test eosFlushSeek      = new EosFlushSeekTest();
    Test flushConfigureDrc = new ReconfigDrcTest();

    Test[] allTests = {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveSkipAhead,
        adaptiveSkipBack,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        adaptiveDrc,
        adaptiveSmallDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    };

    /* helpers to run sets of tests */
    public void runEOS() { ex(AllCodecs(), new Test[] {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    }); }

    public void runAll() { ex(AllCodecs(), allTests); }
    public void runSW()  { ex(SWCodecs(),  allTests); }
    public void runHW()  { ex(HWCodecs(),  allTests); }

    public void verifyAll() { verify = true; try { runAll(); } finally { verify = false; } }
    public void verifySW()  { verify = true; try { runSW();  } finally { verify = false; } }
    public void verifyHW()  { verify = true; try { runHW();  } finally { verify = false; } }

    public void runH264()  { ex(H264(),  allTests); }
    public void runHEVC()  { ex(HEVC(),  allTests); }
    public void runVP8()   { ex(VP8(),   allTests); }
    public void runVP9()   { ex(VP9(),   allTests); }
    public void runAV1()   { ex(AV1(),   allTests); }
    public void runMpeg2() { ex(Mpeg2(), allTests); }
    public void runMpeg4() { ex(Mpeg4(), allTests); }
    public void runH263()  { ex(H263(),  allTests); }

    public void onlyH264HW()  { ex(H264(HW),  allTests); }
    public void onlyHEVCHW()  { ex(HEVC(HW),  allTests); }
    public void onlyVP8HW()   { ex(VP8(HW),   allTests); }
    public void onlyVP9HW()   { ex(VP9(HW),   allTests); }
    public void onlyAV1HW()   { ex(AV1(HW),   allTests); }
    public void onlyMpeg2HW() { ex(Mpeg2(HW), allTests); }
    public void onlyMpeg4HW() { ex(Mpeg4(HW), allTests); }
    public void onlyH263HW()  { ex(H263(HW),  allTests); }

    public void onlyH264SW()  { ex(H264(SW),  allTests); }
    public void onlyHEVCSW()  { ex(HEVC(SW),  allTests); }
    public void onlyVP8SW()   { ex(VP8(SW),   allTests); }
    public void onlyVP9SW()   { ex(VP9(SW),   allTests); }
    public void onlyAV1SW()   { ex(AV1(SW),   allTests); }
    public void onlyMpeg2SW() { ex(Mpeg2(SW), allTests); }
    public void onlyMpeg4SW() { ex(Mpeg4(SW), allTests); }
    public void onlyH263SW()  { ex(H263(SW),  allTests); }

    public void bytebuffer() { ex(H264(SW), new EarlyEosTest().byteBuffer()); }
    public void onlyTexture() { ex(H264(HW), new EarlyEosTest().texture()); }

    /* individual tests */
    public void testH264_adaptiveEarlyEos()  { ex(H264(),  adaptiveEarlyEos); }
    public void testHEVC_adaptiveEarlyEos()  { ex(HEVC(),  adaptiveEarlyEos); }
    public void testVP8_adaptiveEarlyEos()   { ex(VP8(),   adaptiveEarlyEos); }
    public void testVP9_adaptiveEarlyEos()   { ex(VP9(),   adaptiveEarlyEos); }
    public void testAV1_adaptiveEarlyEos()   { ex(AV1(),   adaptiveEarlyEos); }
    public void testMpeg2_adaptiveEarlyEos() { ex(Mpeg2(), adaptiveEarlyEos); }
    public void testMpeg4_adaptiveEarlyEos() { ex(Mpeg4(), adaptiveEarlyEos); }
    public void testH263_adaptiveEarlyEos()  { ex(H263(),  adaptiveEarlyEos); }

    public void testH264_adaptiveEosFlushSeek()  { ex(H264(),  adaptiveEosFlushSeek); }
    public void testHEVC_adaptiveEosFlushSeek()  { ex(HEVC(),  adaptiveEosFlushSeek); }
    public void testVP8_adaptiveEosFlushSeek()   { ex(VP8(),   adaptiveEosFlushSeek); }
    public void testVP9_adaptiveEosFlushSeek()   { ex(VP9(),   adaptiveEosFlushSeek); }
    public void testAV1_adaptiveEosFlushSeek()   { ex(AV1(),   adaptiveEosFlushSeek); }
    public void testMpeg2_adaptiveEosFlushSeek() { ex(Mpeg2(), adaptiveEosFlushSeek); }
    public void testMpeg4_adaptiveEosFlushSeek() { ex(Mpeg4(), adaptiveEosFlushSeek); }
    public void testH263_adaptiveEosFlushSeek()  { ex(H263(),  adaptiveEosFlushSeek); }

    public void testH264_adaptiveSkipAhead()  { ex(H264(),  adaptiveSkipAhead); }
    public void testHEVC_adaptiveSkipAhead()  { ex(HEVC(),  adaptiveSkipAhead); }
    public void testVP8_adaptiveSkipAhead()   { ex(VP8(),   adaptiveSkipAhead); }
    public void testVP9_adaptiveSkipAhead()   { ex(VP9(),   adaptiveSkipAhead); }
    public void testAV1_adaptiveSkipAhead()   { ex(AV1(),   adaptiveSkipAhead); }
    public void testMpeg2_adaptiveSkipAhead() { ex(Mpeg2(), adaptiveSkipAhead); }
    public void testMpeg4_adaptiveSkipAhead() { ex(Mpeg4(), adaptiveSkipAhead); }
    public void testH263_adaptiveSkipAhead()  { ex(H263(),  adaptiveSkipAhead); }

    public void testH264_adaptiveSkipBack()  { ex(H264(),  adaptiveSkipBack); }
    public void testHEVC_adaptiveSkipBack()  { ex(HEVC(),  adaptiveSkipBack); }
    public void testVP8_adaptiveSkipBack()   { ex(VP8(),   adaptiveSkipBack); }
    public void testVP9_adaptiveSkipBack()   { ex(VP9(),   adaptiveSkipBack); }
    public void testAV1_adaptiveSkipBack()   { ex(AV1(),   adaptiveSkipBack); }
    public void testMpeg2_adaptiveSkipBack() { ex(Mpeg2(), adaptiveSkipBack); }
    public void testMpeg4_adaptiveSkipBack() { ex(Mpeg4(), adaptiveSkipBack); }
    public void testH263_adaptiveSkipBack()  { ex(H263(),  adaptiveSkipBack); }

    public void testH264_adaptiveReconfigDrc()  { ex(H264(),  adaptiveReconfigDrc); }
    public void testHEVC_adaptiveReconfigDrc()  { ex(HEVC(),  adaptiveReconfigDrc); }
    public void testVP8_adaptiveReconfigDrc()   { ex(VP8(),   adaptiveReconfigDrc); }
    public void testVP9_adaptiveReconfigDrc()   { ex(VP9(),   adaptiveReconfigDrc); }
    public void testAV1_adaptiveReconfigDrc()   { ex(AV1(),   adaptiveReconfigDrc); }
    public void testMpeg2_adaptiveReconfigDrc() { ex(Mpeg2(), adaptiveReconfigDrc); }
    public void testMpeg4_adaptiveReconfigDrc() { ex(Mpeg4(), adaptiveReconfigDrc); }
    public void testH263_adaptiveReconfigDrc()  { ex(H263(),  adaptiveReconfigDrc); }

    public void testH264_adaptiveSmallReconfigDrc()  { ex(H264(),  adaptiveSmallReconfigDrc); }
    public void testHEVC_adaptiveSmallReconfigDrc()  { ex(HEVC(),  adaptiveSmallReconfigDrc); }
    public void testVP8_adaptiveSmallReconfigDrc()   { ex(VP8(),   adaptiveSmallReconfigDrc); }
    public void testVP9_adaptiveSmallReconfigDrc()   { ex(VP9(),   adaptiveSmallReconfigDrc); }
    public void testAV1_adaptiveSmallReconfigDrc()   { ex(AV1(),   adaptiveSmallReconfigDrc); }
    public void testMpeg2_adaptiveSmallReconfigDrc() { ex(Mpeg2(), adaptiveSmallReconfigDrc); }
    public void testMpeg4_adaptiveSmallReconfigDrc() { ex(Mpeg4(), adaptiveSmallReconfigDrc); }
    public void testH263_adaptiveSmallReconfigDrc()  { ex(H263(),  adaptiveSmallReconfigDrc); }

    public void testH264_adaptiveDrc() { ex(H264(), adaptiveDrc); }
    public void testHEVC_adaptiveDrc() { ex(HEVC(), adaptiveDrc); }
    public void testVP8_adaptiveDrc()  { ex(VP8(),  adaptiveDrc); }
    public void testVP9_adaptiveDrc()  { ex(VP9(),  adaptiveDrc); }
    public void testAV1_adaptiveDrc()  { ex(AV1(),  adaptiveDrc); }
    public void testMpeg2_adaptiveDrc() { ex(Mpeg2(), adaptiveDrc); }
    public void testMpeg4_adaptiveDrc() { ex(Mpeg4(), adaptiveDrc); }
    public void testH263_adaptiveDrc() { ex(H263(), adaptiveDrc); }

    public void testH264_adaptiveDrcEarlyEos() { ex(H264(), new AdaptiveDrcEarlyEosTest()); }
    public void testHEVC_adaptiveDrcEarlyEos() { ex(HEVC(), new AdaptiveDrcEarlyEosTest()); }
    public void testVP8_adaptiveDrcEarlyEos()  { ex(VP8(),  new AdaptiveDrcEarlyEosTest()); }
    public void testVP9_adaptiveDrcEarlyEos()  { ex(VP9(),  new AdaptiveDrcEarlyEosTest()); }
    public void testAV1_adaptiveDrcEarlyEos()  { ex(AV1(),  new AdaptiveDrcEarlyEosTest()); }
    public void testMpeg2_adaptiveDrcEarlyEos(){ ex(Mpeg2(), new AdaptiveDrcEarlyEosTest()); }

    public void testH264_adaptiveSmallDrc()  { ex(H264(),  adaptiveSmallDrc); }
    public void testHEVC_adaptiveSmallDrc()  { ex(HEVC(),  adaptiveSmallDrc); }
    public void testVP8_adaptiveSmallDrc()   { ex(VP8(),   adaptiveSmallDrc); }
    public void testVP9_adaptiveSmallDrc()   { ex(VP9(),   adaptiveSmallDrc); }
    public void testAV1_adaptiveSmallDrc()   { ex(AV1(),   adaptiveSmallDrc); }
    public void testMpeg2_adaptiveSmallDrc() { ex(Mpeg2(), adaptiveSmallDrc); }

    public void testH264_earlyEos()  { ex(H264(),  earlyEos); }
    public void testHEVC_earlyEos()  { ex(HEVC(),  earlyEos); }
    public void testVP8_earlyEos()   { ex(VP8(),   earlyEos); }
    public void testVP9_earlyEos()   { ex(VP9(),   earlyEos); }
    public void testAV1_earlyEos()   { ex(AV1(),   earlyEos); }
    public void testMpeg2_earlyEos() { ex(Mpeg2(), earlyEos); }
    public void testMpeg4_earlyEos() { ex(Mpeg4(), earlyEos); }
    public void testH263_earlyEos()  { ex(H263(),  earlyEos); }

    public void testH264_eosFlushSeek()  { ex(H264(),  eosFlushSeek); }
    public void testHEVC_eosFlushSeek()  { ex(HEVC(),  eosFlushSeek); }
    public void testVP8_eosFlushSeek()   { ex(VP8(),   eosFlushSeek); }
    public void testVP9_eosFlushSeek()   { ex(VP9(),   eosFlushSeek); }
    public void testAV1_eosFlushSeek()   { ex(AV1(),   eosFlushSeek); }
    public void testMpeg2_eosFlushSeek() { ex(Mpeg2(), eosFlushSeek); }
    public void testMpeg4_eosFlushSeek() { ex(Mpeg4(), eosFlushSeek); }
    public void testH263_eosFlushSeek()  { ex(H263(),  eosFlushSeek); }

    public void testH264_flushConfigureDrc()  { ex(H264(),  flushConfigureDrc); }
    public void testHEVC_flushConfigureDrc()  { ex(HEVC(),  flushConfigureDrc); }
    public void testVP8_flushConfigureDrc()   { ex(VP8(),   flushConfigureDrc); }
    public void testVP9_flushConfigureDrc()   { ex(VP9(),   flushConfigureDrc); }
    public void testAV1_flushConfigureDrc()   { ex(AV1(),   flushConfigureDrc); }
    public void testMpeg2_flushConfigureDrc() { ex(Mpeg2(), flushConfigureDrc); }
    public void testMpeg4_flushConfigureDrc() { ex(Mpeg4(), flushConfigureDrc); }
    public void testH263_flushConfigureDrc()  { ex(H263(),  flushConfigureDrc); }

    /* only use unchecked exceptions to allow brief test methods */
    private void ex(Iterable<Codec> codecList, Test test) {
        ex(codecList, new Test[] { test } );
    }

    private void ex(Iterable<Codec> codecList, Test[] testList) {
        if (codecList == null) {
337             Log.i(TAG, "CodecList was empty. Skipping test.");
            return;
        }

        TestList tests = new TestList();
        for (Codec c : codecList) {
            for (Test test : testList) {
                if (test.isValid(c)) {
                    test.addTests(tests, c);
                }
            }
        }
        try {
            tests.run();
        } catch (Throwable t) {
            throw new RuntimeException(t);
        }
    }

    /* need an inner class to have access to the activity */
    abstract class ActivityTest extends Test {
        TestSurface mNullSurface = new ActivitySurface(null);
        protected TestSurface getSurface() {
            if (mUseSurface) {
                return new ActivitySurface(getActivity().getSurfaceHolder().getSurface());
            } else if (mUseSurfaceTexture) {
                return new DecoderSurface(1280, 720, mCRC);
            }
            return mNullSurface;
        }
    }

    static final int NUM_FRAMES = 50;

    /**
     * Queue some frames with an EOS on the last one.  Test that we have decoded as many
     * frames as we queued.  This tests the EOS handling of the codec to see if all queued
     * (and out-of-order) frames are actually decoded and returned.
     *
     * Also test flushing prior to sending CSD, and immediately after sending CSD.
     */
    class EarlyEosTest extends ActivityTest {
        // using bitfields to create a directed state graph that terminates at FLUSH_NEVER
        static final int FLUSH_BEFORE_CSD = (1 << 1);
        static final int FLUSH_AFTER_CSD = (1 << 0);
        static final int FLUSH_NEVER = 0;

        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            int state = FLUSH_BEFORE_CSD;
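            // "state >>= 1" below advances the flush mode one step per iteration:
            // FLUSH_BEFORE_CSD -> FLUSH_AFTER_CSD -> FLUSH_NEVER (and stays there).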
            for (int i = NUM_FRAMES / 2; i > 0; --i, state >>= 1) {
                final int queuedFrames = i;
                final int earlyFlushMode = state;
                tests.add(
                    new Step("testing early EOS at " + queuedFrames, this, c) {
                        public void run() {
                            Decoder decoder = new Decoder(c.name);
                            try {
                                MediaFormat fmt = stepFormat();
                                MediaFormat configFmt = fmt;
                                if (earlyFlushMode == FLUSH_BEFORE_CSD) {
                                    // flush before CSD requires not submitting CSD with configure
                                    configFmt = Media.removeCSD(fmt);
                                }
                                decoder.configureAndStart(configFmt, stepSurface());
                                if (earlyFlushMode != FLUSH_NEVER) {
                                    decoder.flush();
                                    // We must always queue CSD after a flush that may happen
                                    // before we receive the output format change.  This should
                                    // work even after we receive the format change.
                                    decoder.queueCSD(fmt);
                                }
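                                // queueInputBufferRange() returns the frame count negated when
                                // EOS was received, so the leading minus sign yields a positive
                                // count only if EOS actually arrived.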
                                int decodedFrames = -decoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames <= 0) {
                                    Log.w(TAG, "Did not receive EOS -- negating frame count");
                                }
                                decoder.stop();
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                            } finally {
                                warn(decoder.getWarnings());
                                decoder.releaseQuietly();
                            }
                        }
                    });
                if (verify) {
                    i >>= 1;
                }
            }
        }
    };

    /**
     * Similar to EarlyEosTest, but we keep the component alive and running in between the steps.
     * This is how seeking should be done if all frames must be outputted.  This also tests that
     * PTS can be repeated after flush.
     */
    class EosFlushSeekTest extends ActivityTest {
        Decoder mDecoder; // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing EOS & flush before seek - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                    }});

            for (int i = NUM_FRAMES; i > 0; i--) {
                final int queuedFrames = i;
                tests.add(
                    new Step("testing EOS & flush before seeking after " + queuedFrames +
                            " frames", this, c) {
                        public void run() {
                            int decodedFrames = -mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    queuedFrames,
                                    true /* sendEos */,
                                    true /* waitForEos */);
                            if (decodedFrames != queuedFrames) {
                                warn("decoded " + decodedFrames + " frames out of " +
                                        queuedFrames + " queued");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();
                            mDecoder.flush();
                            // First run will trigger output format change exactly once,
                            // and subsequent runs should not trigger format change.
                            // This part of the test is new for Android 12.
                            if (sIsAtLeastS) {
                                assertEquals(1, mDecoder.getOutputFormatChangeCount());
                            }
                        }
                    });
                if (verify) {
                    i >>= 1;
                }
            }

            tests.add(
                new Step("testing EOS & flush before seek - finally", this, c) {
                    public void run() {
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    /**
     * Similar to EosFlushSeekTest, but we change the media size between the steps.
     * This is how dynamic resolution switching can be done on codecs that do not support
     * adaptive playback.
     */
    class ReconfigDrcTest extends ActivityTest {
        Decoder mDecoder;  // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with reconfigure - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                    }});

            for (int i = NUM_FRAMES, ix = 0; i > 0; i--, ix++) {
                final int queuedFrames = i;
                final int mediaIx = ix % c.mediaList.length;
                tests.add(
                    new Step("testing DRC with reconfigure after " + queuedFrames + " frames",
                            this, c, mediaIx) {
                        public void run() {
                            try {
                                mDecoder.configureAndStart(stepFormat(), stepSurface());
                                int decodedFrames = -mDecoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                                warn(mDecoder.getWarnings());
                                mDecoder.clearWarnings();
                                mDecoder.flush();
                            } finally {
                                mDecoder.stop();
                            }
                        }
                    });
                if (verify) {
                    i >>= 1;
                }
            }
            tests.add(
                new Step("testing DRC with reconfigure - finally", this, c) {
                    public void run() {
                        mDecoder.release();
                    }});
        }
    };

    /* ADAPTIVE-ONLY TESTS - only run on codecs that support adaptive playback */

    /**
     * Test dynamic resolution change support.  Queue various sized media segments
     * with different resolutions, verify that all queued frames were decoded.  Here
     * PTS will grow between segments.
     */
    class AdaptiveDrcTest extends ActivityTest {
        Decoder mDecoder;
        int mAdjustTimeUs;
        int mDecodedFrames;
        int mQueuedFrames;

        public AdaptiveDrcTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with no reconfigure - init", this, c) {
                    public void run() throws Throwable {
                        // FIXME wait 2 seconds to allow system to free up previous codecs
                        try {
                            Thread.sleep(2000);
                        } catch (InterruptedException e) {}
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = NUM_FRAMES, ix = 0; i >= MIN_FRAMES_BEFORE_DRC; i--, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                tests.add(
                    new Step("testing DRC with no reconfigure after " + i + " frames",
                            this, c, mediaIx) {
                        public void run() throws Throwable {
                            mQueuedFrames += segmentSize;
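                            // The last segment carries the EOS.  Normally that is the
                            // MIN_FRAMES_BEFORE_DRC-sized segment; in verify mode the sizes
                            // shrink by halving (see i >>= 1 below), so treat the segment as
                            // last once the next halved size would reach the minimum.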
                            boolean lastSequence = segmentSize == MIN_FRAMES_BEFORE_DRC;
                            if (verify) {
                                lastSequence = (segmentSize >> 1) <= MIN_FRAMES_BEFORE_DRC;
                            }
                            int frames = mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    segmentSize,
                                    lastSequence /* sendEos */,
                                    lastSequence /* expectEos */,
                                    mAdjustTimeUs,
                                    // Try sleeping after first queue so that we can verify
                                    // output format change event happens at the right time.
                                    true /* sleepAfterFirstQueue */);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mDecodedFrames += Math.abs(frames);
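                            // Shift the timestamp base past the end of this segment so PTS keeps
                            // increasing across the adaptive (DRC) transition into the next one.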
                            mAdjustTimeUs += 1 + stepMedia().getTimestampRangeValue(
                                    0, segmentSize, Media.RANGE_END);
                        }});
                if (verify) {
                    i >>= 1;
                }
            }
            tests.add(
631                 new Step("testing DRC with no reconfigure - init", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " +
                                    mQueuedFrames + " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }
                });
        }
    };

    /**
     * Queue EOS shortly after a dynamic resolution change.  Test that all frames were
     * decoded.
     */
    class AdaptiveDrcEarlyEosTest extends ActivityTest {
        public AdaptiveDrcEarlyEosTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public Step testStep(final Codec c, final int framesBeforeDrc,
                final int framesBeforeEos) {
            return new Step("testing DRC with no reconfigure after " + framesBeforeDrc +
                    " frames and subsequent EOS after " + framesBeforeEos + " frames",
                    this, c) {
                public void run() throws Throwable {
                    Decoder decoder = new Decoder(c.name);
                    int queuedFrames = framesBeforeDrc + framesBeforeEos;
                    int framesA = 0;
                    int framesB = 0;
                    try {
                        decoder.configureAndStart(stepFormat(), stepSurface());
                        Media media = c.mediaList[0];

                        framesA = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeDrc,
                                false /* sendEos */,
                                false /* expectEos */);
                        if (framesA < 0) {
                            warn("received unexpected EOS, received " + (-framesA) + " frames");
                        }
                        long adjustTimeUs = 1 + media.getTimestampRangeValue(
                                0, framesBeforeDrc, Media.RANGE_END);

                        media = c.mediaList[1];
                        framesB = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeEos,
                                true /* sendEos */,
                                true /* expectEos */,
                                adjustTimeUs,
                                false /* sleepAfterFirstQueue */);
                        if (framesB >= 0) {
696                             warn("did not receive EOS, received " + (-framesB) + " frames");
                        }
                        decoder.stop();
                        warn(decoder.getWarnings());
                    } finally {
                        int decodedFrames = Math.abs(framesA) + Math.abs(framesB);
                        if (decodedFrames != queuedFrames) {
                            warn("decoded " + decodedFrames + " frames out of " + queuedFrames +
                                    " queued");
                        }
                        decoder.release();
                    }
                }
            };
        }
        public void addTests(TestList tests, Codec c) {
            for (int drcFrame = 6; drcFrame >= MIN_FRAMES_BEFORE_DRC; drcFrame--) {
                for (int eosFrame = 6; eosFrame >= 1; eosFrame--) {
                    tests.add(testStep(c, drcFrame, eosFrame));
                }
            }
        }
    };

    /**
     * Similar to AdaptiveDrcTest, but tests that PTS can change at adaptive boundaries both
     * forward and backward without the need to flush.
     */
    class AdaptiveSkipTest extends ActivityTest {
        boolean forward;
        public AdaptiveSkipTest(boolean fwd) {
            forward = fwd;
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive;
        }
        Decoder mDecoder;
        int mAdjustTimeUs = 0;
        int mDecodedFrames = 0;
        int mQueuedFrames = 0;
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing flushless skipping - init", this, c) {
                    public void run() throws Throwable {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = 2, ix = 0; i <= NUM_FRAMES; i++, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                final boolean lastSequence;
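                // The last segment sends the EOS.  Normally that is the segment that reaches
                // NUM_FRAMES; in verify mode the sizes double each pass (see i <<= 1 below),
                // so the segment is last once the next doubled size would exceed NUM_FRAMES.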
                if (verify) {
                    lastSequence = (segmentSize << 1) + 1 > NUM_FRAMES;
                } else {
                    lastSequence = segmentSize >= NUM_FRAMES;
                }
                tests.add(
                    new Step("testing flushless skipping " + (forward ? "forward" : "backward") +
                            " after " + i + " frames", this, c) {
                        public void run() throws Throwable {
                            int frames = mDecoder.queueInputBufferRange(
                                stepMedia(),
                                0 /* startFrame */,
                                segmentSize,
                                lastSequence /* sendEos */,
                                lastSequence /* expectEos */,
                                mAdjustTimeUs,
                                false /* sleepAfterFirstQueue */);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received unexpected EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mQueuedFrames += segmentSize;
                            mDecodedFrames += Math.abs(frames);
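                            // Skipping forward jumps the timestamp base ahead by 10 seconds plus
                            // the segment duration; skipping backward leaves the base unchanged,
                            // so PTS falls back at the next segment boundary without a flush.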
                            if (forward) {
                                mAdjustTimeUs += 10000000 + stepMedia().getTimestampRangeValue(
                                        0, segmentSize, Media.RANGE_DURATION);
                            }
                        }});
                if (verify) {
                    i <<= 1;
                }
            }

            tests.add(
                new Step("testing flushless skipping - finally", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " + mQueuedFrames +
                                    " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    // not yet used
    static long checksum(ByteBuffer buf, int size, CRC32 crc) {
        assertTrue(size >= 0);
        assertTrue(size <= buf.capacity());
        crc.reset();
        if (buf.hasArray()) {
            crc.update(buf.array(), buf.arrayOffset(), size);
        } else {
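            // A direct ByteBuffer has no accessible backing array, so stream its contents
            // through a small scratch array while preserving the buffer's position.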
            int pos = buf.position();
            buf.rewind();
            final int rdsize = Math.min(4096, size);
            byte bb[] = new byte[rdsize];
            int chk;
            for (int i = 0; i < size; i += chk) {
                chk = Math.min(rdsize, size - i);
                buf.get(bb, 0, chk);
                crc.update(bb, 0, chk);
            }
            buf.position(pos);
        }
        return crc.getValue();
    }

    CRC32 mCRC;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mCRC = new CRC32();
    }

    /* ====================================================================== */
    /*                              UTILITY FUNCTIONS                         */
    /* ====================================================================== */
    static String byteBufferToString(ByteBuffer buf, int start, int len) {
        int oldPosition = buf.position();
        buf.position(start);
        int strlen = 2; // {}
        boolean ellipsis = len < buf.limit();
        if (ellipsis) {
            strlen += 3; // ...
        } else {
            len = buf.limit();
        }
        strlen += 3 * len - (len > 0 ? 1 : 0); // XX,XX
        char[] res = new char[strlen];
        res[0] = '{';
        res[strlen - 1] = '}';
        if (ellipsis) {
            res[strlen - 2] = res[strlen - 3] = res[strlen - 4] = '.';
        }
        for (int i = 1; i < len; i++) {
            res[i * 3] = ',';
        }
        for (int i = 0; i < len; i++) {
            byte b = buf.get();
            int d = (b >> 4) & 15;
            res[i * 3 + 1] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
            d = (b & 15);
            res[i * 3 + 2] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
        }
        buf.position(oldPosition);
        return new String(res);
    }

    static <E> Iterable<E> chain(Iterable<E> ... iterables) {
        /* simple chainer using ArrayList */
        ArrayList<E> items = new ArrayList<E>();
        for (Iterable<E> it: iterables) {
            for (E el: it) {
                items.add(el);
            }
        }
        return items;
    }

    class Decoder implements MediaCodec.OnFrameRenderedListener {
        private final static String TAG = "AdaptiveDecoder";
        final long kTimeOutUs = 5000;
        final long kCSDTimeOutUs = 1000000;
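        // Both dequeue timeouts above are in microseconds: 5 ms for regular buffers,
        // 1 second when queuing codec-specific data.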
        MediaCodec mCodec;
        ByteBuffer[] mInputBuffers;
        ByteBuffer[] mOutputBuffers;
        TestSurface mSurface;
        boolean mDoChecksum;
        boolean mQueuedEos;
        ArrayList<Long> mTimeStamps;
        // We might add items when iterating mWarnings.
        // Use CopyOnWriteArrayList to avoid ConcurrentModificationException.
        CopyOnWriteArrayList<String> mWarnings;
        Vector<Long> mRenderedTimeStamps; // using Vector as it is implicitly synchronized
        long mLastRenderNanoTime;
        int mFramesNotifiedRendered;
        // True iff previous dequeue request returned INFO_OUTPUT_FORMAT_CHANGED.
        boolean mOutputFormatChanged;
        // Number of output format change events.
        int mOutputFormatChangeCount;
        // Save the timestamps of the first frame of each sequence.
        // Note: this is the only time output format change could happen.
        ArrayList<Long> mFirstQueueTimestamps;

        public Decoder(String codecName) {
            MediaCodec codec = null;
            try {
                codec = MediaCodec.createByCodecName(codecName);
            } catch (Exception e) {
                throw new RuntimeException("couldn't create codec " + codecName, e);
            }
            Log.i(TAG, "using codec: " + codec.getName());
            mCodec = codec;
            mDoChecksum = false;
            mQueuedEos = false;
            mTimeStamps = new ArrayList<Long>();
            mWarnings = new CopyOnWriteArrayList<String>();
            mRenderedTimeStamps = new Vector<Long>();
            mLastRenderNanoTime = System.nanoTime();
            mFramesNotifiedRendered = 0;
            mOutputFormatChanged = false;
            mOutputFormatChangeCount = 0;
            mFirstQueueTimestamps = new ArrayList<Long>();

            codec.setOnFrameRenderedListener(this, null);
        }

        public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
            final long NSECS_IN_1SEC = 1000000000;
            if (!mRenderedTimeStamps.remove(presentationTimeUs)) {
                warn("invalid (rendered) timestamp " + presentationTimeUs + ", rendered " +
                        mRenderedTimeStamps);
            }
            assert nanoTime > mLastRenderNanoTime;
            mLastRenderNanoTime = nanoTime;
            ++mFramesNotifiedRendered;
            assert nanoTime > System.nanoTime() - NSECS_IN_1SEC;
        }

        public String getName() {
            return mCodec.getName();
        }

        public Iterable<String> getWarnings() {
            return mWarnings;
        }

        private void warn(String warning) {
            mWarnings.add(warning);
            Log.w(TAG, warning);
        }

        public void clearWarnings() {
            mWarnings.clear();
        }

        public int getOutputFormatChangeCount() {
            return mOutputFormatChangeCount;
        }

        public void configureAndStart(MediaFormat format, TestSurface surface) {
            mSurface = surface;
            Log.i(TAG, "configure(" + format + ", " + mSurface.getSurface() + ")");
            mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
            Log.i(TAG, "start");
            mCodec.start();

            // inject some minimal setOutputSurface test
            // TODO: change this test to also change the surface midstream
            try {
                mCodec.setOutputSurface(null);
                fail("should not be able to set surface to NULL");
            } catch (IllegalArgumentException e) {}
            mCodec.setOutputSurface(mSurface.getSurface());

            mInputBuffers = mCodec.getInputBuffers();
            mOutputBuffers = mCodec.getOutputBuffers();
            Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
                  mInputBuffers[0].capacity() + "] and " +
982                   mOutputBuffers.length + "output[" +
                  (mOutputBuffers[0] == null ? null : mOutputBuffers[0].capacity()) + "]");
            mQueuedEos = false;
            mRenderedTimeStamps.clear();
            mLastRenderNanoTime = System.nanoTime();
            mFramesNotifiedRendered = 0;
        }

        public void stop() {
            Log.i(TAG, "stop");
            mCodec.stop();
            // if we have queued 32 frames or more, at least one should have been notified
            // to have rendered.
            if (mRenderedTimeStamps.size() > 32 && mFramesNotifiedRendered == 0) {
                fail("rendered " + mRenderedTimeStamps.size() +
                        " frames, but none have been notified.");
            }
        }

        public void flush() {
            Log.i(TAG, "flush");
            mCodec.flush();
            mQueuedEos = false;
            mTimeStamps.clear();
        }

        public String dequeueAndReleaseOutputBuffer(MediaCodec.BufferInfo info) {
            int ix = mCodec.dequeueOutputBuffer(info, kTimeOutUs);
            if (ix == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mOutputBuffers = mCodec.getOutputBuffers();
                Log.d(TAG, "output buffers have changed.");
                return null;
            } else if (ix == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = mCodec.getOutputFormat();
                Log.d(TAG, "output format has changed to " + format);
                int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                mDoChecksum = isRecognizedFormat(colorFormat);
                mOutputFormatChanged = true;
                ++mOutputFormatChangeCount;
                return null;
            } else if (ix < 0) {
                Log.v(TAG, "no output");
                return null;
            }
            /* create checksum */
            long sum = 0;

            Log.v(TAG, "dequeue #" + ix + " => { [" + info.size + "] flags=" + info.flags +
                    " @" + info.presentationTimeUs + "}");

            // we get a nonzero size for valid decoded frames
            boolean doRender = (info.size != 0);

            if (doRender) {
                mRenderedTimeStamps.add(info.presentationTimeUs);
                if (!mTimeStamps.remove(info.presentationTimeUs)) {
                    warn("invalid (decoded) timestamp " + info.presentationTimeUs + ", queued " +
                            mTimeStamps);
                }
            }

            if (mSurface.getSurface() == null) {
                if (mDoChecksum) {
                    sum = checksum(mOutputBuffers[ix], info.size, mCRC);
                }
                mCodec.releaseOutputBuffer(ix, doRender);
            } else if (doRender) {
                // If using SurfaceTexture, as soon as we call releaseOutputBuffer, the
                // buffer will be forwarded to SurfaceTexture to convert to a texture.
                // The API doesn't guarantee that the texture will be available before
                // the call returns, so we need to wait for the onFrameAvailable callback
                // to fire.  If we don't wait, we risk dropping frames.
                mSurface.prepare();
                mCodec.releaseOutputBuffer(ix, doRender);
                mSurface.waitForDraw();
                if (mDoChecksum) {
                    sum = mSurface.checksum();
                }
            } else {
                mCodec.releaseOutputBuffer(ix, doRender);
            }

            if (mOutputFormatChanged) {
                // Previous dequeue was output format change; format change must
                // correspond to a new sequence, so it must happen right before
                // the first frame of one of the sequences.
                // This part of the test is new for Android 12.
1069                 if (sIsAtLeastS) {
1070                     assertTrue("Codec " + getName() + " cannot find format change at "
1071                         + info.presentationTimeUs + " in " + mFirstQueueTimestamps,
1072                         mFirstQueueTimestamps.remove(info.presentationTimeUs));
1073                 }
1074                 mOutputFormatChanged = false;
1075             }
1076 
1077             return String.format(Locale.US, "{pts=%d, flags=%x, data=0x%x}",
1078                                  info.presentationTimeUs, info.flags, sum);
1079         }
1080 
1081         /* returns true iff a frame was queued */
1082         public boolean queueInputBuffer(Media media, int frameIx, boolean EOS) {
1083             return queueInputBuffer(media, frameIx, EOS, 0);
1084         }
1085 
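        /**
         * Queues frame {@code frameIx} of {@code media}, shifting its presentation time by
         * {@code adjustTimeUs}; the shifted timestamp (frame.presentationTimeUs + adjustTimeUs)
         * is also recorded in mTimeStamps so the output side can validate it later. Returns
         * false if EOS was already queued or no input buffer became available within the
         * timeout.
         */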
1086         public boolean queueInputBuffer(Media media, int frameIx, boolean EOS, long adjustTimeUs) {
1087             if (mQueuedEos) {
1088                 return false;
1089             }
1090 
1091             int ix = mCodec.dequeueInputBuffer(kTimeOutUs);
1092 
1093             if (ix < 0) {
1094                 return false;
1095             }
1096 
1097             ByteBuffer buf = mInputBuffers[ix];
1098             Media.Frame frame = media.getFrame(frameIx);
1099             buf.clear();
1100 
1101             long presentationTimeUs = adjustTimeUs;
1102             int flags = 0;
1103             if (frame != null) {
1104                 buf.put((ByteBuffer)frame.buf.clear());
1105                 presentationTimeUs += frame.presentationTimeUs;
1106                 flags = frame.flags;
1107             }
1108 
1109             if (EOS) {
1110                 flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
1111                 mQueuedEos = true;
1112             }
1113 
1114             mTimeStamps.add(presentationTimeUs);
1115             Log.v(TAG, "queue { [" + buf.position() + "]=" + byteBufferToString(buf, 0, 16) +
1116                     " flags=" + flags + " @" + presentationTimeUs + "} => #" + ix);
1117             mCodec.queueInputBuffer(
1118                     ix, 0 /* offset */, buf.position(), presentationTimeUs, flags);
1119             return true;
1120         }
1121 
1122         /* returns the number of frames received, negated if EOS was received */
1123         public int queueInputBufferRange(
1124                 Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
1125                 boolean waitForEos) {
1126             return queueInputBufferRange(
1127                     media, frameStartIx, frameEndIx, sendEosAtEnd, waitForEos, 0, false);
1128         }
1129 
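        /**
         * Queues the codec-specific data buffers (csd-0, csd-1, ...) found in {@code format}
         * as BUFFER_FLAG_CODEC_CONFIG input buffers, typically for runs where the format used
         * in configure() had its csd-* keys stripped (see Media.removeCSD()).
         *
         * For reference, the csd buffers come straight from the track format, e.g. (sketch,
         * assuming {@code format} was obtained from a MediaExtractor):
         * <pre>{@code
         * ByteBuffer csd0 = format.getByteBuffer("csd-0");  // e.g. SPS for H.264
         * ByteBuffer csd1 = format.getByteBuffer("csd-1");  // e.g. PPS for H.264
         * }</pre>
         */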
1130         public void queueCSD(MediaFormat format) {
1131             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1132             for (int csdIx = 0; ; ++csdIx) {
1133                 ByteBuffer csdBuf = format.getByteBuffer("csd-" + csdIx);
1134                 if (csdBuf == null) {
1135                     break;
1136                 }
1137 
1138                 int ix = mCodec.dequeueInputBuffer(kCSDTimeOutUs);
1139                 if (ix < 0) {
1140                     fail("Could not dequeue input buffer for CSD #" + csdIx);
1141                     return;
1142                 }
1143 
1144                 ByteBuffer buf = mInputBuffers[ix];
1145                 buf.clear();
1146                 buf.put((ByteBuffer)csdBuf.clear());
1147                 Log.v(TAG, "queue-CSD { [" + buf.position() + "]=" +
1148                         byteBufferToString(buf, 0, 16) + "} => #" + ix);
1149                 mCodec.queueInputBuffer(
1150                         ix, 0 /* offset */, buf.position(), 0 /* timeUs */,
1151                         MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
1152             }
1153         }
1154 
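        /**
         * Queues frames [frameStartIx, frameEndIx) while draining output in the same loop.
         * If {@code sendEosAtEnd} is set, EOS is attached to the last queued frame; if
         * {@code waitForEos} is set, the loop keeps draining until output EOS is seen.
         * {@code adjustTimeUs} shifts every queued timestamp, and {@code sleepAfterFirstQueue}
         * gives the codec time to report a format change before more input is queued.
         * Returns the number of decoded frames, negated if output EOS was received.
         */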
1155         public int queueInputBufferRange(
1156                 Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
1157                 boolean waitForEos, long adjustTimeUs, boolean sleepAfterFirstQueue) {
1158             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1159             int frameIx = frameStartIx;
1160             int numFramesDecoded = 0;
1161             boolean sawOutputEos = false;
1162             int deadDecoderCounter = 0;
1163             ArrayList<String> frames = new ArrayList<String>();
1164             String buf = null;
1165             // Queue input and drain output in one loop; keep draining after the last queued frame.
1166             while ((waitForEos && !sawOutputEos) || frameIx < frameEndIx || buf != null) {
1167                 if (frameIx < frameEndIx) {
1168                     if (queueInputBuffer(
1169                             media,
1170                             frameIx,
1171                             sendEosAtEnd && (frameIx + 1 == frameEndIx),
1172                             adjustTimeUs)) {
1173                         if (frameIx == frameStartIx) {
1174                             if (sleepAfterFirstQueue) {
1175                                 // MediaCodec detects and processes output format change upon
1176                                 // the first frame. It must not send the event prematurely with
1177                                 // pending buffers to be dequeued. Sleep after the first frame
1178                                 // with new resolution to make sure MediaCodec had enough time
1179                                 // to process the frame with pending buffers.
1180                                 try {
1181                                     Thread.sleep(100);
1182                                 } catch (InterruptedException e) {}
1183                             }
1184                             mFirstQueueTimestamps.add(mTimeStamps.get(mTimeStamps.size() - 1));
1185                         }
1186                         frameIx++;
1187                     }
1188                 }
1189 
1190                 buf = dequeueAndReleaseOutputBuffer(info);
1191                 if (buf != null) {
1192                     // Some decoders output a 0-sized buffer at the end. Disregard those.
1193                     if (info.size > 0) {
1194                         deadDecoderCounter = 0;
1195                         numFramesDecoded++;
1196                     }
1197 
1198                     if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1199                         Log.d(TAG, "saw output EOS.");
1200                         sawOutputEos = true;
1201                     }
1202                 }
1203                 if (++deadDecoderCounter >= 100) {
1204                     warn("have not received an output frame for a while");
1205                     break;
1206                 }
1207             }
1208 
1209             if (numFramesDecoded < frameEndIx - frameStartIx - 16) {
1210                 fail("Queued " + (frameEndIx - frameStartIx) + " frames but only received " +
1211                         numFramesDecoded);
1212             }
1213             return (sawOutputEos ? -1 : 1) * numFramesDecoded;
1214         }
1215 
1216         void release() {
1217             Log.i(TAG, "release");
1218             mCodec.release();
1219             mSurface.release();
1220             mInputBuffers = null;
1221             mOutputBuffers = null;
1222             mCodec = null;
1223             mSurface = null;
1224         }
1225 
1226         // don't fail on exceptions in release()
1227         void releaseQuietly() {
1228             try {
1229                 Log.i(TAG, "release");
1230                 mCodec.release();
1231             } catch (Throwable e) {
1232                 Log.e(TAG, "Exception while releasing codec", e);
1233             }
1234             mSurface.release();
1235             mInputBuffers = null;
1236             mOutputBuffers = null;
1237             mCodec = null;
1238             mSurface = null;
1239         }
1240     };
1241 
1242     /* from EncodeDecodeTest */
1243     private static boolean isRecognizedFormat(int colorFormat) {
1244         switch (colorFormat) {
1245             // these are the formats we know how to handle for this test
1246             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
1247             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
1248             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
1249             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
1250             case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
1251                 return true;
1252             default:
1253                 return false;
1254         }
1255     }
1256 
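    /**
     * Decodes up to {@code eosframe} frames of {@code media} with the named codec on the
     * given surface and returns how many frames actually came out. A hedged sketch of the
     * call pattern; the clip name, frame count and codec name below are placeholders only:
     * <pre>{@code
     * Media media = Media.read(WorkDir.getMediaDirString() + "clip.mp4", 100);
     * int frames = countFrames("c2.android.avc.decoder", null, media, 100, surface);
     * }</pre>
     */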
1257     private int countFrames(
1258             String codecName, MediaCodecInfo codecInfo, Media media, int eosframe, TestSurface s)
1259             throws Exception {
1260         Decoder codec = new Decoder(codecName);
1261         codec.configureAndStart(media.getFormat(), s /* surface */);
1262 
1263         int numframes = codec.queueInputBufferRange(
1264                 media, 0, eosframe, true /* sendEos */, true /* waitForEos */);
1265         if (numframes >= 0) {
1266             Log.w(TAG, "Did not receive EOS");
1267         } else {
1268             numframes *= -1;
1269         }
1270 
1271         codec.stop();
1272         codec.release();
1273         return numframes;
1274     }
1275 }
1276 
1277 /* ====================================================================== */
1278 /*                             Video Media Asset                          */
1279 /* ====================================================================== */
1280 class Media {
1281     private final static String TAG = "AdaptiveMedia";
1282     private MediaFormat mFormat;
1283     private MediaFormat mAdaptiveFormat;
1284     static class Frame {
1285         long presentationTimeUs;
1286         int flags;
1287         ByteBuffer buf;
1288         public Frame(long _pts, int _flags, ByteBuffer _buf) {
1289             presentationTimeUs = _pts;
1290             flags = _flags;
1291             buf = _buf;
1292         }
1293     };
1294     private Frame[] mFrames;
1295 
1296     public Frame getFrame(int ix) {
1297         /* this works even on short samples, as missing frames remain null */
1298         if (ix >= 0 && ix < mFrames.length) {
1299             return mFrames[ix];
1300         }
1301         return null;
1302     }
1303     private Media(MediaFormat format, MediaFormat adaptiveFormat, int numFrames) {
1304         /* we need separate copies of the format, as once adaptive keys are added
1305            to a MediaFormat they cannot be removed */
1306         mFormat = format;
1307         mAdaptiveFormat = adaptiveFormat;
1308         mFrames = new Frame[numFrames];
1309     }
1310 
1311     public MediaFormat getFormat() {
1312         return mFormat;
1313     }
1314 
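    /**
     * Returns a copy of {@code orig} that carries only the mime, size and a few optional
     * integer keys, and in particular no csd-* buffers. This lets tests configure a decoder
     * without codec-specific data and supply the CSD in-band instead (see Decoder.queueCSD(),
     * or the CSD prepended to the first frame by Media.read()).
     */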
1315     public static MediaFormat removeCSD(MediaFormat orig) {
1316         MediaFormat copy = MediaFormat.createVideoFormat(
1317                 orig.getString(MediaFormat.KEY_MIME),
1318                 orig.getInteger(MediaFormat.KEY_WIDTH), orig.getInteger(MediaFormat.KEY_HEIGHT));
1319         for (String k : new String[] {
1320                 MediaFormat.KEY_FRAME_RATE, MediaFormat.KEY_MAX_WIDTH, MediaFormat.KEY_MAX_HEIGHT,
1321                 MediaFormat.KEY_MAX_INPUT_SIZE
1322         }) {
1323             if (orig.containsKey(k)) {
1324                 try {
1325                     copy.setInteger(k, orig.getInteger(k));
1326                 } catch (ClassCastException e) {
1327                     try {
1328                         copy.setFloat(k, orig.getFloat(k));
1329                     } catch (ClassCastException e2) {
1330                         // Could not copy value. Don't fail here, as having non-standard
1331                         // value types for defined keys is permissible by the media API
1332                         // for optional keys.
1333                     }
1334                 }
1335             }
1336         }
1337         return copy;
1338     }
1339 
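    /**
     * Returns the shared adaptive format with KEY_MAX_WIDTH, KEY_MAX_HEIGHT and
     * KEY_MAX_INPUT_SIZE set to the given values. Note that the same MediaFormat instance is
     * mutated and returned on every call. Illustrative use, with hypothetical {@code media}
     * and {@code maxInputSize} values:
     * <pre>{@code
     * MediaFormat fmt = media.getAdaptiveFormat(1280, 720, maxInputSize);
     * // fmt.getInteger(MediaFormat.KEY_MAX_WIDTH)  == 1280
     * // fmt.getInteger(MediaFormat.KEY_MAX_HEIGHT) == 720
     * }</pre>
     */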
1340     public MediaFormat getAdaptiveFormat(int width, int height, int maxInputSize) {
1341         mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
1342         mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
1343         mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
1344         return mAdaptiveFormat;
1345     }
1346 
1347     public String getMime() {
1348         return mFormat.getString(MediaFormat.KEY_MIME);
1349     }
1350 
1351     public int getMaxInputSize() {
1352         return mFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
1353     }
1354 
1355     public void setMaxInputSize(int maxInputSize) {
1356         mFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
1357     }
1358 
1359     public int getWidth() {
1360         return mFormat.getInteger(MediaFormat.KEY_WIDTH);
1361     }
1362 
1363     public int getHeight() {
1364         return mFormat.getInteger(MediaFormat.KEY_HEIGHT);
1365     }
1366 
1367     public final static int RANGE_START = 0;
1368     public final static int RANGE_END = 1;
1369     public final static int RANGE_DURATION = 2;
1370 
1371     public long getTimestampRangeValue(int frameStartIx, int frameEndIx, int kind) {
1372         long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
1373         for (int frameIx = frameStartIx; frameIx < frameEndIx; frameIx++) {
1374             Frame frame = getFrame(frameIx);
1375             if (frame != null) {
1376                 if (min > frame.presentationTimeUs) {
1377                     min = frame.presentationTimeUs;
1378                 }
1379                 if (max < frame.presentationTimeUs) {
1380                     max = frame.presentationTimeUs;
1381                 }
1382             }
1383         }
1384         if (kind == RANGE_START) {
1385             return min;
1386         } else if (kind == RANGE_END) {
1387             return max;
1388         } else if (kind == RANGE_DURATION) {
1389             return max - min;
1390         } else {
1391             throw new IllegalArgumentException("kind is not valid: " + kind);
1392         }
1393     }
1394 
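    /**
     * Reads the first {@code numFrames} samples of {@code video} into memory. Any csd-0/csd-1
     * buffers in the track format are prepended to the first frame's buffer, and
     * KEY_MAX_INPUT_SIZE is overridden with the largest resulting sample size so that the
     * decoder's input buffers can hold the combined payload.
     */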
1395     public static Media read(final String video, int numFrames)
1396             throws java.io.IOException {
1397 
1398         Preconditions.assertTestFileExists(video);
1399         MediaExtractor extractor = new MediaExtractor();
1400         extractor.setDataSource(video);
1401 
1402         Media media = new Media(
1403                 extractor.getTrackFormat(0), extractor.getTrackFormat(0), numFrames);
1404         extractor.selectTrack(0);
1405 
1406         Log.i(TAG, "format=" + media.getFormat());
1407         ArrayList<ByteBuffer> csds = new ArrayList<ByteBuffer>();
1408         for (String tag: new String[] { "csd-0", "csd-1" }) {
1409             if (media.getFormat().containsKey(tag)) {
1410                 ByteBuffer csd = media.getFormat().getByteBuffer(tag);
1411                 Log.i(TAG, tag + "=" + AdaptivePlaybackTest.byteBufferToString(csd, 0, 16));
1412                 csds.add(csd);
1413             }
1414         }
1415 
1416         int maxInputSize = 0;
1417         ByteBuffer readBuf = ByteBuffer.allocate(2000000);
1418         for (int ix = 0; ix < numFrames; ix++) {
1419             int sampleSize = extractor.readSampleData(readBuf, 0 /* offset */);
1420 
1421             if (sampleSize < 0) {
1422                 throw new IllegalArgumentException("media is too short at " + ix + " frames");
1423             } else {
1424                 readBuf.position(0).limit(sampleSize);
1425                 for (ByteBuffer csd: csds) {
1426                     sampleSize += csd.capacity();
1427                 }
1428 
1429                 if (maxInputSize < sampleSize) {
1430                     maxInputSize = sampleSize;
1431                 }
1432 
1433                 ByteBuffer buf = ByteBuffer.allocate(sampleSize);
1434                 for (ByteBuffer csd: csds) {
1435                     csd.clear();
1436                     buf.put(csd);
1437                     csd.clear();
1438                     Log.i(TAG, "csd[" + csd.capacity() + "]");
1439                 }
1440                 Log.i(TAG, "frame-" + ix + "[" + sampleSize + "]");
1441                 csds.clear();
1442                 buf.put(readBuf);
1443                 media.mFrames[ix] = new Frame(
1444                     extractor.getSampleTime(),
1445                     extractor.getSampleFlags(),
1446                     buf);
1447                 extractor.advance();
1448             }
1449         }
1450         extractor.release();
1451 
1452         /* Override MAX_INPUT_SIZE in the format, as the CSD is prepended
1453          * to the first input buffer */
1454         media.setMaxInputSize(maxInputSize);
1455         return media;
1456     }
1457 }
1458 
1459 /* ====================================================================== */
1460 /*                      Codec, CodecList and CodecFactory                 */
1461 /* ====================================================================== */
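/*
 * A decoder under test: its name, its CodecCapabilities (may be null), whether it advertises
 * FEATURE_AdaptivePlayback, whether it is a vendor implementation, and the two media clips
 * it will switch between.
 */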
1462 class Codec {
1463     private final static String TAG = "AdaptiveCodec";
1464 
1465     public String name;
1466     public CodecCapabilities capabilities;
1467     public Media[] mediaList;
1468     public boolean adaptive;
1469     public boolean vendor;
1470     public Codec(MediaCodecInfo info, CodecCapabilities c, Media[] m) {
1471         name = info.getName();
1472         capabilities = c;
1473         List<Media> medias = new ArrayList<Media>();
1474 
1475         if (capabilities == null) {
1476             adaptive = false;
1477             vendor = true;
1478         } else {
1479             Log.w(TAG, "checking capabilities of " + name + " for " + m[0].getMime());
1480             adaptive = capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
1481             vendor = info.isVendor();
1482             for (Media media : m) {
1483                 if (media.getHeight() >= 720 &&
1484                         !capabilities.isFormatSupported(media.getFormat())) {
1485                     // skip if 720p and up is unsupported
1486                     Log.w(TAG, "codec " + name + " doesn't support " + media.getFormat());
1487                     continue;
1488                 }
1489                 medias.add(media);
1490             }
1491         }
1492 
1493         if (medias.size() < 2) {
1494             Log.e(TAG, "codec " + name + " doesn't support required resolutions");
1495         }
1496         mediaList = medias.subList(0, 2).toArray(new Media[2]);
1497     }
1498 }
1499 
1500 class CodecList extends ArrayList<Codec> { };
1501 
1502 /* all decoders that support the given mime type */
1503 class CodecFamily extends CodecList {
1504     private final static String TAG = "AdaptiveCodecFamily";
1505     private static final int NUM_FRAMES = AdaptivePlaybackTest.NUM_FRAMES;
1506     static final String mInpPrefix = WorkDir.getMediaDirString();
1507 
1508     public CodecFamily(String mime, final String ... resources) {
1509         try {
1510             /* read all media */
1511             Media[] mediaList = new Media[resources.length];
1512             for (int i = 0; i < resources.length; i++) {
1513                 Log.v(TAG, "reading media " + mInpPrefix + resources[i]);
1514                 Media media = Media.read(mInpPrefix + resources[i], NUM_FRAMES);
1515                 assert media.getMime().equals(mime):
1516                         "test stream " + mInpPrefix + resources[i] + " has " + media.getMime() +
1517                         " mime type instead of " + mime;
1518 
1519                 /* assuming the first timestamp is the smallest */
1520                 long firstPTS = media.getFrame(0).presentationTimeUs;
1521                 long smallestPTS = media.getTimestampRangeValue(0, NUM_FRAMES, Media.RANGE_START);
1522 
1523                 assert firstPTS == smallestPTS:
1524                         "first frame timestamp (" + firstPTS + ") is not smallest (" +
1525                         smallestPTS + ")";
1526 
1527                 mediaList[i] = media;
1528             }
1529 
1530             /* enumerate codecs */
1531             MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
1532             for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
1533                 if (codecInfo.isAlias()) {
1534                     continue;
1535                 }
1536                 if (codecInfo.isEncoder()) {
1537                     continue;
1538                 }
1539                 for (String type : codecInfo.getSupportedTypes()) {
1540                     if (type.equals(mime)) {
1541                         add(new Codec(
1542                                 codecInfo,
1543                                 codecInfo.getCapabilitiesForType(mime),
1544                                 mediaList));
1545                         break;
1546                     }
1547                 }
1548             }
1549         } catch (Throwable t) {
1550             Log.wtf(TAG, "Constructor failed", t);
1551             throw new RuntimeException("constructor failed", t);
1552         }
1553     }
1554 }
1555 
1556 /* decoders for the given mime type, filtered to Google or vendor implementations */
1557 class CodecFamilySpecific extends CodecList {
1558     public CodecFamilySpecific(String mime, boolean isGoogle, final String ... resources) {
1559         for (Codec c: new CodecFamily(mime, resources)) {
1560             if (c.vendor != isGoogle) {
1561                 add(c);
1562             }
1563         }
1564     }
1565 }
1566 
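/*
 * Factories selecting which decoder implementations a test runs against: all decoders,
 * only Google (software) decoders, or only vendor (typically hardware) decoders.
 * Illustrative use; the clip names below are placeholders, not real test assets:
 *
 *   Iterable<Codec> codecs = new HWCodecFactory().createCodecList(
 *           MediaFormat.MIMETYPE_VIDEO_AVC, "clip_480x360.mp4", "clip_1280x720.mp4");
 */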
1567 class CodecFactory {
1568     public CodecList createCodecList(String mime, final String ...resources) {
1569         return new CodecFamily(mime, resources);
1570     }
1571 }
1572 
1573 class SWCodecFactory extends CodecFactory {
1574     public CodecList createCodecList(String mime, final String ...resources) {
1575         return new CodecFamilySpecific(mime, true, resources);
1576     }
1577 }
1578 
1579 class HWCodecFactory extends CodecFactory {
1580     public CodecList createCodecList(String mime, final String ...resources) {
1581         return new CodecFamilySpecific(mime, false, resources);
1582     }
1583 }
1584 
1585 /* ====================================================================== */
1586 /*                  Test Steps, Test (Case)s, and Test List               */
1587 /* ====================================================================== */
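/*
 * Runs a single Step on a worker thread and captures anything it throws, so the failure can
 * be re-thrown on the calling thread once the step completes; used for surfaces that report
 * needsToRunInSeparateThread().
 */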
1588 class StepRunner implements Runnable {
1589     public StepRunner(Step s) {
1590         mStep = s;
1591         mThrowed = null;
1592     }
1593     public void run() {
1594         try {
1595             mStep.run();
1596         } catch (Throwable e) {
1597             mThrowed = e;
1598         }
1599     }
1600     public void throwThrowed() throws Throwable {
1601         if (mThrowed != null) {
1602             throw mThrowed;
1603         }
1604     }
1605     private Throwable mThrowed;
1606     private Step mStep;
1607 }
1608 
1609 class TestList extends ArrayList<Step> {
1610     private final static String TAG = "AdaptiveTestList";
1611     public void run() throws Throwable {
1612         Throwable res = null;
1613         for (Step step: this) {
1614             try {
1615                 Log.i(TAG, step.getDescription());
1616                 if (step.stepSurface().needsToRunInSeparateThread()) {
1617                     StepRunner runner = new StepRunner(step);
1618                     Thread th = new Thread(runner, "stepWrapper");
1619                     th.start();
1620                     th.join();
1621                     runner.throwThrowed();
1622                 } else {
1623                     step.run();
1624                 }
1625             } catch (Throwable e) {
1626                 Log.e(TAG, "while " + step.getDescription(), e);
1627                 res = e;
1628                 mFailedSteps++;
1629             } finally {
1630                 mWarnings += step.getWarnings();
1631             }
1632         }
1633         if (res != null) {
1634             throw new RuntimeException(
1635                 mFailedSteps + " failed steps, " + mWarnings + " warnings",
1636                 res);
1637         }
1638     }
1639     public int getWarnings() {
1640         return mWarnings;
1641     }
1642     public int getFailures() {
1643         return mFailedSteps;
1644     }
1645     private int mFailedSteps;
1646     private int mWarnings;
1647 }
1648 
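/*
 * Base class for a test scenario. Subclasses configure themselves through the fluent setters
 * below and emit concrete Steps via addTests(). A hedged usage sketch; SomeScenario is a
 * hypothetical subclass, not part of this file:
 *
 *   Test t = new SomeScenario().adaptive().texture();  // adaptive format, SurfaceTexture output
 *   t.addTests(testList, codec);
 */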
1649 abstract class Test {
1650     public static final int FORMAT_ADAPTIVE_LARGEST = 1;
1651     public static final int FORMAT_ADAPTIVE_FIRST = 2;
1652     public static final int FORMAT_REGULAR = 3;
1653 
1654     protected int mFormatType;
1655     protected boolean mUseSurface;
1656     protected boolean mUseSurfaceTexture;
1657 
1658     public Test() {
1659         mFormatType = FORMAT_REGULAR;
1660         mUseSurface = true;
1661         mUseSurfaceTexture = false;
1662     }
1663 
1664     public Test adaptive() {
1665         mFormatType = FORMAT_ADAPTIVE_LARGEST;
1666         return this;
1667     }
1668 
1669     public Test adaptiveSmall() {
1670         mFormatType = FORMAT_ADAPTIVE_FIRST;
1671         return this;
1672     }
1673 
1674     public Test byteBuffer() {
1675         mUseSurface = false;
1676         mUseSurfaceTexture = false;
1677         return this;
1678     }
1679 
1680     public Test texture() {
1681         mUseSurface = false;
1682         mUseSurfaceTexture = true;
1683         return this;
1684     }
1685 
1686     public void checkAdaptiveFormat() {
1687         assert mFormatType != FORMAT_REGULAR:
1688                 "must be used with adaptive format";
1689     }
1690 
1691     abstract protected TestSurface getSurface();
1692 
1693     /* TRICKY: format is updated in each test run as we are actually reusing the
1694        same 2 MediaFormat objects returned from MediaExtractor.  Therefore,
1695        format must be explicitly obtained in each test step.
1696 
1697        returns null if codec does not support the format.
1698        */
1699     protected MediaFormat getFormat(Codec c) {
1700         return getFormat(c, 0);
1701     }
1702 
1703     protected MediaFormat getFormat(Codec c, int i) {
1704         MediaFormat format = null;
1705         if (mFormatType == FORMAT_REGULAR) {
1706             format = c.mediaList[i].getFormat();
1707         } else if (mFormatType == FORMAT_ADAPTIVE_FIRST && c.adaptive) {
1708             format = c.mediaList[i].getAdaptiveFormat(
1709                 c.mediaList[i].getWidth(), c.mediaList[i].getHeight(), c.mediaList[i].getMaxInputSize());
1710             for (Media media : c.mediaList) {
1711                 /* get the largest max input size for all media and use that */
1712                 if (media.getMaxInputSize() > format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)) {
1713                     format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, media.getMaxInputSize());
1714                 }
1715             }
1716         } else if (mFormatType == FORMAT_ADAPTIVE_LARGEST && c.adaptive) {
1717             /* update adaptive format to max size used */
1718             format = c.mediaList[i].getAdaptiveFormat(0, 0, 0);
1719             for (Media media : c.mediaList) {
1720                 /* get the largest width, and the largest height independently */
1721                 if (media.getWidth() > format.getInteger(MediaFormat.KEY_MAX_WIDTH)) {
1722                     format.setInteger(MediaFormat.KEY_MAX_WIDTH, media.getWidth());
1723                 }
1724                 if (media.getHeight() > format.getInteger(MediaFormat.KEY_MAX_HEIGHT)) {
1725                     format.setInteger(MediaFormat.KEY_MAX_HEIGHT, media.getHeight());
1726                 }
1727                 if (media.getMaxInputSize() > format.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)) {
1728                     format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, media.getMaxInputSize());
1729                 }
1730             }
1731         }
1732         return format;
1733     }
1734 
1735     public boolean isValid(Codec c) { return true; }
1736     public abstract void addTests(TestList tests, Codec c);
1737 }
1738 
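/*
 * One unit of work in a test run: a description plus the codec, media and surface it operates
 * on, and a warning counter that lets soft failures accumulate without aborting the whole
 * test list.
 */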
1739 abstract class Step {
1740     private static final String TAG = "AdaptiveStep";
1741 
1742     public Step(String title, Test instance, Codec codec, Media media) {
1743         mTest = instance;
1744         mCodec = codec;
1745         mMedia = media;
1746         mDescription = title + " on " + stepSurface().getSurface() + " using " +
1747             mCodec.name + " and " + stepFormat();
1748     }
1749     public Step(String title, Test instance, Codec codec, int mediaIx) {
1750         this(title, instance, codec, codec.mediaList[mediaIx]);
1751     }
1752     public Step(String title, Test instance, Codec codec) {
1753         this(title, instance, codec, 0);
1754     }
1755     public Step(String description) {
1756         mDescription = description;
1757     }
1758     public Step() { }
1759 
1760     public abstract void run() throws Throwable;
1761 
1762     private String mDescription;
1763     private Test mTest;
1764     private Codec mCodec;
1765     private Media mMedia;
1766     private int mWarnings;
1767 
1768     /* TRICKY: use non-standard getter names so that we don't conflict with the getters
1769        in the Test classes, as most test Steps are defined as anonymous classes inside
1770        the test classes. */
1771     public MediaFormat stepFormat() {
1772         int ix = Arrays.asList(mCodec.mediaList).indexOf(mMedia);
1773         return mTest.getFormat(mCodec, ix);
1774     }
1775 
1776     public TestSurface stepSurface() {
1777         return mTest.getSurface();
1778     }
1779 
1780     public Media  stepMedia()       { return mMedia; }
1781 
1782     public String getDescription() { return mDescription; }
1783     public int    getWarnings()    { return mWarnings; }
1784 
1785     public void warn(String message) {
1786         Log.e(TAG, "WARNING: " + message + " in " + getDescription());
1787         mWarnings++;
1788     }
1789     public void warn(String message, Throwable t) {
1790         Log.e(TAG, "WARNING: " + message + " in " + getDescription(), t);
1791         mWarnings++;
1792     }
1793     public void warn(Iterable<String> warnings) {
1794         for (String warning: warnings) {
1795             warn(warning);
1796         }
1797     }
1798 }
1799 
1800 interface TestSurface {
1801     public Surface getSurface();
1802     public long checksum();
1803     public void release();
1804     public void prepare();         // prepare surface prior to render
1805     public void waitForDraw();     // wait for rendering to take place
1806     public boolean needsToRunInSeparateThread();
1807 }
1808 
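/*
 * Off-screen output surface backed by OutputSurface: prepare() makes its EGL context current,
 * waitForDraw() waits for the decoded frame and draws it, and checksum() reads the RGBA
 * pixels back with glReadPixels and feeds them to the shared CRC32.
 */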
1809 class DecoderSurface extends OutputSurface implements TestSurface {
1810     private ByteBuffer mBuf;
1811     int mWidth;
1812     int mHeight;
1813     CRC32 mCRC;
1814 
1815     public DecoderSurface(int width, int height, CRC32 crc) {
1816         super(width, height);
1817         mWidth = width;
1818         mHeight = height;
1819         mCRC = crc;
1820         mBuf = ByteBuffer.allocateDirect(4 * width * height);
1821     }
1822 
1823     public void prepare() {
1824         makeCurrent();
1825     }
1826 
1827     public void waitForDraw() {
1828         awaitNewImage();
1829         drawImage();
1830     }
1831 
1832     public long checksum() {
1833         mBuf.position(0);
1834         GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, mBuf);
1835         mBuf.position(0);
1836         return AdaptivePlaybackTest.checksum(mBuf, mBuf.capacity(), mCRC);
1837     }
1838 
1839     public void release() {
1840         super.release();
1841         mBuf = null;
1842     }
1843 
1844     public boolean needsToRunInSeparateThread() {
1845         return true;
1846     }
1847 }
1848 
1849 class ActivitySurface implements TestSurface {
1850     private Surface mSurface;
1851     public ActivitySurface(Surface s) {
1852         mSurface = s;
1853     }
1854     public Surface getSurface() {
1855         return mSurface;
1856     }
1857     public void prepare() { }
1858     public void waitForDraw() { }
1859     public long checksum() {
1860         return 0;
1861     }
1862     public void release() {
1863         // don't release activity surface, as it is reusable
1864     }
1865     public boolean needsToRunInSeparateThread() {
1866         return false;
1867     }
1868 }
1869