1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.cts;
18 
19 import com.android.cts.media.R;
20 
21 import android.content.Context;
22 import android.content.pm.PackageManager;
23 import android.content.res.AssetFileDescriptor;
24 import android.media.MediaCodec;
25 import android.media.MediaCodecInfo;
26 import android.media.MediaCodecInfo.CodecCapabilities;
27 import android.media.MediaCodecInfo.CodecProfileLevel;
28 import android.media.MediaCodecList;
29 import android.media.MediaExtractor;
30 import android.media.MediaFormat;
31 import android.util.Log;
32 import android.view.Surface;
33 
34 import android.opengl.GLES20;
35 import javax.microedition.khronos.opengles.GL10;
36 
37 import java.io.IOException;
38 import java.lang.System;
39 import java.nio.ByteBuffer;
40 import java.util.ArrayList;
41 import java.util.Arrays;
42 import java.util.Collection;
43 import java.util.Locale;
44 import java.util.Vector;
45 import java.util.zip.CRC32;
46 
47 public class AdaptivePlaybackTest extends MediaPlayerTestBase {
48     private static final String TAG = "AdaptivePlaybackTest";
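    // when sanity is set, each test runs only a reduced subset of its steps
    // (loop counters are halved or doubled between iterations)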
49     private boolean sanity = false;
50     private static final int MIN_FRAMES_BEFORE_DRC = 2;
51 
52     public Iterable<Codec> H264(CodecFactory factory) {
53         return factory.createCodecList(
54                 mContext,
55                 MediaFormat.MIMETYPE_VIDEO_AVC,
56                 "OMX.google.h264.decoder",
57                 R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
58                 R.raw.video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz);
59     }
60 
61     public Iterable<Codec> HEVC(CodecFactory factory) {
62         return factory.createCodecList(
63                 mContext,
64                 MediaFormat.MIMETYPE_VIDEO_HEVC,
65                 "OMX.google.hevc.decoder",
66                 R.raw.video_640x360_mp4_hevc_450kbps_30fps_aac_stereo_128kbps_48000hz,
67                 R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz);
68     }
69 
70     public Iterable<Codec> H263(CodecFactory factory) {
71         return factory.createCodecList(
72                 mContext,
73                 MediaFormat.MIMETYPE_VIDEO_H263,
74                 "OMX.google.h263.decoder",
75                 R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz,
76                 R.raw.video_352x288_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz);
77     }
78 
79     public Iterable<Codec> Mpeg4(CodecFactory factory) {
80         return factory.createCodecList(
81                 mContext,
82                 MediaFormat.MIMETYPE_VIDEO_MPEG4,
83                 "OMX.google.mpeg4.decoder",
84 
85                 R.raw.video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz,
86                 R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz);
87     }
88 
89     public Iterable<Codec> VP8(CodecFactory factory) {
90         return factory.createCodecList(
91                 mContext,
92                 MediaFormat.MIMETYPE_VIDEO_VP8,
93                 "OMX.google.vp8.decoder",
94                 R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz,
95                 R.raw.video_1280x720_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz);
96     }
97 
98     public Iterable<Codec> VP9(CodecFactory factory) {
99         return factory.createCodecList(
100                 mContext,
101                 MediaFormat.MIMETYPE_VIDEO_VP9,
102                 "OMX.google.vp9.decoder",
103                 R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz,
104                 R.raw.video_1280x720_webm_vp9_309kbps_25fps_vorbis_stereo_128kbps_48000hz);
105     }
106 
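    /* codec factories: ALL selects any available decoder; SW and HW restrict the
       selection to software-only and hardware-only decoders, respectively
       (see SWCodecFactory and HWCodecFactory) */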
107     CodecFactory ALL = new CodecFactory();
108     CodecFactory SW  = new SWCodecFactory();
109     CodecFactory HW  = new HWCodecFactory();
110 
111     public Iterable<Codec> H264()  { return H264(ALL);  }
112     public Iterable<Codec> HEVC()  { return HEVC(ALL);  }
113     public Iterable<Codec> VP8()   { return VP8(ALL);   }
114     public Iterable<Codec> VP9()   { return VP9(ALL);   }
115     public Iterable<Codec> Mpeg4() { return Mpeg4(ALL); }
116     public Iterable<Codec> H263()  { return H263(ALL);  }
117 
118     public Iterable<Codec> AllCodecs() {
119         return chain(H264(ALL), HEVC(ALL), VP8(ALL), VP9(ALL), Mpeg4(ALL), H263(ALL));
120     }
121 
122     public Iterable<Codec> SWCodecs() {
123         return chain(H264(SW), HEVC(SW), VP8(SW), VP9(SW), Mpeg4(SW), H263(SW));
124     }
125 
126     public Iterable<Codec> HWCodecs() {
127         return chain(H264(HW), HEVC(HW), VP8(HW), VP9(HW), Mpeg4(HW), H263(HW));
128     }
129 
130     /* tests for adaptive codecs */
131     Test adaptiveEarlyEos     = new EarlyEosTest().adaptive();
132     Test adaptiveEosFlushSeek = new EosFlushSeekTest().adaptive();
133     Test adaptiveSkipAhead    = new AdaptiveSkipTest(true /* forward */);
134     Test adaptiveSkipBack     = new AdaptiveSkipTest(false /* forward */);
135 
136     /* DRC tests for adaptive codecs */
137     Test adaptiveReconfigDrc      = new ReconfigDrcTest().adaptive();
138     Test adaptiveSmallReconfigDrc = new ReconfigDrcTest().adaptiveSmall();
139     Test adaptiveDrc      = new AdaptiveDrcTest(); /* adaptive */
140     Test adaptiveSmallDrc = new AdaptiveDrcTest().adaptiveSmall();
141 
142     /* tests for regular codecs */
143     Test earlyEos          = new EarlyEosTest();
144     Test eosFlushSeek      = new EosFlushSeekTest();
145     Test flushConfigureDrc = new ReconfigDrcTest();
146 
147     Test[] allTests = {
148         adaptiveEarlyEos,
149         adaptiveEosFlushSeek,
150         adaptiveSkipAhead,
151         adaptiveSkipBack,
152         adaptiveReconfigDrc,
153         adaptiveSmallReconfigDrc,
154         adaptiveDrc,
155         adaptiveSmallDrc,
156         earlyEos,
157         eosFlushSeek,
158         flushConfigureDrc,
159     };
160 
161     /* helpers to run sets of tests */
162     public void runEOS() { ex(AllCodecs(), new Test[] {
163         adaptiveEarlyEos,
164         adaptiveEosFlushSeek,
165         adaptiveReconfigDrc,
166         adaptiveSmallReconfigDrc,
167         earlyEos,
168         eosFlushSeek,
169         flushConfigureDrc,
170     }); }
171 
172     public void runAll() { ex(AllCodecs(), allTests); }
173     public void runSW()  { ex(SWCodecs(),  allTests); }
174     public void runHW()  { ex(HWCodecs(),  allTests); }
175 
176     public void sanityAll() { sanity = true; try { runAll(); } finally { sanity = false; } }
177     public void sanitySW()  { sanity = true; try { runSW();  } finally { sanity = false; } }
178     public void sanityHW()  { sanity = true; try { runHW();  } finally { sanity = false; } }
179 
180     public void runH264()  { ex(H264(),  allTests); }
181     public void runHEVC()  { ex(HEVC(),  allTests); }
182     public void runVP8()   { ex(VP8(),   allTests); }
183     public void runVP9()   { ex(VP9(),   allTests); }
184     public void runMpeg4() { ex(Mpeg4(), allTests); }
185     public void runH263()  { ex(H263(),  allTests); }
186 
187     public void onlyH264HW()  { ex(H264(HW),  allTests); }
188     public void onlyHEVCHW()  { ex(HEVC(HW),  allTests); }
189     public void onlyVP8HW()   { ex(VP8(HW),   allTests); }
190     public void onlyVP9HW()   { ex(VP9(HW),   allTests); }
191     public void onlyMpeg4HW() { ex(Mpeg4(HW), allTests); }
192     public void onlyH263HW()  { ex(H263(HW),  allTests); }
193 
194     public void onlyH264SW()  { ex(H264(SW),  allTests); }
195     public void onlyHEVCSW()  { ex(HEVC(SW),  allTests); }
196     public void onlyVP8SW()   { ex(VP8(SW),   allTests); }
197     public void onlyVP9SW()   { ex(VP9(SW),   allTests); }
198     public void onlyMpeg4SW() { ex(Mpeg4(SW), allTests); }
199     public void onlyH263SW()  { ex(H263(SW),  allTests); }
200 
201     public void bytebuffer() { ex(H264(SW), new EarlyEosTest().byteBuffer()); }
202     public void texture() { ex(H264(HW), new EarlyEosTest().texture()); }
203 
204     /* individual tests */
205     public void testH264_adaptiveEarlyEos()  { ex(H264(),  adaptiveEarlyEos); }
206     public void testHEVC_adaptiveEarlyEos()  { ex(HEVC(),  adaptiveEarlyEos); }
207     public void testVP8_adaptiveEarlyEos()   { ex(VP8(),   adaptiveEarlyEos); }
208     public void testVP9_adaptiveEarlyEos()   { ex(VP9(),   adaptiveEarlyEos); }
209     public void testMpeg4_adaptiveEarlyEos() { ex(Mpeg4(), adaptiveEarlyEos); }
210     public void testH263_adaptiveEarlyEos()  { ex(H263(),  adaptiveEarlyEos); }
211 
212     public void testH264_adaptiveEosFlushSeek()  { ex(H264(),  adaptiveEosFlushSeek); }
213     public void testHEVC_adaptiveEosFlushSeek()  { ex(HEVC(),  adaptiveEosFlushSeek); }
214     public void testVP8_adaptiveEosFlushSeek()   { ex(VP8(),   adaptiveEosFlushSeek); }
215     public void testVP9_adaptiveEosFlushSeek()   { ex(VP9(),   adaptiveEosFlushSeek); }
216     public void testMpeg4_adaptiveEosFlushSeek() { ex(Mpeg4(), adaptiveEosFlushSeek); }
217     public void testH263_adaptiveEosFlushSeek()  { ex(H263(),  adaptiveEosFlushSeek); }
218 
219     public void testH264_adaptiveSkipAhead()  { ex(H264(),  adaptiveSkipAhead); }
220     public void testHEVC_adaptiveSkipAhead()  { ex(HEVC(),  adaptiveSkipAhead); }
221     public void testVP8_adaptiveSkipAhead()   { ex(VP8(),   adaptiveSkipAhead); }
222     public void testVP9_adaptiveSkipAhead()   { ex(VP9(),   adaptiveSkipAhead); }
223     public void testMpeg4_adaptiveSkipAhead() { ex(Mpeg4(), adaptiveSkipAhead); }
224     public void testH263_adaptiveSkipAhead()  { ex(H263(),  adaptiveSkipAhead); }
225 
226     public void testH264_adaptiveSkipBack()  { ex(H264(),  adaptiveSkipBack); }
227     public void testHEVC_adaptiveSkipBack()  { ex(HEVC(),  adaptiveSkipBack); }
228     public void testVP8_adaptiveSkipBack()   { ex(VP8(),   adaptiveSkipBack); }
229     public void testVP9_adaptiveSkipBack()   { ex(VP9(),   adaptiveSkipBack); }
230     public void testMpeg4_adaptiveSkipBack() { ex(Mpeg4(), adaptiveSkipBack); }
231     public void testH263_adaptiveSkipBack()  { ex(H263(),  adaptiveSkipBack); }
232 
233     public void testH264_adaptiveReconfigDrc()  { ex(H264(),  adaptiveReconfigDrc); }
234     public void testHEVC_adaptiveReconfigDrc()  { ex(HEVC(),  adaptiveReconfigDrc); }
235     public void testVP8_adaptiveReconfigDrc()   { ex(VP8(),   adaptiveReconfigDrc); }
236     public void testVP9_adaptiveReconfigDrc()   { ex(VP9(),   adaptiveReconfigDrc); }
237     public void testMpeg4_adaptiveReconfigDrc() { ex(Mpeg4(), adaptiveReconfigDrc); }
238     public void testH263_adaptiveReconfigDrc()  { ex(H263(),  adaptiveReconfigDrc); }
239 
240     public void testH264_adaptiveSmallReconfigDrc()  { ex(H264(),  adaptiveSmallReconfigDrc); }
241     public void testHEVC_adaptiveSmallReconfigDrc()  { ex(HEVC(),  adaptiveSmallReconfigDrc); }
242     public void testVP8_adaptiveSmallReconfigDrc()   { ex(VP8(),   adaptiveSmallReconfigDrc); }
243     public void testVP9_adaptiveSmallReconfigDrc()   { ex(VP9(),   adaptiveSmallReconfigDrc); }
244     public void testMpeg4_adaptiveSmallReconfigDrc() { ex(Mpeg4(), adaptiveSmallReconfigDrc); }
245     public void testH263_adaptiveSmallReconfigDrc()  { ex(H263(),  adaptiveSmallReconfigDrc); }
246 
247     public void testH264_adaptiveDrc() { ex(H264(), adaptiveDrc); }
248     public void testHEVC_adaptiveDrc() { ex(HEVC(), adaptiveDrc); }
249     public void testVP8_adaptiveDrc()  { ex(VP8(),  adaptiveDrc); }
250     public void testVP9_adaptiveDrc()  { ex(VP9(),  adaptiveDrc); }
251     public void testMpeg4_adaptiveDrc() { ex(Mpeg4(), adaptiveDrc); }
252     public void testH263_adaptiveDrc() { ex(H263(), adaptiveDrc); }
253 
254     public void testH264_adaptiveDrcEarlyEos() { ex(H264(), new AdaptiveDrcEarlyEosTest()); }
255     public void testHEVC_adaptiveDrcEarlyEos() { ex(HEVC(), new AdaptiveDrcEarlyEosTest()); }
256     public void testVP8_adaptiveDrcEarlyEos()  { ex(VP8(),  new AdaptiveDrcEarlyEosTest()); }
257     public void testVP9_adaptiveDrcEarlyEos()  { ex(VP9(),  new AdaptiveDrcEarlyEosTest()); }
258 
259     public void testH264_adaptiveSmallDrc()  { ex(H264(),  adaptiveSmallDrc); }
260     public void testHEVC_adaptiveSmallDrc()  { ex(HEVC(),  adaptiveSmallDrc); }
261     public void testVP8_adaptiveSmallDrc()   { ex(VP8(),   adaptiveSmallDrc); }
262     public void testVP9_adaptiveSmallDrc()   { ex(VP9(),   adaptiveSmallDrc); }
263 
264     public void testH264_earlyEos()  { ex(H264(),  earlyEos); }
265     public void testHEVC_earlyEos()  { ex(HEVC(),  earlyEos); }
266     public void testVP8_earlyEos()   { ex(VP8(),   earlyEos); }
267     public void testVP9_earlyEos()   { ex(VP9(),   earlyEos); }
268     public void testMpeg4_earlyEos() { ex(Mpeg4(), earlyEos); }
269     public void testH263_earlyEos()  { ex(H263(),  earlyEos); }
270 
271     public void testH264_eosFlushSeek()  { ex(H264(),  eosFlushSeek); }
272     public void testHEVC_eosFlushSeek()  { ex(HEVC(),  eosFlushSeek); }
273     public void testVP8_eosFlushSeek()   { ex(VP8(),   eosFlushSeek); }
274     public void testVP9_eosFlushSeek()   { ex(VP9(),   eosFlushSeek); }
275     public void testMpeg4_eosFlushSeek() { ex(Mpeg4(), eosFlushSeek); }
276     public void testH263_eosFlushSeek()  { ex(H263(),  eosFlushSeek); }
277 
278     public void testH264_flushConfigureDrc()  { ex(H264(),  flushConfigureDrc); }
279     public void testHEVC_flushConfigureDrc()  { ex(HEVC(),  flushConfigureDrc); }
280     public void testVP8_flushConfigureDrc()   { ex(VP8(),   flushConfigureDrc); }
281     public void testVP9_flushConfigureDrc()   { ex(VP9(),   flushConfigureDrc); }
282     public void testMpeg4_flushConfigureDrc() { ex(Mpeg4(), flushConfigureDrc); }
283     public void testH263_flushConfigureDrc()  { ex(H263(),  flushConfigureDrc); }
284 
285     /* only use unchecked exceptions to allow brief test methods */
286     private void ex(Iterable<Codec> codecList, Test test) {
287         ex(codecList, new Test[] { test } );
288     }
289 
290     private void ex(Iterable<Codec> codecList, Test[] testList) {
291         if (codecList == null) {
292             Log.i(TAG, "CodecList was empty. Skipping test.");
293             return;
294         }
295 
296         TestList tests = new TestList();
297         for (Codec c : codecList) {
298             for (Test test : testList) {
299                 if (test.isValid(c)) {
300                     test.addTests(tests, c);
301                 }
302             }
303         }
304         try {
305             tests.run();
306         } catch (Throwable t) {
307             throw new RuntimeException(t);
308         }
309     }
310 
311     /* need an inner class to have access to the activity */
312     abstract class ActivityTest extends Test {
313         TestSurface mNullSurface = new ActivitySurface(null);
314         protected TestSurface getSurface() {
315             if (mUseSurface) {
316                 return new ActivitySurface(getActivity().getSurfaceHolder().getSurface());
317             } else if (mUseSurfaceTexture) {
318                 return new DecoderSurface(1280, 720, mCRC);
319             }
320             return mNullSurface;
321         }
322     }
323 
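    // number of frames queued in each test sequence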
324     static final int NUM_FRAMES = 50;
325 
326     /**
327      * Queue some frames with an EOS on the last one.  Test that we have decoded as many
328      * frames as we queued.  This tests the EOS handling of the codec to see if all queued
329      * (and out-of-order) frames are actually decoded and returned.
330      *
331      * Also test flushing prior to sending CSD, and immediately after sending CSD.
332      */
333     class EarlyEosTest extends ActivityTest {
334         // using bitfields to create a directed state graph that terminates at FLUSH_NEVER
335         static final int FLUSH_BEFORE_CSD = (1 << 1);
336         static final int FLUSH_AFTER_CSD = (1 << 0);
337         static final int FLUSH_NEVER = 0;
338 
339         public boolean isValid(Codec c) {
340             return getFormat(c) != null;
341         }
342         public void addTests(TestList tests, final Codec c) {
343             int state = FLUSH_BEFORE_CSD;
344             for (int i = NUM_FRAMES / 2; i > 0; --i, state >>= 1) {
345                 final int queuedFrames = i;
346                 final int earlyFlushMode = state;
347                 tests.add(
348                     new Step("testing early EOS at " + queuedFrames, this, c) {
349                         public void run() {
350                             Decoder decoder = new Decoder(c.name);
351                             try {
352                                 MediaFormat fmt = stepFormat();
353                                 MediaFormat configFmt = fmt;
354                                 if (earlyFlushMode == FLUSH_BEFORE_CSD) {
355                                     // flush before CSD requires not submitting CSD with configure
356                                     configFmt = Media.removeCSD(fmt);
357                                 }
358                                 decoder.configureAndStart(configFmt, stepSurface());
359                                 if (earlyFlushMode != FLUSH_NEVER) {
360                                     decoder.flush();
361                                     // We must always queue CSD after a flush that may happen
362                                     // before the output-format-changed notification is received.
363                                     // This should work even after the format change is received.
364                                     decoder.queueCSD(fmt);
365                                 }
366                                 int decodedFrames = -decoder.queueInputBufferRange(
367                                         stepMedia(),
368                                         0 /* startFrame */,
369                                         queuedFrames,
370                                         true /* sendEos */,
371                                         true /* waitForEos */);
372                                 if (decodedFrames <= 0) {
373                                     Log.w(TAG, "Did not receive EOS -- negating frame count");
374                                 }
375                                 decoder.stop();
376                                 if (decodedFrames != queuedFrames) {
377                                     warn("decoded " + decodedFrames + " frames out of " +
378                                             queuedFrames + " queued");
379                                 }
380                             } finally {
381                                 warn(decoder.getWarnings());
382                                 decoder.releaseQuietly();
383                             }
384                         }
385                     });
386                 if (sanity) {
387                     i >>= 1;
388                 }
389             }
390         }
391     };
392 
393     /**
394      * Similar to EarlyEosTest, but we keep the component alive and running in between the steps.
395      * This is how seeking should be done if all frames must be output.  This also tests that
396      * PTS can be repeated after flush.
397      */
398     class EosFlushSeekTest extends ActivityTest {
399         Decoder mDecoder; // test state
400         public boolean isValid(Codec c) {
401             return getFormat(c) != null;
402         }
403         public void addTests(TestList tests, final Codec c) {
404             tests.add(
405                 new Step("testing EOS & flush before seek - init", this, c) {
406                     public void run() {
407                         mDecoder = new Decoder(c.name);
408                         mDecoder.configureAndStart(stepFormat(), stepSurface());
409                     }});
410 
411             for (int i = NUM_FRAMES; i > 0; i--) {
412                 final int queuedFrames = i;
413                 tests.add(
414                     new Step("testing EOS & flush before seeking after " + queuedFrames +
415                             " frames", this, c) {
416                         public void run() {
417                             int decodedFrames = -mDecoder.queueInputBufferRange(
418                                     stepMedia(),
419                                     0 /* startFrame */,
420                                     queuedFrames,
421                                     true /* sendEos */,
422                                     true /* waitForEos */);
423                             if (decodedFrames != queuedFrames) {
424                                 warn("decoded " + decodedFrames + " frames out of " +
425                                         queuedFrames + " queued");
426                             }
427                             warn(mDecoder.getWarnings());
428                             mDecoder.clearWarnings();
429                             mDecoder.flush();
430                         }
431                     });
432                 if (sanity) {
433                     i >>= 1;
434                 }
435             }
436 
437             tests.add(
438                 new Step("testing EOS & flush before seek - finally", this, c) {
439                     public void run() {
440                         try {
441                             mDecoder.stop();
442                         } finally {
443                             mDecoder.release();
444                         }
445                     }});
446         }
447     };
448 
449     /**
450      * Similar to EosFlushSeekTest, but we change the media size between the steps.
451      * This is how dynamic resolution switching can be done on codecs that do not support
452      * adaptive playback.
453      */
454     class ReconfigDrcTest extends ActivityTest {
455         Decoder mDecoder;  // test state
456         public boolean isValid(Codec c) {
457             return getFormat(c) != null && c.mediaList.length > 1;
458         }
459         public void addTests(TestList tests, final Codec c) {
460             tests.add(
461                 new Step("testing DRC with reconfigure - init", this, c) {
462                     public void run() {
463                         mDecoder = new Decoder(c.name);
464                     }});
465 
466             for (int i = NUM_FRAMES, ix = 0; i > 0; i--, ix++) {
467                 final int queuedFrames = i;
468                 final int mediaIx = ix % c.mediaList.length;
469                 tests.add(
470                     new Step("testing DRC with reconfigure after " + queuedFrames + " frames",
471                             this, c, mediaIx) {
472                         public void run() {
473                             try {
474                                 mDecoder.configureAndStart(stepFormat(), stepSurface());
475                                 int decodedFrames = -mDecoder.queueInputBufferRange(
476                                         stepMedia(),
477                                         0 /* startFrame */,
478                                         queuedFrames,
479                                         true /* sendEos */,
480                                         true /* waitForEos */);
481                                 if (decodedFrames != queuedFrames) {
482                                     warn("decoded " + decodedFrames + " frames out of " +
483                                             queuedFrames + " queued");
484                                 }
485                                 warn(mDecoder.getWarnings());
486                                 mDecoder.clearWarnings();
487                                 mDecoder.flush();
488                             } finally {
489                                 mDecoder.stop();
490                             }
491                         }
492                     });
493                 if (sanity) {
494                     i >>= 1;
495                 }
496             }
497             tests.add(
498                 new Step("testing DRC with reconfigure - finally", this, c) {
499                     public void run() {
500                         mDecoder.release();
501                     }});
502         }
503     };
504 
505     /* ADAPTIVE-ONLY TESTS - only run on codecs that support adaptive playback */
506 
507     /**
508      * Test dynamic resolution change support.  Queue various sized media segments
509      * with different resolutions, verify that all queued frames were decoded.  Here
510      * PTS will grow between segments.
511      */
512     class AdaptiveDrcTest extends ActivityTest {
513         Decoder mDecoder;
514         int mAdjustTimeUs;
515         int mDecodedFrames;
516         int mQueuedFrames;
517 
518         public AdaptiveDrcTest() {
519             super();
520             adaptive();
521         }
522         public boolean isValid(Codec c) {
523             checkAdaptiveFormat();
524             return c.adaptive && c.mediaList.length > 1;
525         }
526         public void addTests(TestList tests, final Codec c) {
527             tests.add(
528                 new Step("testing DRC with no reconfigure - init", this, c) {
529                     public void run() throws Throwable {
530                         // FIXME wait 2 seconds to allow system to free up previous codecs
531                         try {
532                             Thread.sleep(2000);
533                         } catch (InterruptedException e) {}
534                         mDecoder = new Decoder(c.name);
535                         mDecoder.configureAndStart(stepFormat(), stepSurface());
536                         mAdjustTimeUs = 0;
537                         mDecodedFrames = 0;
538                         mQueuedFrames = 0;
539                     }});
540 
541             for (int i = NUM_FRAMES, ix = 0; i >= MIN_FRAMES_BEFORE_DRC; i--, ix++) {
542                 final int mediaIx = ix % c.mediaList.length;
543                 final int segmentSize = i;
544                 tests.add(
545                     new Step("testing DRC with no reconfigure after " + i + " frames",
546                             this, c, mediaIx) {
547                         public void run() throws Throwable {
548                             mQueuedFrames += segmentSize;
549                             boolean lastSequence = segmentSize == MIN_FRAMES_BEFORE_DRC;
550                             if (sanity) {
551                                 lastSequence = (segmentSize >> 1) <= MIN_FRAMES_BEFORE_DRC;
552                             }
553                             int frames = mDecoder.queueInputBufferRange(
554                                     stepMedia(),
555                                     0 /* startFrame */,
556                                     segmentSize,
557                                     lastSequence /* sendEos */,
558                                     lastSequence /* expectEos */,
559                                     mAdjustTimeUs);
560                             if (lastSequence && frames >= 0) {
561                                 warn("did not receive EOS, received " + frames + " frames");
562                             } else if (!lastSequence && frames < 0) {
563                                 warn("received EOS, received " + (-frames) + " frames");
564                             }
565                             warn(mDecoder.getWarnings());
566                             mDecoder.clearWarnings();
567 
568                             mDecodedFrames += Math.abs(frames);
569                             mAdjustTimeUs += 1 + stepMedia().getTimestampRangeValue(
570                                     0, segmentSize, Media.RANGE_END);
571                         }});
572                 if (sanity) {
573                     i >>= 1;
574                 }
575             }
576             tests.add(
577                 new Step("testing DRC with no reconfigure - finally", this, c) {
578                     public void run() throws Throwable {
579                         if (mDecodedFrames != mQueuedFrames) {
580                             warn("decoded " + mDecodedFrames + " frames out of " +
581                                     mQueuedFrames + " queued");
582                         }
583                         try {
584                             mDecoder.stop();
585                         } finally {
586                             mDecoder.release();
587                         }
588                     }
589                 });
590         }
591     };
592 
593     /**
594      * Queue EOS shortly after a dynamic resolution change.  Test that all frames were
595      * decoded.
596      */
597     class AdaptiveDrcEarlyEosTest extends ActivityTest {
598         public AdaptiveDrcEarlyEosTest() {
599             super();
600             adaptive();
601         }
602         public boolean isValid(Codec c) {
603             checkAdaptiveFormat();
604             return c.adaptive && c.mediaList.length > 1;
605         }
606         public Step testStep(final Codec c, final int framesBeforeDrc,
607                 final int framesBeforeEos) {
608             return new Step("testing DRC with no reconfigure after " + framesBeforeDrc +
609                     " frames and subsequent EOS after " + framesBeforeEos + " frames",
610                     this, c) {
611                 public void run() throws Throwable {
612                     Decoder decoder = new Decoder(c.name);
613                     int queuedFrames = framesBeforeDrc + framesBeforeEos;
614                     int framesA = 0;
615                     int framesB = 0;
616                     try {
617                         decoder.configureAndStart(stepFormat(), stepSurface());
618                         Media media = c.mediaList[0];
619 
620                         framesA = decoder.queueInputBufferRange(
621                                 media,
622                                 0 /* startFrame */,
623                                 framesBeforeDrc,
624                                 false /* sendEos */,
625                                 false /* expectEos */);
626                         if (framesA < 0) {
627                             warn("received unexpected EOS, received " + (-framesA) + " frames");
628                         }
629                         long adjustTimeUs = 1 + media.getTimestampRangeValue(
630                                 0, framesBeforeDrc, Media.RANGE_END);
631 
632                         media = c.mediaList[1];
633                         framesB = decoder.queueInputBufferRange(
634                                 media,
635                                 0 /* startFrame */,
636                                 framesBeforeEos,
637                                 true /* sendEos */,
638                                 true /* expectEos */,
639                                 adjustTimeUs);
640                         if (framesB >= 0) {
641                             warn("did not receive EOS, received " + framesB + " frames");
642                         }
643                         decoder.stop();
644                         warn(decoder.getWarnings());
645                     } finally {
646                         int decodedFrames = Math.abs(framesA) + Math.abs(framesB);
647                         if (decodedFrames != queuedFrames) {
648                             warn("decoded " + decodedFrames + " frames out of " + queuedFrames +
649                                     " queued");
650                         }
651                         decoder.release();
652                     }
653                 }
654             };
655         }
656         public void addTests(TestList tests, Codec c) {
657             for (int drcFrame = 6; drcFrame >= MIN_FRAMES_BEFORE_DRC; drcFrame--) {
658                 for (int eosFrame = 6; eosFrame >= 1; eosFrame--) {
659                     tests.add(testStep(c, drcFrame, eosFrame));
660                 }
661             }
662         }
663     };
664 
665     /**
666      * Similar to AdaptiveDrcTest, but tests that PTS can change at adaptive boundaries both
667      * forward and backward without the need to flush.
668      */
669     class AdaptiveSkipTest extends ActivityTest {
670         boolean forward;
671         public AdaptiveSkipTest(boolean fwd) {
672             forward = fwd;
673             adaptive();
674         }
675         public boolean isValid(Codec c) {
676             checkAdaptiveFormat();
677             return c.adaptive;
678         }
679         Decoder mDecoder;
680         int mAdjustTimeUs = 0;
681         int mDecodedFrames = 0;
682         int mQueuedFrames = 0;
683         public void addTests(TestList tests, final Codec c) {
684             tests.add(
685                 new Step("testing flushless skipping - init", this, c) {
686                     public void run() throws Throwable {
687                         mDecoder = new Decoder(c.name);
688                         mDecoder.configureAndStart(stepFormat(), stepSurface());
689                         mAdjustTimeUs = 0;
690                         mDecodedFrames = 0;
691                         mQueuedFrames = 0;
692                     }});
693 
694             for (int i = 2, ix = 0; i <= NUM_FRAMES; i++, ix++) {
695                 final int mediaIx = ix % c.mediaList.length;
696                 final int segmentSize = i;
697                 final boolean lastSequence;
698                 if (sanity) {
699                     lastSequence = (segmentSize << 1) + 1 > NUM_FRAMES;
700                 } else {
701                     lastSequence = segmentSize >= NUM_FRAMES;
702                 }
703                 tests.add(
704                     new Step("testing flushless skipping " + (forward ? "forward" : "backward") +
705                             " after " + i + " frames", this, c) {
706                         public void run() throws Throwable {
707                             int frames = mDecoder.queueInputBufferRange(
708                                 stepMedia(),
709                                 0 /* startFrame */,
710                                 segmentSize,
711                                 lastSequence /* sendEos */,
712                                 lastSequence /* expectEos */,
713                                 mAdjustTimeUs);
714                             if (lastSequence && frames >= 0) {
715                                 warn("did not receive EOS, received " + frames + " frames");
716                             } else if (!lastSequence && frames < 0) {
717                                 warn("received unexpected EOS, received " + (-frames) + " frames");
718                             }
719                             warn(mDecoder.getWarnings());
720                             mDecoder.clearWarnings();
721 
722                             mQueuedFrames += segmentSize;
723                             mDecodedFrames += Math.abs(frames);
724                             if (forward) {
725                                 mAdjustTimeUs += 10000000 + stepMedia().getTimestampRangeValue(
726                                         0, segmentSize, Media.RANGE_DURATION);
727                             }
728                         }});
729                 if (sanity) {
730                     i <<= 1;
731                 }
732             }
733 
734             tests.add(
735                 new Step("testing flushless skipping - finally", this, c) {
736                     public void run() throws Throwable {
737                         if (mDecodedFrames != mQueuedFrames) {
738                             warn("decoded " + mDecodedFrames + " frames out of " + mQueuedFrames +
739                                     " queued");
740                         }
741                         try {
742                             mDecoder.stop();
743                         } finally {
744                             mDecoder.release();
745                         }
746                     }});
747         }
748     };
749 
750     /* computes a CRC32 checksum over the first size bytes of buf (used for ByteBuffer output) */
751     static long checksum(ByteBuffer buf, int size, CRC32 crc) {
752         assertTrue(size >= 0);
753         assertTrue(size <= buf.capacity());
754         crc.reset();
755         if (buf.hasArray()) {
756             crc.update(buf.array(), buf.arrayOffset(), size);
757         } else {
758             int pos = buf.position();
759             buf.rewind();
760             final int rdsize = Math.min(4096, size);
761             byte[] bb = new byte[rdsize];
762             int chk;
763             for (int i = 0; i < size; i += chk) {
764                 chk = Math.min(rdsize, size - i);
765                 buf.get(bb, 0, chk);
766                 crc.update(bb, 0, chk);
767             }
768             buf.position(pos);
769         }
770         return crc.getValue();
771     }
772 
773     CRC32 mCRC;
774 
775     @Override
776     protected void setUp() throws Exception {
777         super.setUp();
778         mCRC = new CRC32();
779     }
780 
781     /* ====================================================================== */
782     /*                              UTILITY FUNCTIONS                         */
783     /* ====================================================================== */
784     public static String collectionString(Collection<?> c) {
785         StringBuilder res = new StringBuilder("[");
786         boolean subsequent = false;
787         for (Object o: c) {
788             if (subsequent) {
789                 res.append(", ");
790             }
791             res.append(o);
792             subsequent = true;
793         }
794         return res.append("]").toString();
795     }
796 
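    /* renders the first len bytes of buf as a hex string of the form {xx,xx,...},
       appending "..." when the buffer holds more than len bytes */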
797     static String byteBufferToString(ByteBuffer buf, int start, int len) {
798         int oldPosition = buf.position();
799         buf.position(start);
800         int strlen = 2; // {}
801         boolean ellipsis = len < buf.limit();
802         if (ellipsis) {
803             strlen += 3; // ...
804         } else {
805             len = buf.limit();
806         }
807         strlen += 3 * len - (len > 0 ? 1 : 0); // XX,XX
808         char[] res = new char[strlen];
809         res[0] = '{';
810         res[strlen - 1] = '}';
811         if (ellipsis) {
812             res[strlen - 2] = res[strlen - 3] = res[strlen - 4] = '.';
813         }
814         for (int i = 1; i < len; i++) {
815             res[i * 3] = ',';
816         }
817         for (int i = 0; i < len; i++) {
818             byte b = buf.get();
819             int d = (b >> 4) & 15;
820             res[i * 3 + 1] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
821             d = (b & 15);
822             res[i * 3 + 2] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
823         }
824         buf.position(oldPosition);
825         return new String(res);
826     }
827 
828     static <E> Iterable<E> chain(Iterable<E> ... iterables) {
829         /* simple chainer using ArrayList */
830         ArrayList<E> items = new ArrayList<E>();
831         for (Iterable<E> it: iterables) {
832             for (E el: it) {
833                 items.add(el);
834             }
835         }
836         return items;
837     }
838 
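    /* wraps a MediaCodec decoder: tracks queued and rendered timestamps, collects
       warnings, and verifies onFrameRendered notifications */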
839     class Decoder implements MediaCodec.OnFrameRenderedListener {
840         private final static String TAG = "AdaptiveDecoder";
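        // dequeue timeouts: 5 ms for regular buffers, 1 s when queueing CSD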
841         final long kTimeOutUs = 5000;
842         final long kCSDTimeOutUs = 1000000;
843         MediaCodec mCodec;
844         ByteBuffer[] mInputBuffers;
845         ByteBuffer[] mOutputBuffers;
846         TestSurface mSurface;
847         boolean mDoChecksum;
848         boolean mQueuedEos;
849         ArrayList<Long> mTimeStamps;
850         ArrayList<String> mWarnings;
851         Vector<Long> mRenderedTimeStamps; // using Vector as it is implicitly synchronized
852         long mLastRenderNanoTime;
853         int mFramesNotifiedRendered;
854 
855         public Decoder(String codecName) {
856             MediaCodec codec = null;
857             try {
858                 codec = MediaCodec.createByCodecName(codecName);
859             } catch (Exception e) {
860                 throw new RuntimeException("couldn't create codec " + codecName, e);
861             }
862             Log.i(TAG, "using codec: " + codec.getName());
863             mCodec = codec;
864             mDoChecksum = false;
865             mQueuedEos = false;
866             mTimeStamps = new ArrayList<Long>();
867             mWarnings = new ArrayList<String>();
868             mRenderedTimeStamps = new Vector<Long>();
869             mLastRenderNanoTime = System.nanoTime();
870             mFramesNotifiedRendered = 0;
871 
872             codec.setOnFrameRenderedListener(this, null);
873         }
874 
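        // render notifications must match a previously rendered timestamp, arrive in
        // increasing time order, and refer to a render within roughly the last second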
875         public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
876             final long NSECS_IN_1SEC = 1000000000;
877             if (!mRenderedTimeStamps.remove(presentationTimeUs)) {
878                 warn("invalid timestamp " + presentationTimeUs + ", queued " +
879                         collectionString(mRenderedTimeStamps));
880             }
881             assert nanoTime > mLastRenderNanoTime;
882             mLastRenderNanoTime = nanoTime;
883             ++mFramesNotifiedRendered;
884             assert nanoTime > System.nanoTime() - NSECS_IN_1SEC;
885         }
886 
887         public String getName() {
888             return mCodec.getName();
889         }
890 
891         public Iterable<String> getWarnings() {
892             return mWarnings;
893         }
894 
895         private void warn(String warning) {
896             mWarnings.add(warning);
897             Log.w(TAG, warning);
898         }
899 
900         public void clearWarnings() {
901             mWarnings.clear();
902         }
903 
904         public void configureAndStart(MediaFormat format, TestSurface surface) {
905             mSurface = surface;
906             Log.i(TAG, "configure(" + format + ", " + mSurface.getSurface() + ")");
907             mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
908             Log.i(TAG, "start");
909             mCodec.start();
910 
911             // inject some minimal setOutputSurface test
912             // TODO: change this test to also change the surface midstream
913             try {
914                 mCodec.setOutputSurface(null);
915                 fail("should not be able to set surface to NULL");
916             } catch (IllegalArgumentException e) {}
917             mCodec.setOutputSurface(mSurface.getSurface());
918 
919             mInputBuffers = mCodec.getInputBuffers();
920             mOutputBuffers = mCodec.getOutputBuffers();
921             Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
922                   mInputBuffers[0].capacity() + "] and " +
923                   mOutputBuffers.length + " output[" +
924                   (mOutputBuffers[0] == null ? null : mOutputBuffers[0].capacity()) + "]");
925             mQueuedEos = false;
926             mRenderedTimeStamps.clear();
927             mLastRenderNanoTime = System.nanoTime();
928             mFramesNotifiedRendered = 0;
929         }
930 
931         public void stop() {
932             Log.i(TAG, "stop");
933             mCodec.stop();
934             // if more than 32 frames have been rendered, at least one render
935             // notification should have been received.
936             if (mRenderedTimeStamps.size() > 32 && mFramesNotifiedRendered == 0) {
937                 fail("rendered " + mRenderedTimeStamps.size() +
938                         " frames, but none have been notified.");
939             }
940         }
941 
942         public void flush() {
943             Log.i(TAG, "flush");
944             mCodec.flush();
945             mQueuedEos = false;
946             mTimeStamps.clear();
947         }
948 
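        /* dequeues one output buffer, renders or checksums it as appropriate, and
           returns a short description of the frame (or null if no frame was available) */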
949         public String dequeueAndReleaseOutputBuffer(MediaCodec.BufferInfo info) {
950             int ix = mCodec.dequeueOutputBuffer(info, kTimeOutUs);
951             if (ix == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
952                 mOutputBuffers = mCodec.getOutputBuffers();
953                 Log.d(TAG, "output buffers have changed.");
954                 return null;
955             } else if (ix == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
956                 MediaFormat format = mCodec.getOutputFormat();
957                 Log.d(TAG, "output format has changed to " + format);
958                 int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
959                 mDoChecksum = isRecognizedFormat(colorFormat);
960                 return null;
961             } else if (ix < 0) {
962                 Log.v(TAG, "no output");
963                 return null;
964             }
965             /* create checksum */
966             long sum = 0;
967 
968 
969             Log.v(TAG, "dequeue #" + ix + " => { [" + info.size + "] flags=" + info.flags +
970                     " @" + info.presentationTimeUs + "}");
971 
972             // we get a nonzero size for valid decoded frames
973             boolean doRender = (info.size != 0);
974             if (mSurface.getSurface() == null) {
975                 if (mDoChecksum) {
976                     sum = checksum(mOutputBuffers[ix], info.size, mCRC);
977                 }
978                 mCodec.releaseOutputBuffer(ix, doRender);
979             } else if (doRender) {
980                 // If using SurfaceTexture, as soon as we call releaseOutputBuffer, the
981                 // buffer will be forwarded to SurfaceTexture to convert to a texture.
982                 // The API doesn't guarantee that the texture will be available before
983                 // the call returns, so we need to wait for the onFrameAvailable callback
984                 // to fire.  If we don't wait, we risk dropping frames.
985                 mSurface.prepare();
986                 mCodec.releaseOutputBuffer(ix, doRender);
987                 mSurface.waitForDraw();
988                 if (mDoChecksum) {
989                     sum = mSurface.checksum();
990                 }
991             } else {
992                 mCodec.releaseOutputBuffer(ix, doRender);
993             }
994 
995             if (doRender) {
996                 mRenderedTimeStamps.add(info.presentationTimeUs);
997                 if (!mTimeStamps.remove(info.presentationTimeUs)) {
998                     warn("invalid timestamp " + info.presentationTimeUs + ", queued " +
999                             collectionString(mTimeStamps));
1000                 }
1001             }
1002 
1003             return String.format(Locale.US, "{pts=%d, flags=%x, data=0x%x}",
1004                                  info.presentationTimeUs, info.flags, sum);
1005         }
1006 
1007         /* returns true iff queued a frame */
1008         public boolean queueInputBuffer(Media media, int frameIx, boolean EOS) {
1009             return queueInputBuffer(media, frameIx, EOS, 0);
1010         }
1011 
1012         public boolean queueInputBuffer(Media media, int frameIx, boolean EOS, long adjustTimeUs) {
1013             if (mQueuedEos) {
1014                 return false;
1015             }
1016 
1017             int ix = mCodec.dequeueInputBuffer(kTimeOutUs);
1018 
1019             if (ix < 0) {
1020                 return false;
1021             }
1022 
1023             ByteBuffer buf = mInputBuffers[ix];
1024             Media.Frame frame = media.getFrame(frameIx);
1025             buf.clear();
1026 
1027             long presentationTimeUs = adjustTimeUs;
1028             int flags = 0;
1029             if (frame != null) {
1030                 buf.put((ByteBuffer)frame.buf.clear());
1031                 presentationTimeUs += frame.presentationTimeUs;
1032                 flags = frame.flags;
1033             }
1034 
1035             if (EOS) {
1036                 flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
1037                 mQueuedEos = true;
1038             }
1039 
1040             mTimeStamps.add(presentationTimeUs);
1041             Log.v(TAG, "queue { [" + buf.position() + "]=" + byteBufferToString(buf, 0, 16) +
1042                     " flags=" + flags + " @" + presentationTimeUs + "} => #" + ix);
1043             mCodec.queueInputBuffer(
1044                     ix, 0 /* offset */, buf.position(), presentationTimeUs, flags);
1045             return true;
1046         }
1047 
1048         /* returns the number of frames received, negated if EOS was received */
1049         public int queueInputBufferRange(
1050                 Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
1051                 boolean waitForEos) {
1052             return queueInputBufferRange(media, frameStartIx, frameEndIx, sendEosAtEnd, waitForEos, 0);
1053         }
1054 
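        /* manually queues all codec-specific-data buffers (csd-0, csd-1, ...) found in the format */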
1055         public void queueCSD(MediaFormat format) {
1056             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1057             for (int csdIx = 0; ; ++csdIx) {
1058                 ByteBuffer csdBuf = format.getByteBuffer("csd-" + csdIx);
1059                 if (csdBuf == null) {
1060                     break;
1061                 }
1062 
1063                 int ix = mCodec.dequeueInputBuffer(kCSDTimeOutUs);
1064                 if (ix < 0) {
1065                     fail("Could not dequeue input buffer for CSD #" + csdIx);
1066                     return;
1067                 }
1068 
1069                 ByteBuffer buf = mInputBuffers[ix];
1070                 buf.clear();
1071                 buf.put((ByteBuffer)csdBuf.clear());
1072                 Log.v(TAG, "queue-CSD { [" + buf.position() + "]=" +
1073                         byteBufferToString(buf, 0, 16) + "} => #" + ix);
1074                 mCodec.queueInputBuffer(
1075                         ix, 0 /* offset */, buf.position(), 0 /* timeUs */,
1076                         MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
1077             }
1078         }
1079 
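        /* as above, but adds adjustTimeUs to every queued frame's presentation timestamp */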
1080         public int queueInputBufferRange(
1081                 Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
1082                 boolean waitForEos, long adjustTimeUs) {
1083             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1084             int frameIx = frameStartIx;
1085             int numFramesDecoded = 0;
1086             boolean sawOutputEos = false;
1087             int deadDecoderCounter = 0;
1088             ArrayList<String> frames = new ArrayList<String>();
1089             while ((waitForEos && !sawOutputEos) || frameIx < frameEndIx) {
1090                 if (frameIx < frameEndIx) {
1091                     if (queueInputBuffer(
1092                             media,
1093                             frameIx,
1094                             sendEosAtEnd && (frameIx + 1 == frameEndIx),
1095                             adjustTimeUs)) {
1096                         frameIx++;
1097                     }
1098                 }
1099 
1100                 String buf = dequeueAndReleaseOutputBuffer(info);
1101                 if (buf != null) {
1102                     // Some decoders output a 0-sized buffer at the end. Disregard those.
1103                     if (info.size > 0) {
1104                         deadDecoderCounter = 0;
1105                         numFramesDecoded++;
1106                     }
1107 
1108                     if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1109                         Log.d(TAG, "saw output EOS.");
1110                         sawOutputEos = true;
1111                     }
1112                 }
1113                 if (++deadDecoderCounter >= 100) {
1114                     warn("have not received an output frame for a while");
1115                     break;
1116                 }
1117             }
1118 
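            // tolerate a small number of dropped frames (up to 16) before failing outright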
1119             if (numFramesDecoded < frameEndIx - frameStartIx - 16) {
1120                 fail("Queued " + (frameEndIx - frameStartIx) + " frames but only received " +
1121                         numFramesDecoded);
1122             }
1123             return (sawOutputEos ? -1 : 1) * numFramesDecoded;
1124         }
1125 
1126         void release() {
1127             Log.i(TAG, "release");
1128             mCodec.release();
1129             mSurface.release();
1130             mInputBuffers = null;
1131             mOutputBuffers = null;
1132             mCodec = null;
1133             mSurface = null;
1134         }
1135 
1136         // don't fail on exceptions in release()
1137         void releaseQuietly() {
1138             try {
1139                 Log.i(TAG, "release");
1140                 mCodec.release();
1141             } catch (Throwable e) {
1142                 Log.e(TAG, "Exception while releasing codec", e);
1143             }
1144             mSurface.release();
1145             mInputBuffers = null;
1146             mOutputBuffers = null;
1147             mCodec = null;
1148             mSurface = null;
1149         }
1150     };
1151 
1152     /* from EncodeDecodeTest */
1153     private static boolean isRecognizedFormat(int colorFormat) {
1154         switch (colorFormat) {
1155             // these are the formats we know how to handle for this test
1156             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
1157             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
1158             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
1159             case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
1160             case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
1161                 return true;
1162             default:
1163                 return false;
1164         }
1165     }
1166 
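    /* Decodes the first eosframe frames of the media with the named codec onto the given
       test surface and returns the number of output frames produced. */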
1167     private int countFrames(
1168             String codecName, MediaCodecInfo codecInfo, Media media, int eosframe, TestSurface s)
1169             throws Exception {
1170         Decoder codec = new Decoder(codecName);
1171         codec.configureAndStart(media.getFormat(), s /* surface */);
1172 
1173         int numframes = codec.queueInputBufferRange(
1174                 media, 0, eosframe, true /* sendEos */, true /* waitForEos */);
1175         if (numframes >= 0) {
1176             Log.w(TAG, "Did not receive EOS");
1177         } else {
1178             numframes *= -1;
1179         }
1180 
1181         codec.stop();
1182         codec.release();
1183         return numframes;
1184     }
1185 }
1186 
1187 /* ====================================================================== */
1188 /*                             Video Media Asset                          */
1189 /* ====================================================================== */
1190 class Media {
1191     private final static String TAG = "AdaptiveMedia";
1192     private MediaFormat mFormat;
1193     private MediaFormat mAdaptiveFormat;
1194     static class Frame {
1195         long presentationTimeUs;
1196         int flags;
1197         ByteBuffer buf;
1198         public Frame(long _pts, int _flags, ByteBuffer _buf) {
1199             presentationTimeUs = _pts;
1200             flags = _flags;
1201             buf = _buf;
1202         }
1203     };
1204     private Frame[] mFrames;
1205 
1206     public Frame getFrame(int ix) {
1207         /* safe even for short samples, since unfilled entries of mFrames stay null */
1208         if (ix >= 0 && ix < mFrames.length) {
1209             return mFrames[ix];
1210         }
1211         return null;
1212     }
1213     private Media(MediaFormat format, MediaFormat adaptiveFormat, int numFrames) {
1214         /* need separate copies of format as once we add adaptive flags to
1215            MediaFormat, we cannot remove them */
1216         mFormat = format;
1217         mAdaptiveFormat = adaptiveFormat;
1218         mFrames = new Frame[numFrames];
1219     }
1220 
1221     public MediaFormat getFormat() {
1222         return mFormat;
1223     }
1224 
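    /* Creates a copy of the format without the csd-* buffers, carrying over a few optional
       size/rate keys (presumably so the codec-specific data can instead be delivered in-band
       as codec-config input buffers). */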
1225     public static MediaFormat removeCSD(MediaFormat orig) {
1226         MediaFormat copy = MediaFormat.createVideoFormat(
1227                 orig.getString(MediaFormat.KEY_MIME),
1228                 orig.getInteger(MediaFormat.KEY_WIDTH), orig.getInteger(MediaFormat.KEY_HEIGHT));
1229         for (String k : new String[] {
1230                 MediaFormat.KEY_FRAME_RATE, MediaFormat.KEY_MAX_WIDTH, MediaFormat.KEY_MAX_HEIGHT,
1231                 MediaFormat.KEY_MAX_INPUT_SIZE
1232         }) {
1233             if (orig.containsKey(k)) {
1234                 try {
1235                     copy.setInteger(k, orig.getInteger(k));
1236                 } catch (ClassCastException e) {
1237                     try {
1238                         copy.setFloat(k, orig.getFloat(k));
1239                     } catch (ClassCastException e2) {
1240                         // Could not copy value. Don't fail here, as having non-standard
1241                         // value types for defined keys is permissible by the media API
1242                         // for optional keys.
1243                     }
1244                 }
1245             }
1246         }
1247         return copy;
1248     }
1249 
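    /* Returns the shared adaptive format with KEY_MAX_WIDTH/KEY_MAX_HEIGHT set to the given
       values; note that the same MediaFormat instance is mutated on every call. */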
1250     public MediaFormat getAdaptiveFormat(int width, int height) {
1251         mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
1252         mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
1253         return mAdaptiveFormat;
1254     }
1255 
1256     public String getMime() {
1257         return mFormat.getString(MediaFormat.KEY_MIME);
1258     }
1259 
1260     public int getWidth() {
1261         return mFormat.getInteger(MediaFormat.KEY_WIDTH);
1262     }
1263 
1264     public int getHeight() {
1265         return mFormat.getInteger(MediaFormat.KEY_HEIGHT);
1266     }
1267 
1268     public final static int RANGE_START = 0;
1269     public final static int RANGE_END = 1;
1270     public final static int RANGE_DURATION = 2;
1271 
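    /* Returns the smallest timestamp, the largest timestamp, or their difference (duration)
       over the frames in [frameStartIx, frameEndIx), depending on kind. */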
1272     public long getTimestampRangeValue(int frameStartIx, int frameEndIx, int kind) {
1273         long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
1274         for (int frameIx = frameStartIx; frameIx < frameEndIx; frameIx++) {
1275             Frame frame = getFrame(frameIx);
1276             if (frame != null) {
1277                 if (min > frame.presentationTimeUs) {
1278                     min = frame.presentationTimeUs;
1279                 }
1280                 if (max < frame.presentationTimeUs) {
1281                     max = frame.presentationTimeUs;
1282                 }
1283             }
1284         }
1285         if (kind == RANGE_START) {
1286             return min;
1287         } else if (kind == RANGE_END) {
1288             return max;
1289         } else if (kind == RANGE_DURATION) {
1290             return max - min;
1291         } else {
1292             throw new IllegalArgumentException("kind is not valid: " + kind);
1293         }
1294     }
1295 
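    /* Reads the first numFrames samples of the given raw video resource.  Any csd-0/csd-1
       buffers from the track format are prepended to the first sample, so frame 0 already
       carries the codec-specific data. */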
1296     public static Media read(Context context, int video, int numFrames)
1297             throws java.io.IOException {
1298         MediaExtractor extractor = new MediaExtractor();
1299         AssetFileDescriptor testFd = context.getResources().openRawResourceFd(video);
1300         extractor.setDataSource(testFd.getFileDescriptor(), testFd.getStartOffset(),
1301                 testFd.getLength());
1302 
1303         Media media = new Media(
1304                 extractor.getTrackFormat(0), extractor.getTrackFormat(0), numFrames);
1305         extractor.selectTrack(0);
1306 
1307         Log.i(TAG, "format=" + media.getFormat());
1308         ArrayList<ByteBuffer> csds = new ArrayList<ByteBuffer>();
1309         for (String tag: new String[] { "csd-0", "csd-1" }) {
1310             if (media.getFormat().containsKey(tag)) {
1311                 ByteBuffer csd = media.getFormat().getByteBuffer(tag);
1312                 Log.i(TAG, tag + "=" + AdaptivePlaybackTest.byteBufferToString(csd, 0, 16));
1313                 csds.add(csd);
1314             }
1315         }
1316 
1317         ByteBuffer readBuf = ByteBuffer.allocate(200000);
1318         for (int ix = 0; ix < numFrames; ix++) {
1319             int sampleSize = extractor.readSampleData(readBuf, 0 /* offset */);
1320 
1321             if (sampleSize < 0) {
1322                 throw new IllegalArgumentException(
1323                         "media is too short: got only " + ix + " of " + numFrames + " frames");
1323             } else {
1324                 readBuf.position(0).limit(sampleSize);
1325                 for (ByteBuffer csd: csds) {
1326                     sampleSize += csd.capacity();
1327                 }
1328                 ByteBuffer buf = ByteBuffer.allocate(sampleSize);
1329                 for (ByteBuffer csd: csds) {
1330                     csd.clear();
1331                     buf.put(csd);
1332                     csd.clear();
1333                     Log.i(TAG, "csd[" + csd.capacity() + "]");
1334                 }
1335                 Log.i(TAG, "frame-" + ix + "[" + sampleSize + "]");
1336                 csds.clear();
1337                 buf.put(readBuf);
1338                 media.mFrames[ix] = new Frame(
1339                     extractor.getSampleTime(),
1340                     extractor.getSampleFlags(),
1341                     buf);
1342                 extractor.advance();
1343             }
1344         }
1345         extractor.release();
1346         testFd.close();
1347         return media;
1348     }
1349 }
1350 
1351 /* ====================================================================== */
1352 /*                      Codec, CodecList and CodecFactory                 */
1353 /* ====================================================================== */
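/* a decoder name paired with its capabilities and the media clips used to exercise it;
   adaptive is true when the codec advertises FEATURE_AdaptivePlayback */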
1354 class Codec {
1355     private final static String TAG = "AdaptiveCodec";
1356 
1357     public String name;
1358     public CodecCapabilities capabilities;
1359     public Media[] mediaList;
1360     public boolean adaptive;
1361     public Codec(String n, CodecCapabilities c, Media[] m) {
1362         name = n;
1363         capabilities = c;
1364         mediaList = m;
1365 
1366         if (capabilities == null) {
1367             adaptive = false;
1368         } else {
1369             Log.w(TAG, "checking capabilities of " + name + " for " + mediaList[0].getMime());
1370             adaptive = capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
1371         }
1372     }
1373 }
1374 
1375 class CodecList extends ArrayList<Codec> { };
1376 
1377 /* all codecs for the given mime type, plus the explicitly named codec if it is present */
1378 class CodecFamily extends CodecList {
1379     private final static String TAG = "AdaptiveCodecFamily";
1380     private static final int NUM_FRAMES = AdaptivePlaybackTest.NUM_FRAMES;
1381 
1382     public CodecFamily(Context context, String mime, String explicitCodecName, int ... resources) {
1383         try {
1384             /* read all media */
1385             Media[] mediaList = new Media[resources.length];
1386             for (int i = 0; i < resources.length; i++) {
1387                 Log.v(TAG, "reading media " + resources[i]);
1388                 Media media = Media.read(context, resources[i], NUM_FRAMES);
1389                 assert media.getMime().equals(mime):
1390                         "test stream " + resources[i] + " has " + media.getMime() +
1391                         " mime type instead of " + mime;
1392 
1393                 /* assuming the first timestamp is the smallest */
1394                 long firstPTS = media.getFrame(0).presentationTimeUs;
1395                 long smallestPTS = media.getTimestampRangeValue(0, NUM_FRAMES, Media.RANGE_START);
1396 
1397                 assert firstPTS == smallestPTS:
1398                         "first frame timestamp (" + firstPTS + ") is not smallest (" +
1399                         smallestPTS + ")";
1400 
1401                 mediaList[i] = media;
1402             }
1403 
1404             /* enumerate codecs */
1405             MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
1406             for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
1407                 if (codecInfo.isEncoder()) {
1408                     continue;
1409                 }
1410                 for (String type : codecInfo.getSupportedTypes()) {
1411                     if (type.equals(mime)) {
1412                         /* mark the explicitly named codec as included */
1413                         if (codecInfo.getName().equals(explicitCodecName)) {
1414                             explicitCodecName = null;
1415                         }
1416                         add(new Codec(
1417                                 codecInfo.getName(),
1418                                 codecInfo.getCapabilitiesForType(mime),
1419                                 mediaList));
1420                         break;
1421                     }
1422                 }
1423             }
1424 
1425             /* test if the explicitly named codec is present on the system */
1426             if (explicitCodecName != null) {
1427                 MediaCodec codec = MediaCodec.createByCodecName(explicitCodecName);
1428                 if (codec != null) {
1429                     codec.release();
1430                     add(new Codec(explicitCodecName, null, mediaList));
1431                 }
1432             }
1433         } catch (Throwable t) {
1434             Log.wtf(TAG, "Constructor failed", t);
1435             throw new RuntimeException("constructor failed", t);
1436         }
1437     }
1438 }
1439 
1440 /* only the explicitly named codec, if it exists */
1441 class CodecByName extends CodecList {
1442     public CodecByName(Context context, String mime, String codecName, int ... resources) {
1443         for (Codec c: new CodecFamily(context, mime, codecName, resources)) {
1444             if (c.name.equals(codecName)) {
1445                 add(c);
1446             }
1447         }
1448     }
1449 }
1450 
1451 /* all codecs for the given mime type, except the explicitly named codec */
1452 class CodecFamilyExcept extends CodecList {
1453     public CodecFamilyExcept(
1454             Context context, String mime, String exceptCodecName, int ... resources) {
1455         for (Codec c: new CodecFamily(context, mime, null, resources)) {
1456             if (!c.name.equals(exceptCodecName)) {
1457                 add(c);
1458             }
1459         }
1460     }
1461 }
1462 
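/* Factories select which decoders a test runs on: the base factory returns the whole codec
   family for a mime type, SWCodecFactory only the named Google (software) codec, and
   HWCodecFactory every codec except that named one. */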
1463 class CodecFactory {
1464     protected boolean hasCodec(String codecName) {
1465         MediaCodecList list = new MediaCodecList(MediaCodecList.ALL_CODECS);
1466         for (MediaCodecInfo info : list.getCodecInfos()) {
1467             if (codecName.equals(info.getName())) {
1468                 return true;
1469             }
1470         }
1471         return false;
1472     }
1473 
1474     public CodecList createCodecList(
1475             Context context, String mime, String googleCodecName, int ...resources) {
1476         if (!hasCodec(googleCodecName)) {
1477             return null;
1478         }
1479         return new CodecFamily(context, mime, googleCodecName, resources);
1480     }
1481 }
1482 
1483 class SWCodecFactory extends CodecFactory {
1484     public CodecList createCodecList(
1485             Context context, String mime, String googleCodecName, int ...resources) {
1486         if (!hasCodec(googleCodecName)) {
1487             return null;
1488         }
1489         return new CodecByName(context, mime, googleCodecName, resources);
1490     }
1491 }
1492 
1493 class HWCodecFactory extends CodecFactory {
1494     public CodecList createCodecList(
1495             Context context, String mime, String googleCodecName, int ...resources) {
1496         if (!hasCodec(googleCodecName)) {
1497             return null;
1498         }
1499         return new CodecFamilyExcept(context, mime, googleCodecName, resources);
1500     }
1501 }
1502 
1503 /* ====================================================================== */
1504 /*                  Test Steps, Test (Case)s, and Test List               */
1505 /* ====================================================================== */
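/* runs a Step on a separate thread, capturing anything it throws so that the caller can
   rethrow it after join() */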
1506 class StepRunner implements Runnable {
1507     public StepRunner(Step s) {
1508         mStep = s;
1509         mThrowed = null;
1510     }
1511     public void run() {
1512         try {
1513             mStep.run();
1514         } catch (Throwable e) {
1515             mThrowed = e;
1516         }
1517     }
1518     public void throwThrowed() throws Throwable {
1519         if (mThrowed != null) {
1520             throw mThrowed;
1521         }
1522     }
1523     private Throwable mThrowed;
1524     private Step mStep;
1525 }
1526 
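/* Runs a list of Steps, moving a step onto a helper thread when its surface requires it.
   Warnings are accumulated across all steps, and if any step threw, the last failure is
   rethrown (wrapped in a RuntimeException) after the whole list has run. */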
1527 class TestList extends ArrayList<Step> {
1528     private final static String TAG = "AdaptiveTestList";
1529     public void run() throws Throwable {
1530         Throwable res = null;
1531         for (Step step: this) {
1532             try {
1533                 Log.i(TAG, step.getDescription());
1534                 if (step.stepSurface().needsToRunInSeparateThread()) {
1535                     StepRunner runner = new StepRunner(step);
1536                     Thread th = new Thread(runner, "stepWrapper");
1537                     th.start();
1538                     th.join();
1539                     runner.throwThrowed();
1540                 } else {
1541                     step.run();
1542                 }
1543             } catch (Throwable e) {
1544                 Log.e(TAG, "while " + step.getDescription(), e);
1545                 res = e;
1546                 mFailedSteps++;
1547             } finally {
1548                 mWarnings += step.getWarnings();
1549             }
1550         }
1551         if (res != null) {
1552             throw new RuntimeException(
1553                 mFailedSteps + " failed steps, " + mWarnings + " warnings",
1554                 res);
1555         }
1556     }
1557     public int getWarnings() {
1558         return mWarnings;
1559     }
1560     public int getFailures() {
1561         return mFailedSteps;
1562     }
1563     private int mFailedSteps;
1564     private int mWarnings;
1565 }
1566 
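/* A Test describes one adaptive-playback scenario.  The chainable setters choose the decode
   target (Surface by default, SurfaceTexture, or ByteBuffer) and whether the codec is
   configured with a regular format or an adaptive format sized to the clip itself or to the
   largest clip; addTests() expands the scenario into concrete Steps for a codec. */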
1567 abstract class Test {
1568     public static final int FORMAT_ADAPTIVE_LARGEST = 1;
1569     public static final int FORMAT_ADAPTIVE_FIRST = 2;
1570     public static final int FORMAT_REGULAR = 3;
1571 
1572     protected int mFormatType;
1573     protected boolean mUseSurface;
1574     protected boolean mUseSurfaceTexture;
1575 
1576     public Test() {
1577         mFormatType = FORMAT_REGULAR;
1578         mUseSurface = true;
1579         mUseSurfaceTexture = false;
1580     }
1581 
1582     public Test adaptive() {
1583         mFormatType = FORMAT_ADAPTIVE_LARGEST;
1584         return this;
1585     }
1586 
1587     public Test adaptiveSmall() {
1588         mFormatType = FORMAT_ADAPTIVE_FIRST;
1589         return this;
1590     }
1591 
1592     public Test byteBuffer() {
1593         mUseSurface = false;
1594         mUseSurfaceTexture = false;
1595         return this;
1596     }
1597 
1598     public Test texture() {
1599         mUseSurface = false;
1600         mUseSurfaceTexture = true;
1601         return this;
1602     }
1603 
1604     public void checkAdaptiveFormat() {
1605         assert mFormatType != FORMAT_REGULAR:
1606                 "must be used with adaptive format";
1607     }
1608 
1609     abstract protected TestSurface getSurface();
1610 
1611     /* TRICKY: the format is updated on each test run because we reuse the same
1612        two MediaFormat objects returned by MediaExtractor.  Therefore, the format
1613        must be obtained explicitly in each test step.
1614 
1615        Returns null if the codec does not support the requested (adaptive) format.
1616        */
1617     protected MediaFormat getFormat(Codec c) {
1618         return getFormat(c, 0);
1619     }
1620 
1621     protected MediaFormat getFormat(Codec c, int i) {
1622         MediaFormat format = null;
1623         if (mFormatType == FORMAT_REGULAR) {
1624             format = c.mediaList[i].getFormat();
1625         } else if (mFormatType == FORMAT_ADAPTIVE_FIRST && c.adaptive) {
1626             format = c.mediaList[i].getAdaptiveFormat(
1627                 c.mediaList[i].getWidth(), c.mediaList[i].getHeight());
1628         } else if (mFormatType == FORMAT_ADAPTIVE_LARGEST && c.adaptive) {
1629             /* update adaptive format to max size used */
1630             format = c.mediaList[i].getAdaptiveFormat(0, 0);
1631             for (Media media : c.mediaList) {
1632                 /* get the largest width, and the largest height independently */
1633                 if (media.getWidth() > format.getInteger(MediaFormat.KEY_MAX_WIDTH)) {
1634                     format.setInteger(MediaFormat.KEY_MAX_WIDTH, media.getWidth());
1635                 }
1636                 if (media.getHeight() > format.getInteger(MediaFormat.KEY_MAX_HEIGHT)) {
1637                     format.setInteger(MediaFormat.KEY_MAX_HEIGHT, media.getHeight());
1638                 }
1639             }
1640         }
1641         return format;
1642     }
1643 
1644     public boolean isValid(Codec c) { return true; }
1645     public abstract void addTests(TestList tests, Codec c);
1646 }
1647 
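/* A Step is one named unit of work within a test; it is bound to a codec, a media clip and
   the owning Test's surface/format, and it records warnings for non-fatal problems instead
   of failing immediately. */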
1648 abstract class Step {
1649     private static final String TAG = "AdaptiveStep";
1650 
1651     public Step(String title, Test instance, Codec codec, Media media) {
1652         mTest = instance;
1653         mCodec = codec;
1654         mMedia = media;
1655         mDescription = title + " on " + stepSurface().getSurface() + " using " +
1656             mCodec.name + " and " + stepFormat();
1657     }
1658     public Step(String title, Test instance, Codec codec, int mediaIx) {
1659         this(title, instance, codec, codec.mediaList[mediaIx]);
1660     }
1661     public Step(String title, Test instance, Codec codec) {
1662         this(title, instance, codec, 0);
1663     }
1664     public Step(String description) {
1665         mDescription = description;
1666     }
1667     public Step() { }
1668 
1669     public abstract void run() throws Throwable;
1670 
1671     private String mDescription;
1672     private Test mTest;
1673     private Codec mCodec;
1674     private Media mMedia;
1675     private int mWarnings;
1676 
1677     /* TRICKY: use non-standard getter names so that we don't conflict with the getters
1678        in the Test classes, as most test Steps are defined as anonymous classes inside
1679        the test classes. */
1680     public MediaFormat stepFormat() {
1681         int ix = Arrays.asList(mCodec.mediaList).indexOf(mMedia);
1682         return mTest.getFormat(mCodec, ix);
1683     }
1684 
1685     public TestSurface stepSurface() {
1686         return mTest.getSurface();
1687     }
1688 
1689     public Media  stepMedia()       { return mMedia; }
1690 
1691     public String getDescription() { return mDescription; }
1692     public int    getWarnings()    { return mWarnings; }
1693 
1694     public void warn(String message) {
1695         Log.e(TAG, "WARNING: " + message + " in " + getDescription());
1696         mWarnings++;
1697     }
1698     public void warn(String message, Throwable t) {
1699         Log.e(TAG, "WARNING: " + message + " in " + getDescription(), t);
1700         mWarnings++;
1701     }
1702     public void warn(Iterable<String> warnings) {
1703         for (String warning: warnings) {
1704             warn(warning);
1705         }
1706     }
1707 }
1708 
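/* abstraction over the destination decoded frames are rendered to, with optional pixel
   checksumming */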
1709 interface TestSurface {
1710     public Surface getSurface();
1711     public long checksum();
1712     public void release();
1713     public void prepare();         // prepare surface prior to render
1714     public void waitForDraw();     // wait for rendering to take place
1715     public boolean needsToRunInSeparateThread();
1716 }
1717 
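/* Renders decoder output to an off-screen GL surface (via OutputSurface) so frames can be
   checksummed: the drawn image is read back with glReadPixels and passed, together with the
   shared CRC32, to AdaptivePlaybackTest.checksum(). */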
1718 class DecoderSurface extends OutputSurface implements TestSurface {
1719     private ByteBuffer mBuf;
1720     int mWidth;
1721     int mHeight;
1722     CRC32 mCRC;
1723 
1724     public DecoderSurface(int width, int height, CRC32 crc) {
1725         super(width, height);
1726         mWidth = width;
1727         mHeight = height;
1728         mCRC = crc;
1729         mBuf = ByteBuffer.allocateDirect(4 * width * height);
1730     }
1731 
1732     public void prepare() {
1733         makeCurrent();
1734     }
1735 
1736     public void waitForDraw() {
1737         awaitNewImage();
1738         drawImage();
1739     }
1740 
1741     public long checksum() {
1742         mBuf.position(0);
1743         GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, mBuf);
1744         mBuf.position(0);
1745         return AdaptivePlaybackTest.checksum(mBuf, mBuf.capacity(), mCRC);
1746     }
1747 
1748     public void release() {
1749         super.release();
1750         mBuf = null;
1751     }
1752 
1753     public boolean needsToRunInSeparateThread() {
1754         return true;
1755     }
1756 }
1757 
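/* Renders directly to the activity's Surface.  Pixels cannot be read back here, so
   checksum() always returns 0, and the surface is deliberately not released because it is
   reused across steps. */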
1758 class ActivitySurface implements TestSurface {
1759     private Surface mSurface;
1760     public ActivitySurface(Surface s) {
1761         mSurface = s;
1762     }
1763     public Surface getSurface() {
1764         return mSurface;
1765     }
1766     public void prepare() { }
1767     public void waitForDraw() { }
1768     public long checksum() {
1769         return 0;
1770     }
1771     public void release() {
1772         // don't release activity surface, as it is reusable
1773     }
1774     public boolean needsToRunInSeparateThread() {
1775         return false;
1776     }
1777 }
1778 
1779