1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.video.cts;
18 
19 import static org.junit.Assert.assertFalse;
20 import static org.junit.Assert.assertNotNull;
21 import static org.junit.Assert.assertNull;
22 import static org.junit.Assert.assertTrue;
23 import static org.junit.Assert.fail;
24 
25 import android.graphics.ImageFormat;
26 import android.graphics.Point;
27 import android.media.Image;
28 import android.media.Image.Plane;
29 import android.media.MediaCodec;
30 import android.media.MediaCodec.BufferInfo;
31 import android.media.MediaCodecInfo;
32 import android.media.MediaCodecInfo.CodecCapabilities;
33 import android.media.MediaFormat;
34 import android.media.cts.CodecImage;
35 import android.media.cts.CodecUtils;
36 import android.media.cts.TestArgs;
37 import android.media.cts.TestUtils;
38 import android.media.cts.YUVImage;
39 import android.os.Build;
40 import android.util.Log;
41 import android.util.Pair;
42 
43 import androidx.test.platform.app.InstrumentationRegistry;
44 
45 import com.android.compatibility.common.util.ApiTest;
46 import com.android.compatibility.common.util.DeviceReportLog;
47 import com.android.compatibility.common.util.MediaPerfUtils;
48 import com.android.compatibility.common.util.MediaUtils;
49 import com.android.compatibility.common.util.ResultType;
50 import com.android.compatibility.common.util.ResultUnit;
51 import com.android.compatibility.common.util.Stat;
52 
53 import org.junit.After;
54 import org.junit.Before;
55 import org.junit.Test;
56 import org.junit.runner.RunWith;
57 import org.junit.runners.Parameterized;
58 
59 import java.io.IOException;
60 import java.nio.ByteBuffer;
61 import java.util.ArrayList;
62 import java.util.Arrays;
63 import java.util.Collection;
64 import java.util.LinkedList;
65 import java.util.List;
66 import java.util.Random;
67 
68 /**
69  * This tries to test video encoder / decoder performance by running encoding / decoding
70  * without displaying the raw data. To make things simpler, encoder is used to encode synthetic
71  * data and decoder is used to decode the encoded video. This approach does not work where
72  * there is only decoder. Performance index is total time taken for encoding and decoding
73  * the whole frames.
74  * To prevent sacrificing quality for faster encoding / decoding, randomly selected pixels are
75  * compared with the original image. As the pixel comparison can slow down the decoding process,
76  * only some randomly selected pixels are compared. As there can be only one performance index,
77  * error above certain threshold in pixel value will be treated as an error.
78  */
79 @RunWith(Parameterized.class)
80 public class VideoEncoderDecoderTest {
81     private static final String TAG = "VideoEncoderDecoderTest";
82     private static final String REPORT_LOG_NAME = "CtsVideoTestCases";
83     // this wait time affects fps as too big value will work as a blocker if device fps
84     // is not very high.
85     private static final long VIDEO_CODEC_WAIT_TIME_US = 1000;
86     private static final boolean VERBOSE = false;
87     private static final int MAX_FPS = 30; // measure performance at 30fps, this is relevant for
88                                            // the meaning of bitrate
89 
90     private static final String AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
91     private static final String H263 = MediaFormat.MIMETYPE_VIDEO_H263;
92     private static final String HEVC = MediaFormat.MIMETYPE_VIDEO_HEVC;
93     private static final String MPEG2 = MediaFormat.MIMETYPE_VIDEO_MPEG2;
94     private static final String MPEG4 = MediaFormat.MIMETYPE_VIDEO_MPEG4;
95     private static final String VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
96     private static final String VP9 = MediaFormat.MIMETYPE_VIDEO_VP9;
97     private static final String AV1 = MediaFormat.MIMETYPE_VIDEO_AV1;
98 
    // test results:

    // Index of the run currently executing (doTest assigns runIx here); result arrays
    // below are indexed by this value.
    private int mCurrentTestRound = 0;
    // Per-run arrays of deltas between consecutive encoder output times, in microseconds.
    private double[][] mEncoderFrameTimeUsDiff;
    private double[] mEncoderFpsResults;

    // Decoder-side counterparts; only allocated for quality runs (when decoders exist).
    private double[][] mDecoderFrameTimeUsDiff;
    private double[] mDecoderFpsResults;
    // Combined encode+decode fps per run; filled only when frame counts match.
    private double[] mTotalFpsResults;
    private double[] mDecoderRmsErrorResults;

    // i frame interval for encoder
    private static final int KEY_I_FRAME_INTERVAL = 5;
    private static final int MAX_TEST_TIMEOUT_MS = 300000;   // 5 minutes

    // Clamp bounds for generated luma samples (16-235 is the nominal video Y range);
    // presumably applied when synthesizing YUV frames — generator not visible in this chunk.
    private static final int Y_CLAMP_MIN = 16;
    private static final int Y_CLAMP_MAX = 235;
    // Extra padding added to each YUV plane dimension (see the initYUVPlane call in doTest).
    private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
    // Source frame planes; the "Direct" variants are presumably direct ByteBuffers for the
    // codec input path — confirm in the buffer setup code (not visible in this chunk).
    private ByteBuffer mYBuffer, mYDirectBuffer;
    private ByteBuffer mUVBuffer, mUVDirectBuffer;
    // Color formats chosen for the encoder input (src) and decoder output (dst).
    private int mSrcColorFormat;
    private int mDstColorFormat;
    private int mBufferWidth;
    private int mBufferHeight;
    private int mVideoWidth;
    private int mVideoHeight;
    // Row stride and slice height reported by the encoder's input format (see runEncoder).
    private int mVideoStride;
    private int mVideoVStride;
    private int mFrameRate;

    // Formats captured at various pipeline stages; used for performance-report headers.
    private MediaFormat mEncConfigFormat;
    private MediaFormat mEncInputFormat;
    private MediaFormat mEncOutputFormat;
    private MediaFormat mDecOutputFormat;

    // Encoded bitstream kept in memory between the encode and decode passes.
    private LinkedList<Pair<ByteBuffer, BufferInfo>> mEncodedOutputBuffer;
    // check this many pixels per each decoded frame
    // checking too many points decreases decoder frame rates a lot.
    private static final int PIXEL_CHECK_PER_FRAME = 1000;
    // RMS error in pixel values above this will be treated as error.
    private static final double PIXEL_RMS_ERROR_MARGIN = 20.0;
    // offset legitimate timestamps away from 0, so that we'll never confuse them
    // with a missing or otherwise erroneous timestamp.
    private static final int TIMESTAMP_OFFSET = 132;
    // Effective RMS margin for the current quality run (configurable via qual()).
    private double mRmsErrorMargin;
    private Random mRandom;

    // True when the swcodec mainline module differs from the factory version; allows
    // faster-than-expected results from updated Google software codecs (see doTest).
    private boolean mUpdatedSwCodec = false;

    // Per-row parameters injected by the Parameterized runner (see constructor).
    private String mMediaType;
    private int mWidth;
    private int mHeight;
    private String mEncoderName;
    private int mMaxBFrames;
153 
154     private class TestConfig {
155         public boolean mTestPixels = true;
156         public boolean mReportFrameTime = false;
157         public int mTotalFrames = 300;
158         public int mMinNumFrames = 300;
159         public int mMaxTimeMs = 120000;  // 2 minutes
160         public int mMinTimeMs = 10000;   // 10 seconds
161         public int mNumberOfRepeat = 10;
162 
initPerfTest()163         public void initPerfTest() {
164             mTestPixels = false;
165             mTotalFrames = 30000;
166             mMinNumFrames = 3000;
167             mNumberOfRepeat = 2;
168         }
169     }
170 
171     private TestConfig mTestConfig;
172 
isPreferredAbi()173     private static boolean isPreferredAbi() {
174         boolean prefers64Bit = false;
175         if (Build.SUPPORTED_64_BIT_ABIS.length > 0 &&
176                 Build.SUPPORTED_ABIS.length > 0 &&
177                 Build.SUPPORTED_ABIS[0].equals(Build.SUPPORTED_64_BIT_ABIS[0])) {
178             prefers64Bit = true;
179         }
180         return android.os.Process.is64Bit() ? prefers64Bit : !prefers64Bit;
181     }
182 
183     @Before
setUp()184     public void setUp() throws Exception {
185         mEncodedOutputBuffer = new LinkedList<Pair<ByteBuffer, BufferInfo>>();
186         mUpdatedSwCodec =
187                 !TestUtils.isMainlineModuleFactoryVersion("com.google.android.media.swcodec");
188         // Use time as a seed, hoping to prevent checking pixels in the same pattern
189         long now = System.currentTimeMillis();
190         mRandom = new Random(now);
191         mTestConfig = new TestConfig();
192     }
193 
194     @After
tearDown()195     public void tearDown() throws Exception {
196         mEncodedOutputBuffer.clear();
197         mEncodedOutputBuffer = null;
198         mYBuffer = null;
199         mUVBuffer = null;
200         mYDirectBuffer = null;
201         mUVDirectBuffer = null;
202         mRandom = null;
203         mTestConfig = null;
204     }
205 
206     /** run performance test. */
perf(String mimeType, int w, int h, String encoder, int maxBFrames)207     private void perf(String mimeType, int w, int h, String encoder, int maxBFrames)
208             throws Exception {
209         doTest(mimeType, w, h, true /* isPerf */, encoder, maxBFrames);
210     }
211 
212     /** run quality test. */
qual(String mimeType, int w, int h, String encoder, int maxBFrames)213     private void qual(String mimeType, int w, int h, String encoder, int maxBFrames)
214             throws Exception {
215         qual(mimeType, w, h, encoder, maxBFrames, PIXEL_RMS_ERROR_MARGIN);
216     }
217 
218     /** run quality test with configurable error. */
qual(String mimeType, int w, int h, String encoder, int maxBFrames, double margin)219     private void qual(String mimeType, int w, int h, String encoder, int maxBFrames, double margin)
220             throws Exception {
221         mRmsErrorMargin = margin;
222         doTest(mimeType, w, h, false /* isPerf */, encoder, maxBFrames);
223     }
224 
prepareParamsList(List<Object[]> testParams, String mediaType, int[] widths, int[] heights)225     static void prepareParamsList(List<Object[]> testParams, String mediaType, int[] widths,
226             int[] heights) {
227         String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
228         int[] maxBFrames = {0, 2};
229         for (int i = 0; i < widths.length; i++) {
230             MediaFormat format =
231                     MediaFormat.createVideoFormat(mediaType, widths[i], heights[i]);
232             for (String encoder : encoderNames) {
233                 if (TestArgs.shouldSkipCodec(encoder)) {
234                     continue;
235                 }
236                 if (MediaUtils.supports(encoder, format)) {
237                     for (int maxBFrame : maxBFrames) {
238                         if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
239                                 && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
240                                 && maxBFrame != 0) {
241                             continue;
242                         }
243                         testParams.add(
244                                 new Object[]{mediaType, widths[i], heights[i], encoder, maxBFrame});
245                     }
246                 }
247             }
248         }
249     }
250 
251     @Parameterized.Parameters(name = "{0}_{3}_{1}x{2}_{4}")
input()252     public static Collection<Object[]> input() throws IOException {
253         final List<Object[]> testParams = new ArrayList<>();
254         final String[] mediaTypes = {AVC, HEVC, MPEG2, MPEG4, VP8, VP9, H263, AV1};
255         for (String mediaType : mediaTypes) {
256             if (mediaType.equals(AVC)) {
257                 int[] widths = {320, 720, 1280, 1920};
258                 int[] heights = {240, 480, 720, 1080};
259                 prepareParamsList(testParams, mediaType, widths, heights);
260             } else if (mediaType.equals(H263)) {
261                 int[] widths = {176, 352, 704, 1408};
262                 int[] heights = {144, 288, 576, 1152};
263                 prepareParamsList(testParams, mediaType, widths, heights);
264             } else if (mediaType.equals(HEVC)) {
265                 int[] widths = {320, 720, 1280, 1920, 3840};
266                 int[] heights = {240, 480, 720, 1080, 2160};
267                 prepareParamsList(testParams, mediaType, widths, heights);
268             } else if (mediaType.equals(MPEG2)) {
269                 int[] widths = {176, 352, 640, 1280, 1920};
270                 int[] heights = {144, 288, 480, 720, 1080};
271                 prepareParamsList(testParams, mediaType, widths, heights);
272             } else if (mediaType.equals(MPEG4)) {
273                 int[] widths = {176, 352, 640, 1280};
274                 int[] heights = {144, 288, 480, 720};
275                 prepareParamsList(testParams, mediaType, widths, heights);
276             } else if (mediaType.equals(VP8)) {
277                 int[] widths = {320, 640, 1280, 1920};
278                 int[] heights = {180, 360, 720, 1080};
279                 prepareParamsList(testParams, mediaType, widths, heights);
280             } else if (mediaType.equals(VP9)) {
281                 int[] widths = {320, 640, 1280, 1920, 3840};
282                 int[] heights = {180, 360, 720, 1080, 2160};
283                 prepareParamsList(testParams, mediaType, widths, heights);
284             } else if (mediaType.equals(AV1)) {
285                 int[] widths = {320, 720, 1280, 1920};
286                 int[] heights = {240, 480, 720, 1080};
287                 prepareParamsList(testParams, mediaType, widths, heights);
288             }
289         }
290         return testParams;
291     }
292 
VideoEncoderDecoderTest(String mediaType, int width, int height, String encoderName, int maxBFrames)293     public VideoEncoderDecoderTest(String mediaType, int width, int height,
294             String encoderName, int maxBFrames) {
295         this.mMediaType = mediaType;
296         this.mWidth = width;
297         this.mHeight = height;
298         this.mEncoderName = encoderName;
299         this.mMaxBFrames = maxBFrames;
300     }
301 
302     @ApiTest(apis = {"VideoCapabilities#getSupportedWidths",
303             "VideoCapabilities#getSupportedHeightsFor",
304             "VideoCapabilities#getSupportedFrameRatesFor",
305             "VideoCapabilities#getBitrateRange",
306             "VideoCapabilities#getAchievableFrameRatesFor",
307             "CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
308             "CodecCapabilities#COLOR_FormatYUV420Planar",
309             "CodecCapabilities#COLOR_FormatYUV420Flexible",
310             "android.media.MediaFormat#KEY_MAX_B_FRAMES"})
311     @Test
testQual()312     public void testQual() throws Exception {
313         if (mMediaType == H263 && (mWidth == 704 || mWidth == 1408)) {
314             qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames, 25);
315         } else {
316             qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames);
317         }
318     }
319 
    /** Performance run: measures achievable encode fps for this row; pixel checks disabled. */
    @ApiTest(apis = {"VideoCapabilities#getSupportedWidths",
            "VideoCapabilities#getSupportedHeightsFor",
            "VideoCapabilities#getSupportedFrameRatesFor",
            "VideoCapabilities#getBitrateRange",
            "VideoCapabilities#getAchievableFrameRatesFor",
            "CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
            "CodecCapabilities#COLOR_FormatYUV420Planar",
            "CodecCapabilities#COLOR_FormatYUV420Flexible",
            "android.media.MediaFormat#KEY_MAX_B_FRAMES"})
    @Test
    public void testPerf() throws Exception {
        perf(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames);
    }
333 
isSrcSemiPlanar()334     private boolean isSrcSemiPlanar() {
335         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
336     }
337 
isSrcFlexYUV()338     private boolean isSrcFlexYUV() {
339         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
340     }
341 
isDstSemiPlanar()342     private boolean isDstSemiPlanar() {
343         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
344     }
345 
isDstFlexYUV()346     private boolean isDstFlexYUV() {
347         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
348     }
349 
getColorFormat(CodecInfo info)350     private static int getColorFormat(CodecInfo info) {
351         if (info.mSupportSemiPlanar) {
352             return CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
353         } else if (info.mSupportPlanar) {
354             return CodecCapabilities.COLOR_FormatYUV420Planar;
355         } else {
356             // FlexYUV must be supported
357             return CodecCapabilities.COLOR_FormatYUV420Flexible;
358         }
359     }
360 
361     private static class RunResult {
362         public final int mNumFrames;
363         public final double mDurationMs;
364         public final double mRmsError;
365 
RunResult()366         RunResult() {
367             mNumFrames = 0;
368             mDurationMs = Double.NaN;
369             mRmsError = Double.NaN;
370         }
371 
RunResult(int numFrames, double durationMs)372         RunResult(int numFrames, double durationMs) {
373             mNumFrames = numFrames;
374             mDurationMs = durationMs;
375             mRmsError = Double.NaN;
376         }
377 
RunResult(int numFrames, double durationMs, double rmsError)378         RunResult(int numFrames, double durationMs, double rmsError) {
379             mNumFrames = numFrames;
380             mDurationMs = durationMs;
381             mRmsError = rmsError;
382         }
383     }
384 
doTest(String mimeType, int w, int h, boolean isPerf, String encoderName, int maxBFrames)385     private void doTest(String mimeType, int w, int h, boolean isPerf, String encoderName,
386             int maxBFrames) throws Exception {
387         if (TestArgs.shouldSkipMediaType(mimeType)) {
388             return;
389         }
390         MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
391 
392         if (isPerf) {
393             mTestConfig.initPerfTest();
394         }
395 
396         if (TestArgs.shouldSkipCodec(encoderName)) {
397             return;
398         }
399         CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h, MAX_FPS);
400         assertNotNull(infoEnc);
401 
402         // Skip decoding pass for performance tests as bitstream complexity is not representative
403         String[] decoderNames = null;  // no decoding pass required by default
404         int codingPasses = 1;  // used for time limit. 1 for encoding pass
405         int numRuns = mTestConfig.mNumberOfRepeat;  // used for result array sizing
406         if (!isPerf) {
407             // consider all decoders for quality tests
408             decoderNames = MediaUtils.getDecoderNames(format);
409             if (decoderNames.length == 0) {
410                 MediaUtils.skipTest("No decoders for " + format);
411                 return;
412             }
413             numRuns *= decoderNames.length; // combine each decoder with the encoder
414             codingPasses += decoderNames.length;
415         }
416 
417         // be a bit conservative
418         mTestConfig.mMaxTimeMs = Math.min(
419                 mTestConfig.mMaxTimeMs, MAX_TEST_TIMEOUT_MS / 5 * 4 / codingPasses
420                         / mTestConfig.mNumberOfRepeat);
421         // reduce test-run on non-real devices
422         if (MediaUtils.onFrankenDevice()) {
423             mTestConfig.mMaxTimeMs /= 10;
424         }
425         Log.i(TAG, "current ABI is " + (isPreferredAbi() ? "" : "not ") + "a preferred one");
426 
427         mVideoWidth = w;
428         mVideoHeight = h;
429         mSrcColorFormat = getColorFormat(infoEnc);
430         Log.i(TAG, "Testing video resolution " + w + "x" + h + ": enc format " + mSrcColorFormat);
431 
432         initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH);
433 
434         // Adjust total number of frames to prevent OOM.
435         Runtime rt = Runtime.getRuntime();
436         long usedMemory = rt.totalMemory() - rt.freeMemory();
437         mTestConfig.mTotalFrames = Math.min(mTestConfig.mTotalFrames,
438                 (int) (rt.maxMemory() - usedMemory) / 4 * 3 /
439                 (infoEnc.mBitRate / 8 / infoEnc.mFps + 1));
440         Log.i(TAG, "Total testing frames " + mTestConfig.mTotalFrames);
441 
442         mEncoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
443         mEncoderFpsResults = new double[numRuns];
444 
445         if (decoderNames != null) {
446             mDecoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
447             mDecoderFpsResults = new double[numRuns];
448             mTotalFpsResults = new double[numRuns];
449             mDecoderRmsErrorResults = new double[numRuns];
450         }
451 
452         boolean success = true;
453         int runIx = 0;
454         for (int i = 0; i < mTestConfig.mNumberOfRepeat && success; i++) {
455             mCurrentTestRound = runIx;
456             format = new MediaFormat();
457             format.setString(MediaFormat.KEY_MIME, mimeType);
458             format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
459             format.setInteger(MediaFormat.KEY_BITRATE_MODE,
460                     MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
461             format.setInteger(MediaFormat.KEY_WIDTH, w);
462             format.setInteger(MediaFormat.KEY_HEIGHT, h);
463             format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mSrcColorFormat);
464             format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
465             mFrameRate = infoEnc.mFps;
466             format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
467             format.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);
468 
469             RunResult encodingResult =
470                 runEncoder(encoderName, format, mTestConfig.mTotalFrames, i);
471             double encodingTime = encodingResult.mDurationMs;
472             int framesEncoded = encodingResult.mNumFrames;
473 
474             if (decoderNames != null && decoderNames.length > 0) {
475                 for (String decoderName : decoderNames) {
476                     if (TestArgs.shouldSkipCodec(decoderName)) {
477                         continue;
478                     }
479                     CodecInfo infoDec =
480                         CodecInfo.getSupportedFormatInfo(decoderName, mimeType, w, h, MAX_FPS);
481                     assertNotNull(infoDec);
482                     mDstColorFormat = getColorFormat(infoDec);
483 
484                     // re-initialize format for decoder
485                     format = new MediaFormat();
486                     format.setString(MediaFormat.KEY_MIME, mimeType);
487                     format.setInteger(MediaFormat.KEY_WIDTH, w);
488                     format.setInteger(MediaFormat.KEY_HEIGHT, h);
489                     format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
490                     RunResult decoderResult = runDecoder(decoderName, format, i);
491                     if (decoderResult == null) {
492                         success = false;
493                     } else {
494                         double decodingTime = decoderResult.mDurationMs;
495                         mDecoderRmsErrorResults[runIx] = decoderResult.mRmsError;
496                         mEncoderFpsResults[runIx] = framesEncoded / encodingTime;
497                         int framesDecoded = decoderResult.mNumFrames;
498                         mDecoderFpsResults[runIx] = framesDecoded / decodingTime;
499                         if (framesDecoded == framesEncoded) {
500                             mTotalFpsResults[runIx] =
501                                 framesEncoded / (encodingTime + decodingTime);
502                         }
503                     }
504                     ++runIx;
505                 }
506             } else {
507                 mEncoderFpsResults[runIx] = mTestConfig.mTotalFrames / encodingTime;
508                 ++runIx;
509             }
510 
511             // clear things for re-start
512             mEncodedOutputBuffer.clear();
513             // it will be good to clean everything to make every run the same.
514             System.gc();
515         }
516 
517         // log results before verification
518         double[] measuredFps = new double[numRuns];
519         if (isPerf) {
520             for (int i = 0; i < numRuns; i++) {
521                 measuredFps[i] = logPerformanceResults(encoderName, i);
522             }
523         }
524         if (mTestConfig.mTestPixels && decoderNames != null) {
525             logQualityResults(mimeType, encoderName, decoderNames);
526             for (int i = 0; i < numRuns; i++) {
527                 // make sure that rms error is not too big for all runs
528                 if (mDecoderRmsErrorResults[i] >= mRmsErrorMargin) {
529                     fail("rms error is bigger than the limit "
530                             + Arrays.toString(mDecoderRmsErrorResults) + " vs " + mRmsErrorMargin);
531                 }
532             }
533         }
534 
535         if (isPerf) {
536             // allow improvements in mainline-updated google-supplied software codecs.
537             boolean fasterIsOk =  mUpdatedSwCodec & encoderName.startsWith("c2.android.");
538             String error = MediaPerfUtils.verifyAchievableFrameRates(
539                     encoderName, mimeType, w, h, fasterIsOk, maxBFrames > 0, measuredFps);
540             // Performance numbers only make sense on real devices, so skip on non-real devices
541             //
542             // Also ignore verification on non-preferred ABIs due to the possibility of
543             // this being emulated. On some CPU-s 32-bit mode is emulated using big cores
544             // that results in the SW codecs also running much faster (perhaps they are
545             // scheduled for the big cores as well)
546             // TODO: still verify lower bound.
547             if (error != null) {
548                 if (MediaUtils.onFrankenDevice() || Build.IS_EMULATOR
549                         || (infoEnc.mIsSoftware && !isPreferredAbi())) {
550                     // ensure there is data, but don't insist that it is correct
551                     assertFalse(error, error.startsWith("Failed to get "));
552                 } else {
553                     fail("encountered error " + error);
554                 }
555             }
556         }
557         assertTrue(success);
558     }
559 
logQualityResults(String mimeType, String encoderName, String[] decoderNames)560     private void logQualityResults(String mimeType, String encoderName, String[] decoderNames) {
561         String streamName = "video_encoder_decoder_quality";
562         DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
563         log.addValue("encoder_name", encoderName, ResultType.NEUTRAL, ResultUnit.NONE);
564         log.addValues("decoder_names", Arrays.asList(decoderNames), ResultType.NEUTRAL, ResultUnit.NONE);
565         log.addValue("mime_type", mimeType, ResultType.NEUTRAL, ResultUnit.NONE);
566         log.addValue("width", mVideoWidth, ResultType.NEUTRAL, ResultUnit.NONE);
567         log.addValue("height", mVideoHeight, ResultType.NEUTRAL, ResultUnit.NONE);
568         log.addValues("encoder_fps", mEncoderFpsResults, ResultType.HIGHER_BETTER,
569                 ResultUnit.FPS);
570         log.addValues("rms_error", mDecoderRmsErrorResults, ResultType.LOWER_BETTER,
571                 ResultUnit.NONE);
572         log.addValues("decoder_fps", mDecoderFpsResults, ResultType.HIGHER_BETTER,
573                 ResultUnit.FPS);
574         log.addValues("encoder_decoder_fps", mTotalFpsResults, ResultType.HIGHER_BETTER,
575                 ResultUnit.FPS);
576         log.addValue("encoder_average_fps", Stat.getAverage(mEncoderFpsResults),
577                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
578         log.addValue("decoder_average_fps", Stat.getAverage(mDecoderFpsResults),
579                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
580         log.setSummary("encoder_decoder_average_fps", Stat.getAverage(mTotalFpsResults),
581                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
582         log.submit(InstrumentationRegistry.getInstrumentation());
583     }
584 
logPerformanceResults(String encoderName, int round)585     private double logPerformanceResults(String encoderName, int round) {
586         String streamName = "video_encoder_performance";
587         DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
588         String message = MediaPerfUtils.addPerformanceHeadersToLog(
589                 log, "encoder stats:", round, encoderName,
590                 mEncConfigFormat, mEncInputFormat, mEncOutputFormat);
591         double[] frameTimeUsDiff = mEncoderFrameTimeUsDiff[round];
592         double fps = MediaPerfUtils.addPerformanceStatsToLog(
593                 log, new MediaUtils.Stats(frameTimeUsDiff), message);
594 
595         if (mTestConfig.mReportFrameTime) {
596             double[] msDiff = new double[frameTimeUsDiff.length];
597             double nowUs = 0, lastMs = 0;
598             for (int i = 0; i < frameTimeUsDiff.length; ++i) {
599                 nowUs += frameTimeUsDiff[i];
600                 double nowMs = Math.round(nowUs) / 1000.;
601                 msDiff[i] = Math.round((nowMs - lastMs) * 1000) / 1000.;
602                 lastMs = nowMs;
603             }
604             log.addValues("encoder_raw_diff", msDiff, ResultType.NEUTRAL, ResultUnit.MS);
605         }
606 
607         log.submit(InstrumentationRegistry.getInstrumentation());
608         return fps;
609     }
610 
611     /**
612      * run encoder benchmarking
613      * @param encoderName encoder name
614      * @param format format of media to encode
615      * @param totalFrames total number of frames to encode
616      * @return time taken in ms to encode the frames. This does not include initialization time.
617      */
runEncoder( String encoderName, MediaFormat format, int totalFrames, int runId)618     private RunResult runEncoder(
619             String encoderName, MediaFormat format, int totalFrames, int runId) {
620         MediaCodec codec = null;
621         try {
622             codec = MediaCodec.createByCodecName(encoderName);
623             mEncConfigFormat = format;
624             codec.configure(
625                     format,
626                     null /* surface */,
627                     null /* crypto */,
628                     MediaCodec.CONFIGURE_FLAG_ENCODE);
629         } catch (IllegalStateException e) {
630             Log.e(TAG, "codec '" + encoderName + "' failed configuration.");
631             codec.release();
632             assertTrue("codec '" + encoderName + "' failed configuration.", false);
633         } catch (IOException | NullPointerException e) {
634             Log.i(TAG, "could not find codec for " + format);
635             return new RunResult();
636         }
637         codec.start();
638         mEncInputFormat = codec.getInputFormat();
639         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
640         MediaFormat inputFormat = codec.getInputFormat();
641         mVideoStride = inputFormat.containsKey(MediaFormat.KEY_STRIDE)
642                 ? inputFormat.getInteger(MediaFormat.KEY_STRIDE)
643                 : inputFormat.getInteger(MediaFormat.KEY_WIDTH);
644         mVideoVStride = inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
645                 ? inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
646                 : inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
647 
648         int numBytesSubmitted = 0;
649         int numBytesDequeued = 0;
650         int inFramesCount = 0;
651         int outFramesCount = 0;
652         long lastOutputTimeUs = 0;
653         long start = System.currentTimeMillis();
654         while (true) {
655             int index;
656 
657             if (inFramesCount < totalFrames) {
658                 index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
659                 if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
660                     int size;
661                     long elapsedMs = System.currentTimeMillis() - start;
662                     boolean eos = (inFramesCount == totalFrames - 1
663                             || elapsedMs > mTestConfig.mMaxTimeMs
664                             || (elapsedMs > mTestConfig.mMinTimeMs
665                                     && inFramesCount > mTestConfig.mMinNumFrames));
666 
667                     // when encoder only supports flexYUV, use Image only; otherwise,
668                     // use ByteBuffer & Image each on half of the frames to test both
669                     if (isSrcFlexYUV() || inFramesCount % 2 == 0) {
670                         Image image = codec.getInputImage(index);
671                         // image should always be available
672                         assertTrue(image != null);
673                         size = queueInputImageEncoder(
674                                 codec, image, index, inFramesCount,
675                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
676                     } else {
677                         ByteBuffer buffer = codec.getInputBuffer(index);
678                         size = queueInputBufferEncoder(
679                                 codec, buffer, index, inFramesCount,
680                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
681                     }
682                     inFramesCount++;
683                     numBytesSubmitted += size;
684                     if (VERBOSE) {
685                         Log.d(TAG, "queued " + size + " bytes of input data, frame " +
686                                 (inFramesCount - 1));
687                     }
688                 }
689             }
690             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
691             index = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
692             if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
693             } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
694                 mEncOutputFormat = codec.getOutputFormat();
695             } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
696                 codecOutputBuffers = codec.getOutputBuffers();
697             } else if (index >= 0) {
698                 long nowUs = (System.nanoTime() + 500) / 1000;
699                 dequeueOutputBufferEncoder(codec, codecOutputBuffers, index, info);
700                 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
701                     int pos = outFramesCount - 1;
702                     if (pos >= 0 && pos < mEncoderFrameTimeUsDiff[mCurrentTestRound].length) {
703                         mEncoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
704                     }
705                     lastOutputTimeUs = nowUs;
706 
707                     numBytesDequeued += info.size;
708                     ++outFramesCount;
709                 }
710                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
711                     if (VERBOSE) {
712                         Log.d(TAG, "dequeued output EOS.");
713                     }
714                     break;
715                 }
716                 if (VERBOSE) {
717                     Log.d(TAG, "dequeued " + info.size + " bytes of output data.");
718                 }
719             }
720         }
721         long finish = System.currentTimeMillis();
722         int validDataNum = Math.min(mEncodedOutputBuffer.size() - 1,
723                 mEncoderFrameTimeUsDiff[mCurrentTestRound].length);
724         mEncoderFrameTimeUsDiff[mCurrentTestRound] =
725                 Arrays.copyOf(mEncoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
726         if (VERBOSE) {
727             Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
728                     + "dequeued " + numBytesDequeued + " bytes.");
729         }
730         codec.stop();
731         codec.release();
732         codec = null;
733 
734         mEncOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
735                 format.getInteger(MediaFormat.KEY_BIT_RATE));
736         mEncOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
737                 format.getInteger(MediaFormat.KEY_FRAME_RATE));
738         if (outFramesCount > 0) {
739             mEncOutputFormat.setInteger(
740                     "actual-bitrate",
741                     (int)(numBytesDequeued * 8. * format.getInteger(MediaFormat.KEY_FRAME_RATE)
742                             / outFramesCount));
743         }
744         return new RunResult(outFramesCount, (finish - start) / 1000.);
745     }
746 
747     /**
748      * Fills input buffer for encoder from YUV buffers.
749      * @return size of enqueued data.
750      */
queueInputBufferEncoder( MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags, int runId)751     private int queueInputBufferEncoder(
752             MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags, int runId) {
753         buffer.clear();
754 
755         Point origin = getOrigin(frameCount, runId);
756         // Y color first
757         int srcOffsetY = origin.x + origin.y * mBufferWidth;
758         final byte[] yBuffer = mYBuffer.array();
759         for (int i = 0; i < mVideoHeight; i++) {
760             buffer.position(i * mVideoStride);
761             buffer.put(yBuffer, srcOffsetY, mVideoWidth);
762             srcOffsetY += mBufferWidth;
763         }
764         if (isSrcSemiPlanar()) {
765             int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
766             final byte[] uvBuffer = mUVBuffer.array();
767             for (int i = 0; i < mVideoHeight / 2; i++) {
768                 buffer.position(mVideoVStride * mVideoStride + i * mVideoStride);
769                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth);
770                 srcOffsetU += mBufferWidth;
771             }
772         } else {
773             int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
774             int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
775             final byte[] uvBuffer = mUVBuffer.array();
776             for (int i = 0; i < mVideoHeight / 2; i++) { //U only
777                 buffer.position(mVideoVStride * mVideoStride + i * mVideoStride / 2);
778                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
779                 srcOffsetU += mBufferWidth / 2;
780             }
781             for (int i = 0; i < mVideoHeight / 2; i++) { //V only
782                 buffer.position(mVideoVStride * mVideoStride * 5 / 4 + i * mVideoStride / 2);
783                 buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
784                 srcOffsetV += mBufferWidth / 2;
785             }
786         }
787         // submit till end of the data
788         int size = buffer.position();
789         long ptsUsec = computePresentationTime(frameCount);
790 
791         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
792         if (VERBOSE && (frameCount == 0)) {
793             printByteArray("Y ", mYBuffer.array(), 0, 20);
794             printByteArray("UV ", mUVBuffer.array(), 0, 20);
795             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
796         }
797         return size;
798     }
799 
800     /**
801      * Fills input image for encoder from YUV buffers.
802      * @return size of enqueued data.
803      */
queueInputImageEncoder( MediaCodec codec, Image image, int index, int frameCount, int flags, int runId)804     private int queueInputImageEncoder(
805             MediaCodec codec, Image image, int index, int frameCount, int flags, int runId) {
806         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
807 
808 
809         Point origin = getOrigin(frameCount, runId);
810 
811         // Y color first
812         CodecImage srcImage = new YUVImage(
813                 origin,
814                 mVideoWidth, mVideoHeight,
815                 mBufferWidth, mBufferHeight,
816                 isSrcSemiPlanar(),
817                 mYDirectBuffer, mUVDirectBuffer);
818 
819         CodecUtils.copyFlexYUVImage(image, srcImage);
820 
821         int size = mVideoHeight * mVideoWidth * 3 / 2;
822         long ptsUsec = computePresentationTime(frameCount);
823 
824         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
825         if (VERBOSE && (frameCount == 0)) {
826             printByteArray("Y ", mYBuffer.array(), 0, 20);
827             printByteArray("UV ", mUVBuffer.array(), 0, 20);
828             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
829         }
830         return size;
831     }
832 
833     /**
834      * Dequeue encoded data from output buffer and store for later usage.
835      */
dequeueOutputBufferEncoder( MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info)836     private void dequeueOutputBufferEncoder(
837             MediaCodec codec, ByteBuffer[] outputBuffers,
838             int index, MediaCodec.BufferInfo info) {
839         ByteBuffer output = outputBuffers[index];
840         int l = info.size;
841         ByteBuffer copied = ByteBuffer.allocate(l);
842         output.get(copied.array(), 0, l);
843         BufferInfo savedInfo = new BufferInfo();
844         savedInfo.set(0, l, info.presentationTimeUs, info.flags);
845         mEncodedOutputBuffer.addLast(Pair.create(copied, savedInfo));
846         codec.releaseOutputBuffer(index, false /* render */);
847     }
848 
849     /**
850      * run decoder benchmarking with encoded stream stored from encoding phase
851      * @param decoderName decoder name
852      * @param format format of media to decode
853      * @return returns length-2 array with 0: time for decoding, 1 : rms error of pixels
854      */
runDecoder(String decoderName, MediaFormat format, int runId)855     private RunResult runDecoder(String decoderName, MediaFormat format, int runId) {
856         MediaCodec codec = null;
857         try {
858             codec = MediaCodec.createByCodecName(decoderName);
859         } catch (IOException | NullPointerException e) {
860             Log.i(TAG, "could not find decoder for " + format);
861             return null;
862         }
863         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
864         codec.start();
865         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
866 
867         double totalErrorSquared = 0;
868 
869         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
870         boolean sawOutputEOS = false;
871         int inputLeft = mEncodedOutputBuffer.size();
872         int inputBufferCount = 0;
873         int outFrameCount = 0;
874         YUVValue expected = new YUVValue();
875         YUVValue decoded = new YUVValue();
876         long lastOutputTimeUs = 0;
877         long start = System.currentTimeMillis();
878         while (!sawOutputEOS) {
879             if (inputLeft > 0) {
880                 int inputBufIndex = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US);
881 
882                 if (inputBufIndex >= 0) {
883                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
884                     dstBuf.clear();
885                     ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount).first;
886                     BufferInfo srcInfo = mEncodedOutputBuffer.get(inputBufferCount).second;
887                     int writeSize = src.capacity();
888                     dstBuf.put(src.array(), 0, writeSize);
889 
890                     int flags = srcInfo.flags;
891                     if ((System.currentTimeMillis() - start) > mTestConfig.mMaxTimeMs) {
892                         flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
893                     }
894 
895                     codec.queueInputBuffer(
896                             inputBufIndex,
897                             0 /* offset */,
898                             writeSize,
899                             srcInfo.presentationTimeUs,
900                             flags);
901                     inputLeft --;
902                     inputBufferCount ++;
903                 }
904             }
905 
906             int res = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US);
907             if (res >= 0) {
908                 int outputBufIndex = res;
909 
910                 // only do YUV compare on EOS frame if the buffer size is none-zero
911                 if (info.size > 0) {
912                     long nowUs = (System.nanoTime() + 500) / 1000;
913                     int pos = outFrameCount - 1;
914                     if (pos >= 0 && pos < mDecoderFrameTimeUsDiff[mCurrentTestRound].length) {
915                         mDecoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
916                     }
917                     lastOutputTimeUs = nowUs;
918 
919                     if (mTestConfig.mTestPixels) {
920                         Point origin = getOrigin(computeFrameIndex(info.presentationTimeUs), runId);
921                         int i;
922 
923                         // if decoder supports planar or semiplanar, check output with
924                         // ByteBuffer & Image each on half of the points
925                         int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
926                         if (!isDstFlexYUV()) {
927                             pixelCheckPerFrame /= 2;
928                             ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
929                             if (VERBOSE && (outFrameCount == 0)) {
930                                 printByteBuffer("Y ", buf, 0, 20);
931                                 printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
932                                 printByteBuffer("UV ", buf,
933                                         mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
934                             }
935                             for (i = 0; i < pixelCheckPerFrame; i++) {
936                                 int w = mRandom.nextInt(mVideoWidth);
937                                 int h = mRandom.nextInt(mVideoHeight);
938                                 getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
939                                 getPixelValuesFromOutputBuffer(buf, w, h, decoded);
940                                 if (VERBOSE) {
941                                     Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
942                                             + " expected "
943                                             + expected.mY + "," + expected.mU + "," + expected.mV
944                                             + " decoded "
945                                             + decoded.mY + "," + decoded.mU + "," + decoded.mV);
946                                 }
947                                 totalErrorSquared += expected.calcErrorSquared(decoded);
948                             }
949                         }
950 
951                         Image image = codec.getOutputImage(outputBufIndex);
952                         assertTrue(image != null);
953                         for (i = 0; i < pixelCheckPerFrame; i++) {
954                             int w = mRandom.nextInt(mVideoWidth);
955                             int h = mRandom.nextInt(mVideoHeight);
956                             getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
957                             getPixelValuesFromImage(image, w, h, decoded);
958                             if (VERBOSE) {
959                                 Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
960                                         + " expcted "
961                                         + expected.mY + "," + expected.mU + "," + expected.mV
962                                         + " decoded "
963                                         + decoded.mY + "," + decoded.mU + "," + decoded.mV);
964                             }
965                             totalErrorSquared += expected.calcErrorSquared(decoded);
966                         }
967                     }
968                     outFrameCount++;
969                 }
970                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
971                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
972                     Log.d(TAG, "saw output EOS.");
973                     sawOutputEOS = true;
974                 }
975             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
976                 mDecOutputFormat = codec.getOutputFormat();
977                 Log.d(TAG, "output format has changed to " + mDecOutputFormat);
978                 int colorFormat = mDecOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
979                 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar
980                         || colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
981                     mDstColorFormat = colorFormat;
982                 } else {
983                     mDstColorFormat = CodecCapabilities.COLOR_FormatYUV420Flexible;
984                     Log.w(TAG, "output format changed to unsupported one " +
985                             Integer.toHexString(colorFormat) + ", using FlexYUV");
986                 }
987                 mVideoStride = mDecOutputFormat.containsKey(MediaFormat.KEY_STRIDE)
988                         ? mDecOutputFormat.getInteger(MediaFormat.KEY_STRIDE)
989                         : mDecOutputFormat.getInteger(MediaFormat.KEY_WIDTH);
990                 mVideoVStride = mDecOutputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
991                         ? mDecOutputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
992                         : mDecOutputFormat.getInteger(MediaFormat.KEY_HEIGHT);
993             }
994         }
995         long finish = System.currentTimeMillis();
996         int validDataNum = Math.min(outFrameCount - 1,
997                 mDecoderFrameTimeUsDiff[mCurrentTestRound].length);
998         mDecoderFrameTimeUsDiff[mCurrentTestRound] =
999                 Arrays.copyOf(mDecoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
1000         codec.stop();
1001         codec.release();
1002         codec = null;
1003 
1004         // divide by 3 as sum is done for Y, U, V.
1005         double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
1006         return new RunResult(outFrameCount, (finish - start) / 1000., errorRms);
1007     }
1008 
1009     /**
1010      *  returns origin in the absolute frame for given frame count.
1011      *  The video scene is moving by moving origin per each frame.
1012      */
getOrigin(int frameCount, int runId)1013     private Point getOrigin(int frameCount, int runId) {
1014         // Translation is basically:
1015         //    x = A * sin(B * t) + C * t
1016         //    y = D * cos(E * t) + F * t
1017         //    'bouncing' in a [0, length] regions (constrained to [0, length] by mirroring at 0
1018         //    and length.)
1019         double x = (1 - Math.sin(frameCount / (7. + (runId % 2)))) * 0.1 + frameCount * 0.005;
1020         double y = (1 - Math.cos(frameCount / (10. + (runId & ~1))))
1021                 + frameCount * (0.01 + runId / 1000.);
1022 
1023         // At every 32nd or 13th frame out of 32, an additional varying offset is added to
1024         // produce a jerk.
1025         if (frameCount % 32 == 0) {
1026             x += ((frameCount % 64) / 32) + 0.3 + y;
1027         }
1028         if (frameCount % 32 == 13) {
1029             y += ((frameCount % 64) / 32) + 0.6 + x;
1030         }
1031 
1032         // constrain to region
1033         int xi = (int)((x % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
1034         int yi = (int)((y % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
1035         if (xi > YUV_PLANE_ADDITIONAL_LENGTH) {
1036             xi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - xi;
1037         }
1038         if (yi > YUV_PLANE_ADDITIONAL_LENGTH) {
1039             yi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - yi;
1040         }
1041         return new Point(xi, yi);
1042     }
1043 
1044     /**
1045      * initialize reference YUV plane
1046      * @param w This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
1047      *          to allow movements
1048      * @param h This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
1049      *          to allow movements
1050      * @param semiPlanarEnc
1051      * @param semiPlanarDec
1052      */
initYUVPlane(int w, int h)1053     private void initYUVPlane(int w, int h) {
1054         int bufferSizeY = w * h;
1055         mYBuffer = ByteBuffer.allocate(bufferSizeY);
1056         mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
1057         mYDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY);
1058         mUVDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY / 2);
1059         mBufferWidth = w;
1060         mBufferHeight = h;
1061         final byte[] yArray = mYBuffer.array();
1062         final byte[] uvArray = mUVBuffer.array();
1063         for (int i = 0; i < h; i++) {
1064             for (int j = 0; j < w; j++) {
1065                 yArray[i * w + j]  = clampY((i + j) & 0xff);
1066             }
1067         }
1068         if (isSrcSemiPlanar()) {
1069             for (int i = 0; i < h/2; i++) {
1070                 for (int j = 0; j < w/2; j++) {
1071                     uvArray[i * w + 2 * j]  = (byte) (i & 0xff);
1072                     uvArray[i * w + 2 * j + 1]  = (byte) (j & 0xff);
1073                 }
1074             }
1075         } else { // planar, U first, then V
1076             int vOffset = bufferSizeY / 4;
1077             for (int i = 0; i < h/2; i++) {
1078                 for (int j = 0; j < w/2; j++) {
1079                     uvArray[i * w/2 + j]  = (byte) (i & 0xff);
1080                     uvArray[i * w/2 + vOffset + j]  = (byte) (j & 0xff);
1081                 }
1082             }
1083         }
1084         mYDirectBuffer.put(yArray);
1085         mUVDirectBuffer.put(uvArray);
1086         mYDirectBuffer.rewind();
1087         mUVDirectBuffer.rewind();
1088     }
1089 
1090     /**
1091      * class to store pixel values in YUV
1092      *
1093      */
1094     public class YUVValue {
1095         public byte mY;
1096         public byte mU;
1097         public byte mV;
YUVValue()1098         public YUVValue() {
1099         }
1100 
equalTo(YUVValue other)1101         public boolean equalTo(YUVValue other) {
1102             return (mY == other.mY) && (mU == other.mU) && (mV == other.mV);
1103         }
1104 
calcErrorSquared(YUVValue other)1105         public double calcErrorSquared(YUVValue other) {
1106             // Java's byte is signed but here we want to calculate difference in unsigned bytes.
1107             double yDelta = (mY & 0xFF) - (other.mY & 0xFF);
1108             double uDelta = (mU & 0xFF) - (other.mU & 0xFF);
1109             double vDelta = (mV & 0xFF) - (other.mV & 0xFF);
1110             return yDelta * yDelta + uDelta * uDelta + vDelta * vDelta;
1111         }
1112     }
1113 
1114     /**
1115      * Read YUV values from given position (x,y) for given origin (originX, originY)
1116      * The whole data is already available from YBuffer and UVBuffer.
1117      * @param result pass the result via this. This is for avoiding creating / destroying too many
1118      *               instances
1119      */
getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y, YUVValue result)1120     private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
1121             YUVValue result) {
1122         result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
1123         if (isSrcSemiPlanar()) {
1124             int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
1125             //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
1126             result.mU = mUVBuffer.get(index);
1127             result.mV = mUVBuffer.get(index + 1);
1128         } else {
1129             int vOffset = mBufferWidth * mBufferHeight / 4;
1130             int index = (originY + y) / 2 * mBufferWidth / 2 + (originX + x) / 2;
1131             result.mU = mUVBuffer.get(index);
1132             result.mV = mUVBuffer.get(vOffset + index);
1133         }
1134     }
1135 
1136     /**
1137      * Read YUV pixels from decoded output buffer for give (x, y) position
1138      * Output buffer is composed of Y parts followed by U/V
1139      * @param result pass the result via this. This is for avoiding creating / destroying too many
1140      *               instances
1141      */
getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result)1142     private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
1143         result.mY = buffer.get(y * mVideoStride + x);
1144         if (isDstSemiPlanar()) {
1145             int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride + x / 2 * 2;
1146             //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
1147             result.mU = buffer.get(index);
1148             result.mV = buffer.get(index + 1);
1149         } else {
1150             int vOffset = mVideoStride * mVideoVStride / 4;
1151             int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride / 2 + x / 2;
1152             result.mU = buffer.get(index);
1153             result.mV = buffer.get(index + vOffset);
1154         }
1155     }
1156 
getPixelValuesFromImage(Image image, int x, int y, YUVValue result)1157     private void getPixelValuesFromImage(Image image, int x, int y, YUVValue result) {
1158         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
1159 
1160         Plane[] planes = image.getPlanes();
1161         assertTrue(planes.length == 3);
1162 
1163         result.mY = getPixelFromPlane(planes[0], x, y);
1164         result.mU = getPixelFromPlane(planes[1], x / 2, y / 2);
1165         result.mV = getPixelFromPlane(planes[2], x / 2, y / 2);
1166     }
1167 
getPixelFromPlane(Plane plane, int x, int y)1168     private byte getPixelFromPlane(Plane plane, int x, int y) {
1169         ByteBuffer buf = plane.getBuffer();
1170         return buf.get(y * plane.getRowStride() + x * plane.getPixelStride());
1171     }
1172 
1173     /**
1174      * Y cannot have full range. clamp it to prevent invalid value.
1175      */
clampY(int y)1176     private byte clampY(int y) {
1177         if (y < Y_CLAMP_MIN) {
1178             y = Y_CLAMP_MIN;
1179         } else if (y > Y_CLAMP_MAX) {
1180             y = Y_CLAMP_MAX;
1181         }
1182         return (byte) (y & 0xff);
1183     }
1184 
1185     // for debugging
printByteArray(String msg, byte[] data, int offset, int len)1186     private void printByteArray(String msg, byte[] data, int offset, int len) {
1187         StringBuilder builder = new StringBuilder();
1188         builder.append(msg);
1189         builder.append(":");
1190         for (int i = offset; i < offset + len; i++) {
1191             builder.append(Integer.toHexString(data[i]));
1192             builder.append(",");
1193         }
1194         builder.deleteCharAt(builder.length() - 1);
1195         Log.i(TAG, builder.toString());
1196     }
1197 
1198     // for debugging
printByteBuffer(String msg, ByteBuffer data, int offset, int len)1199     private void printByteBuffer(String msg, ByteBuffer data, int offset, int len) {
1200         StringBuilder builder = new StringBuilder();
1201         builder.append(msg);
1202         builder.append(":");
1203         for (int i = offset; i < offset + len; i++) {
1204             builder.append(Integer.toHexString(data.get(i)));
1205             builder.append(",");
1206         }
1207         builder.deleteCharAt(builder.length() - 1);
1208         Log.i(TAG, builder.toString());
1209     }
1210 
1211     /**
1212      * Generates the presentation time for frame N, in microseconds.
1213      */
computePresentationTime(int frameIndex)1214     private long computePresentationTime(int frameIndex) {
1215         return TIMESTAMP_OFFSET + frameIndex * 1000000L / mFrameRate;
1216     }
1217 
1218     /**
1219      * Generates the frameIndex from presentation time
1220      */
computeFrameIndex(long ptsUsec)1221     private int computeFrameIndex(long ptsUsec) {
1222         assertTrue("value for PtsUsec too low: " + ptsUsec, ptsUsec >= TIMESTAMP_OFFSET);
1223         return (int) ((ptsUsec - TIMESTAMP_OFFSET) * mFrameRate / 1000000.0 + 0.5);
1224     }
1225 
1226 }
1227