1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media.cts; 18 19 import android.content.Context; 20 import android.content.res.Resources; 21 import android.media.MediaCodec; 22 import android.media.MediaCodec.CodecException; 23 import android.media.MediaCodecInfo.CodecCapabilities; 24 import android.media.MediaCodecList; 25 import android.media.MediaCodecInfo; 26 import android.media.MediaFormat; 27 import android.os.Bundle; 28 import android.os.Environment; 29 import android.os.Looper; 30 import android.os.Handler; 31 import android.test.AndroidTestCase; 32 import android.util.Log; 33 import com.android.cts.media.R; 34 35 import java.io.File; 36 import java.io.FileInputStream; 37 import java.io.FileOutputStream; 38 import java.io.InputStream; 39 import java.nio.ByteBuffer; 40 import java.util.Locale; 41 import java.util.ArrayList; 42 import java.util.concurrent.Callable; 43 import java.util.concurrent.CountDownLatch; 44 45 /** 46 * Verification test for vp8 encoder and decoder. 47 * 48 * A raw yv12 stream is encoded at various settings and written to an IVF 49 * file. Encoded stream bitrate and key frame interval are checked against target values. 50 * The stream is later decoded by vp8 decoder to verify frames are decodable and to 51 * calculate PSNR values for various bitrates. 52 */ 53 public class Vp8CodecTestBase extends AndroidTestCase { 54 55 protected static final String TAG = "VP8CodecTestBase"; 56 protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8; 57 private static final String GOOGLE_CODEC_PREFIX = "omx.google."; 58 protected static final String SDCARD_DIR = 59 Environment.getExternalStorageDirectory().getAbsolutePath(); 60 61 // Default timeout for MediaCodec buffer dequeue - 200 ms. 62 protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000; 63 // Default timeout for MediaEncoderAsync - 30 sec. 64 protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000; 65 // Default sync frame interval in frames (zero means allow the encoder to auto-select 66 // key frame interval). 67 private static final int SYNC_FRAME_INTERVAL = 0; 68 // Video bitrate type - should be set to OMX_Video_ControlRateConstant from OMX_Video.h 69 protected static final int VIDEO_ControlRateVariable = 1; 70 protected static final int VIDEO_ControlRateConstant = 2; 71 // NV12 color format supported by QCOM codec, but not declared in MediaCodec - 72 // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h 73 private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04; 74 // Allowable color formats supported by codec - in order of preference. 75 private static final int[] mSupportedColorList = { 76 CodecCapabilities.COLOR_FormatYUV420Planar, 77 CodecCapabilities.COLOR_FormatYUV420SemiPlanar, 78 CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, 79 COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m 80 }; 81 // Scaled image cache list - contains scale factors, for which up-scaled frames 82 // were calculated and were written to yuv file. 83 ArrayList<Integer> mScaledImages = new ArrayList<Integer>(); 84 85 private Resources mResources; 86 87 @Override setContext(Context context)88 public void setContext(Context context) { 89 super.setContext(context); 90 mResources = mContext.getResources(); 91 } 92 93 /** 94 * VP8 codec properties generated by getVp8CodecProperties() function. 95 */ 96 private class CodecProperties { CodecProperties(String codecName, int colorFormat)97 CodecProperties(String codecName, int colorFormat) { 98 this.codecName = codecName; 99 this.colorFormat = colorFormat; 100 } isGoogleCodec()101 public boolean isGoogleCodec() { 102 return codecName.toLowerCase().startsWith(GOOGLE_CODEC_PREFIX); 103 } 104 105 public final String codecName; // OpenMax component name for VP8 codec. 106 public final int colorFormat; // Color format supported by codec. 107 } 108 109 /** 110 * Function to find VP8 codec. 111 * 112 * Iterates through the list of available codecs and tries to find 113 * VPX codec, which can support either YUV420 planar or NV12 color formats. 114 * If forceGoogleCodec parameter set to true the function always returns 115 * Google VPX codec. 116 * If forceGoogleCodec parameter set to false the functions looks for platform 117 * specific VPX codec first. If no platform specific codec exist, falls back to 118 * Google VPX codec. 119 * 120 * @param isEncoder Flag if encoder is requested. 121 * @param forceGoogleCodec Forces to use Google codec. 122 */ getVpxCodecProperties( boolean isEncoder, MediaFormat format, boolean forceGoogleCodec)123 private CodecProperties getVpxCodecProperties( 124 boolean isEncoder, 125 MediaFormat format, 126 boolean forceGoogleCodec) throws Exception { 127 CodecProperties codecProperties = null; 128 String mime = format.getString(MediaFormat.KEY_MIME); 129 130 // Loop through the list of omx components in case platform specific codec 131 // is requested. 132 MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS); 133 for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) { 134 if (isEncoder != codecInfo.isEncoder()) { 135 continue; 136 } 137 Log.v(TAG, codecInfo.getName()); 138 // TODO: remove dependence of Google from the test 139 // Check if this is Google codec - we should ignore it. 140 boolean isGoogleCodec = 141 codecInfo.getName().toLowerCase().startsWith(GOOGLE_CODEC_PREFIX); 142 if (!isGoogleCodec && forceGoogleCodec) { 143 continue; 144 } 145 146 for (String type : codecInfo.getSupportedTypes()) { 147 if (!type.equalsIgnoreCase(mime)) { 148 continue; 149 } 150 CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type); 151 if (!capabilities.isFormatSupported(format)) { 152 continue; 153 } 154 155 // Get candidate codec properties. 156 Log.v(TAG, "Found candidate codec " + codecInfo.getName()); 157 for (int colorFormat: capabilities.colorFormats) { 158 Log.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat)); 159 } 160 161 // Check supported color formats. 162 for (int supportedColorFormat : mSupportedColorList) { 163 for (int codecColorFormat : capabilities.colorFormats) { 164 if (codecColorFormat == supportedColorFormat) { 165 codecProperties = new CodecProperties(codecInfo.getName(), 166 codecColorFormat); 167 Log.v(TAG, "Found target codec " + codecProperties.codecName + 168 ". Color: 0x" + Integer.toHexString(codecColorFormat)); 169 // return first HW codec found 170 if (!isGoogleCodec) { 171 return codecProperties; 172 } 173 } 174 } 175 } 176 } 177 } 178 if (codecProperties == null) { 179 Log.i(TAG, "no suitable " + (forceGoogleCodec ? "google " : "") 180 + (isEncoder ? "encoder " : "decoder ") + "found for " + format); 181 } 182 return codecProperties; 183 } 184 185 /** 186 * Parameters for encoded video stream. 187 */ 188 protected class EncoderOutputStreamParameters { 189 // Name of raw YUV420 input file. When the value of this parameter 190 // is set to null input file descriptor from inputResourceId parameter 191 // is used instead. 192 public String inputYuvFilename; 193 // Name of scaled YUV420 input file. 194 public String scaledYuvFilename; 195 // File descriptor for the raw input file (YUV420). Used only if 196 // inputYuvFilename parameter is null. 197 int inputResourceId; 198 // Name of the IVF file to write encoded bitsream 199 public String outputIvfFilename; 200 // Force to use Google VP8 encoder. 201 boolean forceGoogleEncoder; 202 // Number of frames to encode. 203 int frameCount; 204 // Frame rate of input file in frames per second. 205 int frameRate; 206 // Encoded frame width. 207 public int frameWidth; 208 // Encoded frame height. 209 public int frameHeight; 210 // Encoding bitrate array in bits/second for every frame. If array length 211 // is shorter than the total number of frames, the last value is re-used for 212 // all remaining frames. For constant bitrate encoding single element 213 // array can be used with first element set to target bitrate value. 214 public int[] bitrateSet; 215 // Encoding bitrate type - VBR or CBR 216 public int bitrateType; 217 // Number of temporal layers 218 public int temporalLayers; 219 // Desired key frame interval - codec is asked to generate key frames 220 // at a period defined by this parameter. 221 public int syncFrameInterval; 222 // Optional parameter - forced key frame interval. Used to 223 // explicitly request the codec to generate key frames using 224 // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter. 225 public int syncForceFrameInterval; 226 // Buffer timeout 227 long timeoutDequeue; 228 // Flag if encoder should run in Looper thread. 229 boolean runInLooperThread; 230 } 231 232 /** 233 * Generates an array of default parameters for encoder output stream based on 234 * upscaling value. 235 */ getDefaultEncodingParameterList( String inputYuvName, String outputIvfBaseName, int encodeSeconds, int[] resolutionScales, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int[] bitrates, boolean syncEncoding)236 protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList( 237 String inputYuvName, 238 String outputIvfBaseName, 239 int encodeSeconds, 240 int[] resolutionScales, 241 int frameWidth, 242 int frameHeight, 243 int frameRate, 244 int bitrateMode, 245 int[] bitrates, 246 boolean syncEncoding) { 247 assertTrue(resolutionScales.length == bitrates.length); 248 int numCodecs = resolutionScales.length; 249 ArrayList<EncoderOutputStreamParameters> outputParameters = 250 new ArrayList<EncoderOutputStreamParameters>(numCodecs); 251 for (int i = 0; i < numCodecs; i++) { 252 EncoderOutputStreamParameters params = new EncoderOutputStreamParameters(); 253 if (inputYuvName != null) { 254 params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName; 255 } else { 256 params.inputYuvFilename = null; 257 } 258 params.scaledYuvFilename = SDCARD_DIR + File.separator + 259 outputIvfBaseName + resolutionScales[i]+ ".yuv"; 260 params.inputResourceId = R.raw.football_qvga; 261 params.outputIvfFilename = SDCARD_DIR + File.separator + 262 outputIvfBaseName + resolutionScales[i] + ".ivf"; 263 params.forceGoogleEncoder = false; 264 params.frameCount = encodeSeconds * frameRate; 265 params.frameRate = frameRate; 266 params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280); 267 params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720); 268 params.bitrateSet = new int[1]; 269 params.bitrateSet[0] = bitrates[i]; 270 params.bitrateType = bitrateMode; 271 params.temporalLayers = 0; 272 params.syncFrameInterval = SYNC_FRAME_INTERVAL; 273 params.syncForceFrameInterval = 0; 274 if (syncEncoding) { 275 params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US; 276 params.runInLooperThread = false; 277 } else { 278 params.timeoutDequeue = 0; 279 params.runInLooperThread = true; 280 } 281 outputParameters.add(params); 282 } 283 return outputParameters; 284 } 285 getDefaultEncodingParameters( String inputYuvName, String outputIvfBaseName, int encodeSeconds, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int bitrate, boolean syncEncoding)286 protected EncoderOutputStreamParameters getDefaultEncodingParameters( 287 String inputYuvName, 288 String outputIvfBaseName, 289 int encodeSeconds, 290 int frameWidth, 291 int frameHeight, 292 int frameRate, 293 int bitrateMode, 294 int bitrate, 295 boolean syncEncoding) { 296 int[] scaleValues = { 1 }; 297 int[] bitrates = { bitrate }; 298 return getDefaultEncodingParameterList( 299 inputYuvName, 300 outputIvfBaseName, 301 encodeSeconds, 302 scaleValues, 303 frameWidth, 304 frameHeight, 305 frameRate, 306 bitrateMode, 307 bitrates, 308 syncEncoding).get(0); 309 } 310 311 /** 312 * Converts (interleaves) YUV420 planar to NV12. 313 * Assumes packed, macroblock-aligned frame with no cropping 314 * (visible/coded row length == stride). 315 */ YUV420ToNV(int width, int height, byte[] yuv)316 private static byte[] YUV420ToNV(int width, int height, byte[] yuv) { 317 byte[] nv = new byte[yuv.length]; 318 // Y plane we just copy. 319 System.arraycopy(yuv, 0, nv, 0, width * height); 320 321 // U & V plane we interleave. 322 int u_offset = width * height; 323 int v_offset = u_offset + u_offset / 4; 324 int nv_offset = width * height; 325 for (int i = 0; i < width * height / 4; i++) { 326 nv[nv_offset++] = yuv[u_offset++]; 327 nv[nv_offset++] = yuv[v_offset++]; 328 } 329 return nv; 330 } 331 332 /** 333 * Converts (de-interleaves) NV12 to YUV420 planar. 334 * Stride may be greater than width, slice height may be greater than height. 335 */ NV12ToYUV420(int width, int height, int stride, int sliceHeight, byte[] nv12)336 private static byte[] NV12ToYUV420(int width, int height, 337 int stride, int sliceHeight, byte[] nv12) { 338 byte[] yuv = new byte[width * height * 3 / 2]; 339 340 // Y plane we just copy. 341 for (int i = 0; i < height; i++) { 342 System.arraycopy(nv12, i * stride, yuv, i * width, width); 343 } 344 345 // U & V plane - de-interleave. 346 int u_offset = width * height; 347 int v_offset = u_offset + u_offset / 4; 348 int nv_offset; 349 for (int i = 0; i < height / 2; i++) { 350 nv_offset = stride * (sliceHeight + i); 351 for (int j = 0; j < width / 2; j++) { 352 yuv[u_offset++] = nv12[nv_offset++]; 353 yuv[v_offset++] = nv12[nv_offset++]; 354 } 355 } 356 return yuv; 357 } 358 359 /** 360 * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice 361 * height equal to the original frame width and height. 362 */ PackYUV420(int width, int height, int stride, int sliceHeight, byte[] src)363 private static byte[] PackYUV420(int width, int height, 364 int stride, int sliceHeight, byte[] src) { 365 byte[] dst = new byte[width * height * 3 / 2]; 366 // Y copy. 367 for (int i = 0; i < height; i++) { 368 System.arraycopy(src, i * stride, dst, i * width, width); 369 } 370 // U and V copy. 371 int u_src_offset = stride * sliceHeight; 372 int v_src_offset = u_src_offset + u_src_offset / 4; 373 int u_dst_offset = width * height; 374 int v_dst_offset = u_dst_offset + u_dst_offset / 4; 375 for (int i = 0; i < height / 2; i++) { 376 System.arraycopy(src, u_src_offset + i * (stride / 2), 377 dst, u_dst_offset + i * (width / 2), width / 2); 378 System.arraycopy(src, v_src_offset + i * (stride / 2), 379 dst, v_dst_offset + i * (width / 2), width / 2); 380 } 381 return dst; 382 } 383 384 imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride, byte[] dst, int dstByteOffset, int dstWidth, int dstHeight)385 private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride, 386 byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) { 387 for (int i = 0; i < dstHeight/2 - 1; i++) { 388 int dstOffset0 = 2 * i * dstWidth + dstByteOffset; 389 int dstOffset1 = dstOffset0 + dstWidth; 390 int srcOffset0 = i * srcStride + srcByteOffset; 391 int srcOffset1 = srcOffset0 + srcStride; 392 int pixel00 = (int)src[srcOffset0++] & 0xff; 393 int pixel10 = (int)src[srcOffset1++] & 0xff; 394 for (int j = 0; j < dstWidth/2 - 1; j++) { 395 int pixel01 = (int)src[srcOffset0++] & 0xff; 396 int pixel11 = (int)src[srcOffset1++] & 0xff; 397 dst[dstOffset0++] = (byte)pixel00; 398 dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2); 399 dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2); 400 dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4); 401 pixel00 = pixel01; 402 pixel10 = pixel11; 403 } 404 // last column 405 dst[dstOffset0++] = (byte)pixel00; 406 dst[dstOffset0++] = (byte)pixel00; 407 dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2); 408 dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2); 409 } 410 411 // last row 412 int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset; 413 int dstOffset1 = dstOffset0 + dstWidth; 414 int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset; 415 int pixel00 = (int)src[srcOffset0++] & 0xff; 416 for (int j = 0; j < dstWidth/2 - 1; j++) { 417 int pixel01 = (int)src[srcOffset0++] & 0xff; 418 dst[dstOffset0++] = (byte)pixel00; 419 dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2); 420 dst[dstOffset1++] = (byte)pixel00; 421 dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2); 422 pixel00 = pixel01; 423 } 424 // the very last pixel - bottom right 425 dst[dstOffset0++] = (byte)pixel00; 426 dst[dstOffset0++] = (byte)pixel00; 427 dst[dstOffset1++] = (byte)pixel00; 428 dst[dstOffset1++] = (byte)pixel00; 429 } 430 431 /** 432 * Up-scale image. 433 * Scale factor is defined by source and destination width ratio. 434 * Only 1:2 and 1:4 up-scaling is supported for now. 435 * For 640x480 -> 1280x720 conversion only top 640x360 part of the original 436 * image is scaled. 437 */ imageScale(byte[] src, int srcWidth, int srcHeight, int dstWidth, int dstHeight)438 private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight, 439 int dstWidth, int dstHeight) throws Exception { 440 int srcYSize = srcWidth * srcHeight; 441 int dstYSize = dstWidth * dstHeight; 442 byte[] dst = null; 443 if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) { 444 // 1:2 upscale 445 dst = new byte[dstWidth * dstHeight * 3 / 2]; 446 imageUpscale1To2(src, 0, srcWidth, 447 dst, 0, dstWidth, dstHeight); // Y 448 imageUpscale1To2(src, srcYSize, srcWidth / 2, 449 dst, dstYSize, dstWidth / 2, dstHeight / 2); // U 450 imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2, 451 dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2); // V 452 } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) { 453 // 1:4 upscale - in two steps 454 int midWidth = 2 * srcWidth; 455 int midHeight = 2 * srcHeight; 456 byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight); 457 dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight); 458 459 } else { 460 throw new RuntimeException("Can not find proper scaling function"); 461 } 462 463 return dst; 464 } 465 cacheScaledImage( String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight, String dstYuvFilename, int dstFrameWidth, int dstFrameHeight)466 private void cacheScaledImage( 467 String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight, 468 String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception { 469 InputStream srcStream = OpenFileOrResourceId(srcYuvFilename, srcResourceId); 470 FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false); 471 int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2; 472 byte[] srcFrame = new byte[srcFrameSize]; 473 byte[] dstFrame = null; 474 Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename); 475 while (true) { 476 int bytesRead = srcStream.read(srcFrame); 477 if (bytesRead != srcFrame.length) { 478 break; 479 } 480 if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) { 481 dstFrame = srcFrame; 482 } else { 483 dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight, 484 dstFrameWidth, dstFrameHeight); 485 } 486 dstFile.write(dstFrame); 487 } 488 srcStream.close(); 489 dstFile.close(); 490 } 491 492 493 /** 494 * A basic check if an encoded stream is decodable. 495 * 496 * The most basic confirmation we can get about a frame 497 * being properly encoded is trying to decode it. 498 * (Especially in realtime mode encode output is non- 499 * deterministic, therefore a more thorough check like 500 * md5 sum comparison wouldn't work.) 501 * 502 * Indeed, MediaCodec will raise an IllegalStateException 503 * whenever vp8 decoder fails to decode a frame, and 504 * this test uses that fact to verify the bitstream. 505 * 506 * @param inputIvfFilename The name of the IVF file containing encoded bitsream. 507 * @param outputYuvFilename The name of the output YUV file (optional). 508 * @param frameRate Frame rate of input file in frames per second 509 * @param forceGoogleDecoder Force to use Google VP8 decoder. 510 */ decode( String inputIvfFilename, String outputYuvFilename, int frameRate, boolean forceGoogleDecoder)511 protected ArrayList<MediaCodec.BufferInfo> decode( 512 String inputIvfFilename, 513 String outputYuvFilename, 514 int frameRate, 515 boolean forceGoogleDecoder) throws Exception { 516 ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>(); 517 518 // Open input/output. 519 IvfReader ivf = new IvfReader(inputIvfFilename); 520 int frameWidth = ivf.getWidth(); 521 int frameHeight = ivf.getHeight(); 522 int frameCount = ivf.getFrameCount(); 523 int frameStride = frameWidth; 524 int frameSliceHeight = frameHeight; 525 assertTrue(frameWidth > 0); 526 assertTrue(frameHeight > 0); 527 assertTrue(frameCount > 0); 528 529 // Create decoder. 530 MediaFormat format = MediaFormat.createVideoFormat( 531 VP8_MIME, ivf.getWidth(), ivf.getHeight()); 532 CodecProperties properties = getVpxCodecProperties( 533 false /* encoder */, format, forceGoogleDecoder); 534 if (properties == null) { 535 ivf.close(); 536 return null; 537 } 538 int frameColorFormat = properties.colorFormat; 539 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 540 541 FileOutputStream yuv = null; 542 if (outputYuvFilename != null) { 543 yuv = new FileOutputStream(outputYuvFilename, false); 544 } 545 546 Log.d(TAG, "Creating decoder " + properties.codecName + 547 ". Color format: 0x" + Integer.toHexString(frameColorFormat) + 548 ". " + frameWidth + " x " + frameHeight); 549 Log.d(TAG, " Format: " + format); 550 Log.d(TAG, " In: " + inputIvfFilename + ". Out:" + outputYuvFilename); 551 MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName); 552 decoder.configure(format, 553 null, // surface 554 null, // crypto 555 0); // flags 556 decoder.start(); 557 558 ByteBuffer[] inputBuffers = decoder.getInputBuffers(); 559 ByteBuffer[] outputBuffers = decoder.getOutputBuffers(); 560 MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 561 562 // decode loop 563 int inputFrameIndex = 0; 564 int outputFrameIndex = 0; 565 long inPresentationTimeUs = 0; 566 long outPresentationTimeUs = 0; 567 boolean sawOutputEOS = false; 568 boolean sawInputEOS = false; 569 570 while (!sawOutputEOS) { 571 if (!sawInputEOS) { 572 int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US); 573 if (inputBufIndex >= 0) { 574 byte[] frame = ivf.readFrame(inputFrameIndex); 575 576 if (inputFrameIndex == frameCount - 1) { 577 Log.d(TAG, " Input EOS for frame # " + inputFrameIndex); 578 sawInputEOS = true; 579 } 580 581 inputBuffers[inputBufIndex].clear(); 582 inputBuffers[inputBufIndex].put(frame); 583 inputBuffers[inputBufIndex].rewind(); 584 inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate; 585 586 decoder.queueInputBuffer( 587 inputBufIndex, 588 0, // offset 589 frame.length, 590 inPresentationTimeUs, 591 sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); 592 593 inputFrameIndex++; 594 } 595 } 596 597 int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US); 598 while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED || 599 result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 600 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 601 outputBuffers = decoder.getOutputBuffers(); 602 } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 603 // Process format change 604 format = decoder.getOutputFormat(); 605 frameWidth = format.getInteger(MediaFormat.KEY_WIDTH); 606 frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT); 607 frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); 608 Log.d(TAG, "Decoder output format change. Color: 0x" + 609 Integer.toHexString(frameColorFormat)); 610 Log.d(TAG, "Format: " + format.toString()); 611 612 // Parse frame and slice height from undocumented values 613 if (format.containsKey("stride")) { 614 frameStride = format.getInteger("stride"); 615 } else { 616 frameStride = frameWidth; 617 } 618 if (format.containsKey("slice-height")) { 619 frameSliceHeight = format.getInteger("slice-height"); 620 } else { 621 frameSliceHeight = frameHeight; 622 } 623 Log.d(TAG, "Frame stride and slice height: " + frameStride + 624 " x " + frameSliceHeight); 625 frameStride = Math.max(frameWidth, frameStride); 626 frameSliceHeight = Math.max(frameHeight, frameSliceHeight); 627 } 628 result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US); 629 } 630 if (result >= 0) { 631 int outputBufIndex = result; 632 outPresentationTimeUs = bufferInfo.presentationTimeUs; 633 Log.v(TAG, "Writing buffer # " + outputFrameIndex + 634 ". Size: " + bufferInfo.size + 635 ". InTime: " + (inPresentationTimeUs + 500)/1000 + 636 ". OutTime: " + (outPresentationTimeUs + 500)/1000); 637 if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 638 sawOutputEOS = true; 639 Log.d(TAG, " Output EOS for frame # " + outputFrameIndex); 640 } 641 642 if (bufferInfo.size > 0) { 643 // Save decoder output to yuv file. 644 if (yuv != null) { 645 byte[] frame = new byte[bufferInfo.size]; 646 outputBuffers[outputBufIndex].position(bufferInfo.offset); 647 outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size); 648 // Convert NV12 to YUV420 if necessary. 649 if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) { 650 frame = NV12ToYUV420(frameWidth, frameHeight, 651 frameStride, frameSliceHeight, frame); 652 } 653 int writeLength = Math.min(frameWidth * frameHeight * 3 / 2, frame.length); 654 // Pack frame if necessary. 655 if (writeLength < frame.length && 656 (frameStride > frameWidth || frameSliceHeight > frameHeight)) { 657 frame = PackYUV420(frameWidth, frameHeight, 658 frameStride, frameSliceHeight, frame); 659 } 660 yuv.write(frame, 0, writeLength); 661 } 662 outputFrameIndex++; 663 664 // Update statistics - store presentation time delay in offset 665 long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs; 666 MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo(); 667 bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size, 668 outPresentationTimeUs, bufferInfo.flags); 669 bufferInfos.add(bufferInfoCopy); 670 } 671 decoder.releaseOutputBuffer(outputBufIndex, false); 672 } 673 } 674 decoder.stop(); 675 decoder.release(); 676 ivf.close(); 677 if (yuv != null) { 678 yuv.close(); 679 } 680 681 return bufferInfos; 682 } 683 684 685 /** 686 * Helper function to return InputStream from either filename (if set) 687 * or resource id (if filename is not set). 688 */ OpenFileOrResourceId(String filename, int resourceId)689 private InputStream OpenFileOrResourceId(String filename, int resourceId) throws Exception { 690 if (filename != null) { 691 return new FileInputStream(filename); 692 } 693 return mResources.openRawResource(resourceId); 694 } 695 696 /** 697 * Results of frame encoding. 698 */ 699 protected class MediaEncoderOutput { 700 public long inPresentationTimeUs; 701 public long outPresentationTimeUs; 702 public boolean outputGenerated; 703 public int flags; 704 public byte[] buffer; 705 } 706 707 protected class MediaEncoderAsyncHelper { 708 private final EncoderOutputStreamParameters mStreamParams; 709 private final CodecProperties mProperties; 710 private final ArrayList<MediaCodec.BufferInfo> mBufferInfos; 711 private final IvfWriter mIvf; 712 private final byte[] mSrcFrame; 713 714 private InputStream mYuvStream; 715 private int mInputFrameIndex; 716 MediaEncoderAsyncHelper( EncoderOutputStreamParameters streamParams, CodecProperties properties, ArrayList<MediaCodec.BufferInfo> bufferInfos, IvfWriter ivf)717 MediaEncoderAsyncHelper( 718 EncoderOutputStreamParameters streamParams, 719 CodecProperties properties, 720 ArrayList<MediaCodec.BufferInfo> bufferInfos, 721 IvfWriter ivf) 722 throws Exception { 723 mStreamParams = streamParams; 724 mProperties = properties; 725 mBufferInfos = bufferInfos; 726 mIvf = ivf; 727 728 int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2; 729 mSrcFrame = new byte[srcFrameSize]; 730 731 mYuvStream = OpenFileOrResourceId( 732 streamParams.inputYuvFilename, streamParams.inputResourceId); 733 } 734 getInputFrame()735 public byte[] getInputFrame() { 736 // Check EOS 737 if (mStreamParams.frameCount == 0 738 || (mStreamParams.frameCount > 0 739 && mInputFrameIndex >= mStreamParams.frameCount)) { 740 Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex); 741 return null; 742 } 743 744 try { 745 int bytesRead = mYuvStream.read(mSrcFrame); 746 747 if (bytesRead == -1) { 748 // rewind to beginning of file 749 mYuvStream.close(); 750 mYuvStream = OpenFileOrResourceId( 751 mStreamParams.inputYuvFilename, mStreamParams.inputResourceId); 752 bytesRead = mYuvStream.read(mSrcFrame); 753 } 754 } catch (Exception e) { 755 Log.e(TAG, "Failed to read YUV file."); 756 return null; 757 } 758 mInputFrameIndex++; 759 760 // Convert YUV420 to NV12 if necessary 761 if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) { 762 return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight, 763 mSrcFrame); 764 } else { 765 return mSrcFrame; 766 } 767 } 768 saveOutputFrame(MediaEncoderOutput out)769 public boolean saveOutputFrame(MediaEncoderOutput out) { 770 if (out.outputGenerated) { 771 if (out.buffer.length > 0) { 772 // Save frame 773 try { 774 mIvf.writeFrame(out.buffer, out.outPresentationTimeUs); 775 } catch (Exception e) { 776 Log.d(TAG, "Failed to write frame"); 777 return true; 778 } 779 780 // Update statistics - store presentation time delay in offset 781 long presentationTimeUsDelta = out.inPresentationTimeUs - 782 out.outPresentationTimeUs; 783 MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo(); 784 bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length, 785 out.outPresentationTimeUs, out.flags); 786 mBufferInfos.add(bufferInfoCopy); 787 } 788 // Detect output EOS 789 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 790 Log.d(TAG, "----Output EOS "); 791 return true; 792 } 793 } 794 return false; 795 } 796 } 797 798 /** 799 * Video encoder wrapper class. 800 * Allows to run the encoder either in a callee's thread or in a looper thread 801 * using buffer dequeue ready notification callbacks. 802 * 803 * Function feedInput() is used to send raw video frame to the encoder input. When encoder 804 * is configured to run in async mode the function will run in a looper thread. 805 * Encoded frame can be retrieved by calling getOutput() function. 806 */ 807 protected class MediaEncoderAsync extends Thread { 808 private int mId; 809 private MediaCodec mCodec; 810 private MediaFormat mFormat; 811 private ByteBuffer[] mInputBuffers; 812 private ByteBuffer[] mOutputBuffers; 813 private int mInputFrameIndex; 814 private int mOutputFrameIndex; 815 private int mInputBufIndex; 816 private int mFrameRate; 817 private long mTimeout; 818 private MediaCodec.BufferInfo mBufferInfo; 819 private long mInPresentationTimeUs; 820 private long mOutPresentationTimeUs; 821 private boolean mAsync; 822 // Flag indicating if input frame was consumed by the encoder in feedInput() call. 823 private boolean mConsumedInput; 824 // Result of frame encoding returned by getOutput() call. 825 private MediaEncoderOutput mOutput; 826 // Object used to signal that looper thread has started and Handler instance associated 827 // with looper thread has been allocated. 828 private final Object mThreadEvent = new Object(); 829 // Object used to signal that MediaCodec buffer dequeue notification callback 830 // was received. 831 private final Object mCallbackEvent = new Object(); 832 private Handler mHandler; 833 private boolean mCallbackReceived; 834 private MediaEncoderAsyncHelper mHelper; 835 private final Object mCompletionEvent = new Object(); 836 private boolean mCompleted; 837 838 private MediaCodec.Callback mCallback = new MediaCodec.Callback() { 839 @Override 840 public void onInputBufferAvailable(MediaCodec codec, int index) { 841 if (mHelper == null) { 842 Log.e(TAG, "async helper not available"); 843 return; 844 } 845 846 byte[] encFrame = mHelper.getInputFrame(); 847 boolean inputEOS = (encFrame == null); 848 849 int encFrameLength = 0; 850 int flags = 0; 851 if (inputEOS) { 852 flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM; 853 } else { 854 encFrameLength = encFrame.length; 855 856 ByteBuffer byteBuffer = mCodec.getInputBuffer(index); 857 byteBuffer.put(encFrame); 858 byteBuffer.rewind(); 859 860 mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate; 861 862 Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex + 863 ". InTime: " + (mInPresentationTimeUs + 500)/1000); 864 865 mInputFrameIndex++; 866 } 867 868 mCodec.queueInputBuffer( 869 index, 870 0, // offset 871 encFrameLength, // size 872 mInPresentationTimeUs, 873 flags); 874 } 875 876 @Override 877 public void onOutputBufferAvailable(MediaCodec codec, 878 int index, MediaCodec.BufferInfo info) { 879 if (mHelper == null) { 880 Log.e(TAG, "async helper not available"); 881 return; 882 } 883 884 MediaEncoderOutput out = new MediaEncoderOutput(); 885 886 out.buffer = new byte[info.size]; 887 ByteBuffer outputBuffer = mCodec.getOutputBuffer(index); 888 outputBuffer.get(out.buffer, 0, info.size); 889 mOutPresentationTimeUs = info.presentationTimeUs; 890 891 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex; 892 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 893 logStr += " CONFIG. "; 894 } 895 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) { 896 logStr += " KEY. "; 897 } 898 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 899 logStr += " EOS. "; 900 } 901 logStr += " Size: " + info.size; 902 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 + 903 ". OutTime: " + (mOutPresentationTimeUs + 500)/1000; 904 Log.v(TAG, logStr); 905 906 if (mOutputFrameIndex == 0 && 907 ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) { 908 throw new RuntimeException("First frame is not a sync frame."); 909 } 910 911 if (info.size > 0) { 912 mOutputFrameIndex++; 913 out.inPresentationTimeUs = mInPresentationTimeUs; 914 out.outPresentationTimeUs = mOutPresentationTimeUs; 915 } 916 mCodec.releaseOutputBuffer(index, false); 917 918 out.flags = info.flags; 919 out.outputGenerated = true; 920 921 if (mHelper.saveOutputFrame(out)) { 922 // output EOS 923 signalCompletion(); 924 } 925 } 926 927 @Override 928 public void onError(MediaCodec codec, CodecException e) { 929 Log.e(TAG, "onError: " + e 930 + ", transient " + e.isTransient() 931 + ", recoverable " + e.isRecoverable() 932 + ", error " + e.getErrorCode()); 933 } 934 935 @Override 936 public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 937 Log.i(TAG, "onOutputFormatChanged: " + format.toString()); 938 } 939 }; 940 requestStart()941 private synchronized void requestStart() throws Exception { 942 mHandler = null; 943 start(); 944 // Wait for Hander allocation 945 synchronized (mThreadEvent) { 946 while (mHandler == null) { 947 mThreadEvent.wait(); 948 } 949 } 950 } 951 setAsyncHelper(MediaEncoderAsyncHelper helper)952 public void setAsyncHelper(MediaEncoderAsyncHelper helper) { 953 mHelper = helper; 954 } 955 956 @Override run()957 public void run() { 958 Looper.prepare(); 959 synchronized (mThreadEvent) { 960 mHandler = new Handler(); 961 mThreadEvent.notify(); 962 } 963 Looper.loop(); 964 } 965 runCallable(final Callable<?> callable)966 private void runCallable(final Callable<?> callable) throws Exception { 967 if (mAsync) { 968 final Exception[] exception = new Exception[1]; 969 final CountDownLatch countDownLatch = new CountDownLatch(1); 970 mHandler.post( new Runnable() { 971 @Override 972 public void run() { 973 try { 974 callable.call(); 975 } catch (Exception e) { 976 exception[0] = e; 977 } finally { 978 countDownLatch.countDown(); 979 } 980 } 981 } ); 982 983 // Wait for task completion 984 countDownLatch.await(); 985 if (exception[0] != null) { 986 throw exception[0]; 987 } 988 } else { 989 callable.call(); 990 } 991 } 992 requestStop()993 private synchronized void requestStop() throws Exception { 994 mHandler.post( new Runnable() { 995 @Override 996 public void run() { 997 // This will run on the Looper thread 998 Log.v(TAG, "MediaEncoder looper quitting"); 999 Looper.myLooper().quitSafely(); 1000 } 1001 } ); 1002 // Wait for completion 1003 join(); 1004 mHandler = null; 1005 } 1006 createCodecInternal(final String name, final MediaFormat format, final long timeout)1007 private void createCodecInternal(final String name, 1008 final MediaFormat format, final long timeout) throws Exception { 1009 mBufferInfo = new MediaCodec.BufferInfo(); 1010 mFormat = format; 1011 mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE); 1012 mTimeout = timeout; 1013 mInputFrameIndex = 0; 1014 mOutputFrameIndex = 0; 1015 mInPresentationTimeUs = 0; 1016 mOutPresentationTimeUs = 0; 1017 1018 mCodec = MediaCodec.createByCodecName(name); 1019 if (mAsync) { 1020 mCodec.setCallback(mCallback); 1021 } 1022 mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 1023 mCodec.start(); 1024 1025 // get the cached input/output only in sync mode 1026 if (!mAsync) { 1027 mInputBuffers = mCodec.getInputBuffers(); 1028 mOutputBuffers = mCodec.getOutputBuffers(); 1029 } 1030 } 1031 createCodec(int id, final String name, final MediaFormat format, final long timeout, boolean async)1032 public void createCodec(int id, final String name, final MediaFormat format, 1033 final long timeout, boolean async) throws Exception { 1034 mId = id; 1035 mAsync = async; 1036 if (mAsync) { 1037 requestStart(); // start looper thread 1038 } 1039 runCallable( new Callable<Void>() { 1040 @Override 1041 public Void call() throws Exception { 1042 createCodecInternal(name, format, timeout); 1043 return null; 1044 } 1045 } ); 1046 } 1047 feedInputInternal(final byte[] encFrame, final boolean inputEOS)1048 private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) { 1049 mConsumedInput = false; 1050 // Feed input 1051 mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout); 1052 1053 if (mInputBufIndex >= 0) { 1054 mInputBuffers[mInputBufIndex].clear(); 1055 mInputBuffers[mInputBufIndex].put(encFrame); 1056 mInputBuffers[mInputBufIndex].rewind(); 1057 int encFrameLength = encFrame.length; 1058 int flags = 0; 1059 if (inputEOS) { 1060 encFrameLength = 0; 1061 flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM; 1062 } 1063 if (!inputEOS) { 1064 Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex + 1065 ". InTime: " + (mInPresentationTimeUs + 500)/1000); 1066 mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate; 1067 mInputFrameIndex++; 1068 } 1069 1070 mCodec.queueInputBuffer( 1071 mInputBufIndex, 1072 0, // offset 1073 encFrameLength, // size 1074 mInPresentationTimeUs, 1075 flags); 1076 1077 mConsumedInput = true; 1078 } else { 1079 Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER"); 1080 } 1081 mCallbackReceived = false; 1082 } 1083 feedInput(final byte[] encFrame, final boolean inputEOS)1084 public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception { 1085 runCallable( new Callable<Void>() { 1086 @Override 1087 public Void call() throws Exception { 1088 feedInputInternal(encFrame, inputEOS); 1089 return null; 1090 } 1091 } ); 1092 return mConsumedInput; 1093 } 1094 getOutputInternal()1095 private void getOutputInternal() { 1096 mOutput = new MediaEncoderOutput(); 1097 mOutput.inPresentationTimeUs = mInPresentationTimeUs; 1098 mOutput.outPresentationTimeUs = mOutPresentationTimeUs; 1099 mOutput.outputGenerated = false; 1100 1101 // Get output from the encoder 1102 int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout); 1103 while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED || 1104 result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 1105 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 1106 mOutputBuffers = mCodec.getOutputBuffers(); 1107 } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 1108 mFormat = mCodec.getOutputFormat(); 1109 Log.d(TAG, "Format changed: " + mFormat.toString()); 1110 } 1111 result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout); 1112 } 1113 if (result == MediaCodec.INFO_TRY_AGAIN_LATER) { 1114 Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER"); 1115 } 1116 1117 if (result >= 0) { 1118 int outputBufIndex = result; 1119 mOutput.buffer = new byte[mBufferInfo.size]; 1120 mOutputBuffers[outputBufIndex].position(mBufferInfo.offset); 1121 mOutputBuffers[outputBufIndex].get(mOutput.buffer, 0, mBufferInfo.size); 1122 mOutPresentationTimeUs = mBufferInfo.presentationTimeUs; 1123 1124 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex; 1125 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 1126 logStr += " CONFIG. "; 1127 } 1128 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) { 1129 logStr += " KEY. "; 1130 } 1131 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 1132 logStr += " EOS. "; 1133 } 1134 logStr += " Size: " + mBufferInfo.size; 1135 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 + 1136 ". OutTime: " + (mOutPresentationTimeUs + 500)/1000; 1137 Log.v(TAG, logStr); 1138 if (mOutputFrameIndex == 0 && 1139 ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) { 1140 throw new RuntimeException("First frame is not a sync frame."); 1141 } 1142 1143 if (mBufferInfo.size > 0) { 1144 mOutputFrameIndex++; 1145 mOutput.outPresentationTimeUs = mOutPresentationTimeUs; 1146 } 1147 mCodec.releaseOutputBuffer(outputBufIndex, false); 1148 1149 mOutput.flags = mBufferInfo.flags; 1150 mOutput.outputGenerated = true; 1151 } 1152 mCallbackReceived = false; 1153 } 1154 getOutput()1155 public MediaEncoderOutput getOutput() throws Exception { 1156 runCallable( new Callable<Void>() { 1157 @Override 1158 public Void call() throws Exception { 1159 getOutputInternal(); 1160 return null; 1161 } 1162 } ); 1163 return mOutput; 1164 } 1165 forceSyncFrame()1166 public void forceSyncFrame() throws Exception { 1167 final Bundle syncFrame = new Bundle(); 1168 syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0); 1169 runCallable( new Callable<Void>() { 1170 @Override 1171 public Void call() throws Exception { 1172 mCodec.setParameters(syncFrame); 1173 return null; 1174 } 1175 } ); 1176 } 1177 updateBitrate(int bitrate)1178 public void updateBitrate(int bitrate) throws Exception { 1179 final Bundle bitrateUpdate = new Bundle(); 1180 bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate); 1181 runCallable( new Callable<Void>() { 1182 @Override 1183 public Void call() throws Exception { 1184 mCodec.setParameters(bitrateUpdate); 1185 return null; 1186 } 1187 } ); 1188 } 1189 1190 waitForBufferEvent()1191 public void waitForBufferEvent() throws Exception { 1192 Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent"); 1193 if (mAsync) { 1194 synchronized (mCallbackEvent) { 1195 if (!mCallbackReceived) { 1196 mCallbackEvent.wait(1000); // wait 1 sec for a callback 1197 // throw an exception if callback was not received 1198 if (!mCallbackReceived) { 1199 throw new RuntimeException("MediaCodec callback was not received"); 1200 } 1201 } 1202 } 1203 } else { 1204 Thread.sleep(5); 1205 } 1206 Log.v(TAG, "----Waiting for bufferEvent done"); 1207 } 1208 1209 waitForCompletion(long timeoutMs)1210 public void waitForCompletion(long timeoutMs) throws Exception { 1211 synchronized (mCompletionEvent) { 1212 long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs; 1213 1214 while (!mCompleted) { 1215 mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis()); 1216 if (System.currentTimeMillis() >= timeoutExpiredMs) { 1217 throw new RuntimeException("encoding has timed out!"); 1218 } 1219 } 1220 } 1221 } 1222 signalCompletion()1223 public void signalCompletion() { 1224 synchronized (mCompletionEvent) { 1225 mCompleted = true; 1226 mCompletionEvent.notify(); 1227 } 1228 } 1229 deleteCodec()1230 public void deleteCodec() throws Exception { 1231 runCallable( new Callable<Void>() { 1232 @Override 1233 public Void call() throws Exception { 1234 mCodec.stop(); 1235 mCodec.release(); 1236 return null; 1237 } 1238 } ); 1239 if (mAsync) { 1240 requestStop(); // Stop looper thread 1241 } 1242 } 1243 } 1244 1245 /** 1246 * Vp8 encoding loop supporting encoding single streams with an option 1247 * to run in a looper thread and use buffer ready notification callbacks. 1248 * 1249 * Output stream is described by encodingParams parameters. 1250 * 1251 * MediaCodec will raise an IllegalStateException 1252 * whenever vp8 encoder fails to encode a frame. 1253 * 1254 * Color format of input file should be YUV420, and frameWidth, 1255 * frameHeight should be supplied correctly as raw input file doesn't 1256 * include any header data. 1257 * 1258 * @param streamParams Structure with encoder parameters 1259 * @return Returns array of encoded frames information for each frame. 1260 */ encode( EncoderOutputStreamParameters streamParams)1261 protected ArrayList<MediaCodec.BufferInfo> encode( 1262 EncoderOutputStreamParameters streamParams) throws Exception { 1263 1264 ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>(); 1265 Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " + 1266 streamParams.frameHeight); 1267 int bitrate = streamParams.bitrateSet[0]; 1268 1269 // Create minimal media format signifying desired output. 1270 MediaFormat format = MediaFormat.createVideoFormat( 1271 VP8_MIME, streamParams.frameWidth, streamParams.frameHeight); 1272 format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate); 1273 CodecProperties properties = getVpxCodecProperties( 1274 true, format, streamParams.forceGoogleEncoder); 1275 if (properties == null) { 1276 return null; 1277 } 1278 1279 // Open input/output 1280 InputStream yuvStream = OpenFileOrResourceId( 1281 streamParams.inputYuvFilename, streamParams.inputResourceId); 1282 IvfWriter ivf = new IvfWriter( 1283 streamParams.outputIvfFilename, streamParams.frameWidth, streamParams.frameHeight); 1284 1285 // Create a media format signifying desired output. 1286 if (streamParams.bitrateType == VIDEO_ControlRateConstant) { 1287 format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR 1288 } 1289 if (streamParams.temporalLayers > 0) { 1290 format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer 1291 } 1292 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 1293 format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate); 1294 int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) / 1295 streamParams.frameRate; 1296 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval); 1297 1298 // Create encoder 1299 Log.d(TAG, "Creating encoder " + properties.codecName + 1300 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " + 1301 streamParams.frameWidth + " x " + streamParams.frameHeight + 1302 ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType + 1303 ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers + 1304 ". Key frame:" + syncFrameInterval * streamParams.frameRate + 1305 ". Force keyFrame: " + streamParams.syncForceFrameInterval); 1306 Log.d(TAG, " Format: " + format); 1307 Log.d(TAG, " Output ivf:" + streamParams.outputIvfFilename); 1308 MediaEncoderAsync codec = new MediaEncoderAsync(); 1309 codec.createCodec(0, properties.codecName, format, 1310 streamParams.timeoutDequeue, streamParams.runInLooperThread); 1311 1312 // encode loop 1313 boolean sawInputEOS = false; // no more data 1314 boolean consumedInputEOS = false; // EOS flag is consumed dy encoder 1315 boolean sawOutputEOS = false; 1316 boolean inputConsumed = true; 1317 int inputFrameIndex = 0; 1318 int lastBitrate = bitrate; 1319 int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2; 1320 byte[] srcFrame = new byte[srcFrameSize]; 1321 1322 while (!sawOutputEOS) { 1323 1324 // Read and feed input frame 1325 if (!consumedInputEOS) { 1326 1327 // Read new input buffers - if previous input was consumed and no EOS 1328 if (inputConsumed && !sawInputEOS) { 1329 int bytesRead = yuvStream.read(srcFrame); 1330 1331 // Check EOS 1332 if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) { 1333 sawInputEOS = true; 1334 Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex); 1335 } 1336 1337 if (!sawInputEOS && bytesRead == -1) { 1338 if (streamParams.frameCount == 0) { 1339 sawInputEOS = true; 1340 Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex); 1341 } else { 1342 yuvStream.close(); 1343 yuvStream = OpenFileOrResourceId( 1344 streamParams.inputYuvFilename, streamParams.inputResourceId); 1345 bytesRead = yuvStream.read(srcFrame); 1346 } 1347 } 1348 1349 // Force sync frame if syncForceFrameinterval is set. 1350 if (!sawInputEOS && inputFrameIndex > 0 && 1351 streamParams.syncForceFrameInterval > 0 && 1352 (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) { 1353 Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex); 1354 codec.forceSyncFrame(); 1355 } 1356 1357 // Dynamic bitrate change. 1358 if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) { 1359 int newBitrate = streamParams.bitrateSet[inputFrameIndex]; 1360 if (newBitrate != lastBitrate) { 1361 Log.d(TAG, "--- Requesting new bitrate " + newBitrate + 1362 " for frame " + inputFrameIndex); 1363 codec.updateBitrate(newBitrate); 1364 lastBitrate = newBitrate; 1365 } 1366 } 1367 1368 // Convert YUV420 to NV12 if necessary 1369 if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) { 1370 srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight, 1371 srcFrame); 1372 } 1373 } 1374 1375 inputConsumed = codec.feedInput(srcFrame, sawInputEOS); 1376 if (inputConsumed) { 1377 inputFrameIndex++; 1378 consumedInputEOS = sawInputEOS; 1379 } 1380 } 1381 1382 // Get output from the encoder 1383 MediaEncoderOutput out = codec.getOutput(); 1384 if (out.outputGenerated) { 1385 // Detect output EOS 1386 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 1387 Log.d(TAG, "----Output EOS "); 1388 sawOutputEOS = true; 1389 } 1390 1391 if (out.buffer.length > 0) { 1392 // Save frame 1393 ivf.writeFrame(out.buffer, out.outPresentationTimeUs); 1394 1395 // Update statistics - store presentation time delay in offset 1396 long presentationTimeUsDelta = out.inPresentationTimeUs - 1397 out.outPresentationTimeUs; 1398 MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo(); 1399 bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length, 1400 out.outPresentationTimeUs, out.flags); 1401 bufferInfos.add(bufferInfoCopy); 1402 } 1403 } 1404 1405 // If codec is not ready to accept input/poutput - wait for buffer ready callback 1406 if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) { 1407 codec.waitForBufferEvent(); 1408 } 1409 } 1410 1411 codec.deleteCodec(); 1412 ivf.close(); 1413 yuvStream.close(); 1414 1415 return bufferInfos; 1416 } 1417 1418 /** 1419 * Vp8 encoding run in a looper thread and use buffer ready callbacks. 1420 * 1421 * Output stream is described by encodingParams parameters. 1422 * 1423 * MediaCodec will raise an IllegalStateException 1424 * whenever vp8 encoder fails to encode a frame. 1425 * 1426 * Color format of input file should be YUV420, and frameWidth, 1427 * frameHeight should be supplied correctly as raw input file doesn't 1428 * include any header data. 1429 * 1430 * @param streamParams Structure with encoder parameters 1431 * @return Returns array of encoded frames information for each frame. 1432 */ encodeAsync( EncoderOutputStreamParameters streamParams)1433 protected ArrayList<MediaCodec.BufferInfo> encodeAsync( 1434 EncoderOutputStreamParameters streamParams) throws Exception { 1435 if (!streamParams.runInLooperThread) { 1436 throw new RuntimeException("encodeAsync should run with a looper thread!"); 1437 } 1438 1439 ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>(); 1440 Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " + 1441 streamParams.frameHeight); 1442 int bitrate = streamParams.bitrateSet[0]; 1443 1444 // Create minimal media format signifying desired output. 1445 MediaFormat format = MediaFormat.createVideoFormat( 1446 VP8_MIME, streamParams.frameWidth, streamParams.frameHeight); 1447 format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate); 1448 CodecProperties properties = getVpxCodecProperties( 1449 true, format, streamParams.forceGoogleEncoder); 1450 if (properties == null) { 1451 return null; 1452 } 1453 1454 // Open input/output 1455 IvfWriter ivf = new IvfWriter( 1456 streamParams.outputIvfFilename, streamParams.frameWidth, streamParams.frameHeight); 1457 1458 // Create a media format signifying desired output. 1459 if (streamParams.bitrateType == VIDEO_ControlRateConstant) { 1460 format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR 1461 } 1462 if (streamParams.temporalLayers > 0) { 1463 format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer 1464 } 1465 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 1466 format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate); 1467 int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) / 1468 streamParams.frameRate; 1469 format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval); 1470 1471 // Create encoder 1472 Log.d(TAG, "Creating encoder " + properties.codecName + 1473 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " + 1474 streamParams.frameWidth + " x " + streamParams.frameHeight + 1475 ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType + 1476 ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers + 1477 ". Key frame:" + syncFrameInterval * streamParams.frameRate + 1478 ". Force keyFrame: " + streamParams.syncForceFrameInterval); 1479 Log.d(TAG, " Format: " + format); 1480 Log.d(TAG, " Output ivf:" + streamParams.outputIvfFilename); 1481 1482 MediaEncoderAsync codec = new MediaEncoderAsync(); 1483 MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper( 1484 streamParams, properties, bufferInfos, ivf); 1485 1486 codec.setAsyncHelper(helper); 1487 codec.createCodec(0, properties.codecName, format, 1488 streamParams.timeoutDequeue, streamParams.runInLooperThread); 1489 codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS); 1490 1491 codec.deleteCodec(); 1492 ivf.close(); 1493 1494 return bufferInfos; 1495 } 1496 1497 /** 1498 * Vp8 encoding loop supporting encoding multiple streams at a time. 1499 * Each output stream is described by encodingParams parameters allowing 1500 * simultaneous encoding of various resolutions, bitrates with an option to 1501 * control key frame and dynamic bitrate for each output stream indepandently. 1502 * 1503 * MediaCodec will raise an IllegalStateException 1504 * whenever vp8 encoder fails to encode a frame. 1505 * 1506 * Color format of input file should be YUV420, and frameWidth, 1507 * frameHeight should be supplied correctly as raw input file doesn't 1508 * include any header data. 1509 * 1510 * @param srcFrameWidth Frame width of input yuv file 1511 * @param srcFrameHeight Frame height of input yuv file 1512 * @param encodingParams Encoder parameters 1513 * @return Returns 2D array of encoded frames information for each stream and 1514 * for each frame. 1515 */ encodeSimulcast( int srcFrameWidth, int srcFrameHeight, ArrayList<EncoderOutputStreamParameters> encodingParams)1516 protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast( 1517 int srcFrameWidth, 1518 int srcFrameHeight, 1519 ArrayList<EncoderOutputStreamParameters> encodingParams) throws Exception { 1520 int numEncoders = encodingParams.size(); 1521 1522 // Create arrays of input/output, formats, bitrates etc 1523 ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos = 1524 new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders); 1525 InputStream yuvStream[] = new InputStream[numEncoders]; 1526 IvfWriter[] ivf = new IvfWriter[numEncoders]; 1527 FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders]; 1528 MediaFormat[] format = new MediaFormat[numEncoders]; 1529 MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders]; 1530 int[] inputFrameIndex = new int[numEncoders]; 1531 boolean[] sawInputEOS = new boolean[numEncoders]; 1532 boolean[] consumedInputEOS = new boolean[numEncoders]; 1533 boolean[] inputConsumed = new boolean[numEncoders]; 1534 boolean[] bufferConsumed = new boolean[numEncoders]; 1535 boolean[] sawOutputEOS = new boolean[numEncoders]; 1536 byte[][] srcFrame = new byte[numEncoders][]; 1537 boolean sawOutputEOSTotal = false; 1538 boolean bufferConsumedTotal = false; 1539 CodecProperties[] codecProperties = new CodecProperties[numEncoders]; 1540 1541 numEncoders = 0; 1542 for (EncoderOutputStreamParameters params : encodingParams) { 1543 int i = numEncoders; 1544 Log.d(TAG, "Source resolution: " + params.frameWidth + " x " + 1545 params.frameHeight); 1546 int bitrate = params.bitrateSet[0]; 1547 1548 // Create minimal media format signifying desired output. 1549 format[i] = MediaFormat.createVideoFormat(VP8_MIME, 1550 params.frameWidth, params.frameHeight); 1551 format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate); 1552 CodecProperties properties = getVpxCodecProperties( 1553 true, format[i], params.forceGoogleEncoder); 1554 if (properties == null) { 1555 continue; 1556 } 1557 1558 // Check if scaled image was created 1559 int scale = params.frameWidth / srcFrameWidth; 1560 if (!mScaledImages.contains(scale)) { 1561 // resize image 1562 cacheScaledImage(params.inputYuvFilename, params.inputResourceId, 1563 srcFrameWidth, srcFrameHeight, 1564 params.scaledYuvFilename, params.frameWidth, params.frameHeight); 1565 mScaledImages.add(scale); 1566 } 1567 1568 // Create buffer info storage 1569 bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>()); 1570 1571 // Create YUV reader 1572 yuvStream[i] = new FileInputStream(params.scaledYuvFilename); 1573 1574 // Create IVF writer 1575 ivf[i] = new IvfWriter(params.outputIvfFilename, params.frameWidth, params.frameHeight); 1576 1577 // Frame buffer 1578 int frameSize = params.frameWidth * params.frameHeight * 3 / 2; 1579 srcFrame[i] = new byte[frameSize]; 1580 1581 // Create a media format signifying desired output. 1582 if (params.bitrateType == VIDEO_ControlRateConstant) { 1583 format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR 1584 } 1585 if (params.temporalLayers > 0) { 1586 format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer 1587 } 1588 format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat); 1589 format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate); 1590 int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) / 1591 params.frameRate; // in sec 1592 format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval); 1593 // Create encoder 1594 Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName + 1595 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " + 1596 params.frameWidth + " x " + params.frameHeight + 1597 ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType + 1598 ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers + 1599 ". Key frame:" + syncFrameInterval * params.frameRate + 1600 ". Force keyFrame: " + params.syncForceFrameInterval); 1601 Log.d(TAG, " Format: " + format[i]); 1602 Log.d(TAG, " Output ivf:" + params.outputIvfFilename); 1603 1604 // Create encoder 1605 codec[i] = new MediaEncoderAsync(); 1606 codec[i].createCodec(i, properties.codecName, format[i], 1607 params.timeoutDequeue, params.runInLooperThread); 1608 codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat); 1609 1610 inputConsumed[i] = true; 1611 ++numEncoders; 1612 } 1613 if (numEncoders == 0) { 1614 Log.i(TAG, "no suitable encoders found for any of the streams"); 1615 return null; 1616 } 1617 1618 while (!sawOutputEOSTotal) { 1619 // Feed input buffer to all encoders 1620 for (int i = 0; i < numEncoders; i++) { 1621 bufferConsumed[i] = false; 1622 if (consumedInputEOS[i]) { 1623 continue; 1624 } 1625 1626 EncoderOutputStreamParameters params = encodingParams.get(i); 1627 // Read new input buffers - if previous input was consumed and no EOS 1628 if (inputConsumed[i] && !sawInputEOS[i]) { 1629 int bytesRead = yuvStream[i].read(srcFrame[i]); 1630 1631 // Check EOS 1632 if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) { 1633 sawInputEOS[i] = true; 1634 Log.d(TAG, "---Enc" + i + 1635 ". Sending EOS empty frame for frame # " + inputFrameIndex[i]); 1636 } 1637 1638 if (!sawInputEOS[i] && bytesRead == -1) { 1639 if (params.frameCount == 0) { 1640 sawInputEOS[i] = true; 1641 Log.d(TAG, "---Enc" + i + 1642 ". Sending EOS empty frame for frame # " + inputFrameIndex[i]); 1643 } else { 1644 yuvStream[i].close(); 1645 yuvStream[i] = new FileInputStream(params.scaledYuvFilename); 1646 bytesRead = yuvStream[i].read(srcFrame[i]); 1647 } 1648 } 1649 1650 // Convert YUV420 to NV12 if necessary 1651 if (codecProperties[i].colorFormat != 1652 CodecCapabilities.COLOR_FormatYUV420Planar) { 1653 srcFrame[i] = 1654 YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]); 1655 } 1656 } 1657 1658 inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]); 1659 if (inputConsumed[i]) { 1660 inputFrameIndex[i]++; 1661 consumedInputEOS[i] = sawInputEOS[i]; 1662 bufferConsumed[i] = true; 1663 } 1664 1665 } 1666 1667 // Get output from all encoders 1668 for (int i = 0; i < numEncoders; i++) { 1669 if (sawOutputEOS[i]) { 1670 continue; 1671 } 1672 1673 MediaEncoderOutput out = codec[i].getOutput(); 1674 if (out.outputGenerated) { 1675 bufferConsumed[i] = true; 1676 // Detect output EOS 1677 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 1678 Log.d(TAG, "----Enc" + i + ". Output EOS "); 1679 sawOutputEOS[i] = true; 1680 } 1681 1682 if (out.buffer.length > 0) { 1683 // Save frame 1684 ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs); 1685 1686 // Update statistics - store presentation time delay in offset 1687 long presentationTimeUsDelta = out.inPresentationTimeUs - 1688 out.outPresentationTimeUs; 1689 MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo(); 1690 bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length, 1691 out.outPresentationTimeUs, out.flags); 1692 bufferInfos.get(i).add(bufferInfoCopy); 1693 } 1694 } 1695 } 1696 1697 // If codec is not ready to accept input/output - wait for buffer ready callback 1698 bufferConsumedTotal = false; 1699 for (boolean bufferConsumedCurrent : bufferConsumed) { 1700 bufferConsumedTotal |= bufferConsumedCurrent; 1701 } 1702 if (!bufferConsumedTotal) { 1703 // Pick the encoder to wait for 1704 for (int i = 0; i < numEncoders; i++) { 1705 if (!bufferConsumed[i] && !sawOutputEOS[i]) { 1706 codec[i].waitForBufferEvent(); 1707 break; 1708 } 1709 } 1710 } 1711 1712 // Check if EOS happened for all encoders 1713 sawOutputEOSTotal = true; 1714 for (boolean sawOutputEOSStream : sawOutputEOS) { 1715 sawOutputEOSTotal &= sawOutputEOSStream; 1716 } 1717 } 1718 1719 for (int i = 0; i < numEncoders; i++) { 1720 codec[i].deleteCodec(); 1721 ivf[i].close(); 1722 yuvStream[i].close(); 1723 if (yuvScaled[i] != null) { 1724 yuvScaled[i].close(); 1725 } 1726 } 1727 1728 return bufferInfos; 1729 } 1730 1731 /** 1732 * Some encoding statistics. 1733 */ 1734 protected class Vp8EncodingStatistics { Vp8EncodingStatistics()1735 Vp8EncodingStatistics() { 1736 mBitrates = new ArrayList<Integer>(); 1737 mFrames = new ArrayList<Integer>(); 1738 mKeyFrames = new ArrayList<Integer>(); 1739 mMinimumKeyFrameInterval = Integer.MAX_VALUE; 1740 } 1741 1742 public ArrayList<Integer> mBitrates;// Bitrate values for each second of the encoded stream. 1743 public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream. 1744 public int mAverageBitrate; // Average stream bitrate. 1745 public ArrayList<Integer> mKeyFrames;// Stores the position of key frames in a stream. 1746 public int mAverageKeyFrameInterval; // Average key frame interval. 1747 public int mMaximumKeyFrameInterval; // Maximum key frame interval. 1748 public int mMinimumKeyFrameInterval; // Minimum key frame interval. 1749 } 1750 1751 /** 1752 * Calculates average bitrate and key frame interval for the encoded streams. 1753 * Output mBitrates field will contain bitrate values for every second 1754 * of the encoded stream. 1755 * Average stream bitrate will be stored in mAverageBitrate field. 1756 * mKeyFrames array will contain the position of key frames in the encoded stream and 1757 * mKeyFrameInterval - average key frame interval. 1758 */ computeEncodingStatistics(int encoderId, ArrayList<MediaCodec.BufferInfo> bufferInfos )1759 protected Vp8EncodingStatistics computeEncodingStatistics(int encoderId, 1760 ArrayList<MediaCodec.BufferInfo> bufferInfos ) { 1761 Vp8EncodingStatistics statistics = new Vp8EncodingStatistics(); 1762 1763 int totalSize = 0; 1764 int frames = 0; 1765 int framesPerSecond = 0; 1766 int totalFrameSizePerSecond = 0; 1767 int maxFrameSize = 0; 1768 int currentSecond; 1769 int nextSecond = 0; 1770 String keyFrameList = " IFrame List: "; 1771 String bitrateList = " Bitrate list: "; 1772 String framesList = " FPS list: "; 1773 1774 1775 for (int j = 0; j < bufferInfos.size(); j++) { 1776 MediaCodec.BufferInfo info = bufferInfos.get(j); 1777 currentSecond = (int)(info.presentationTimeUs / 1000000); 1778 boolean lastFrame = (j == bufferInfos.size() - 1); 1779 if (!lastFrame) { 1780 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000); 1781 } 1782 1783 totalSize += info.size; 1784 totalFrameSizePerSecond += info.size; 1785 maxFrameSize = Math.max(maxFrameSize, info.size); 1786 framesPerSecond++; 1787 frames++; 1788 1789 // Update the bitrate statistics if the next frame will 1790 // be for the next second 1791 if (lastFrame || nextSecond > currentSecond) { 1792 int currentBitrate = totalFrameSizePerSecond * 8; 1793 bitrateList += (currentBitrate + " "); 1794 framesList += (framesPerSecond + " "); 1795 statistics.mBitrates.add(currentBitrate); 1796 statistics.mFrames.add(framesPerSecond); 1797 totalFrameSizePerSecond = 0; 1798 framesPerSecond = 0; 1799 } 1800 1801 // Update key frame statistics. 1802 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) { 1803 statistics.mKeyFrames.add(j); 1804 keyFrameList += (j + " "); 1805 } 1806 } 1807 int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000); 1808 duration = (duration + 500) / 1000; 1809 statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration); 1810 Log.d(TAG, "Statistics for encoder # " + encoderId); 1811 // Calculate average key frame interval in frames. 1812 int keyFrames = statistics.mKeyFrames.size(); 1813 if (keyFrames > 1) { 1814 statistics.mAverageKeyFrameInterval = 1815 statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0); 1816 statistics.mAverageKeyFrameInterval = 1817 Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1)); 1818 for (int j = 1; j < keyFrames; j++) { 1819 int keyFrameInterval = 1820 statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1); 1821 statistics.mMaximumKeyFrameInterval = 1822 Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval); 1823 statistics.mMinimumKeyFrameInterval = 1824 Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval); 1825 } 1826 Log.d(TAG, " Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval + 1827 ". Min: " + statistics.mMinimumKeyFrameInterval + 1828 ". Avg: " + statistics.mAverageKeyFrameInterval); 1829 } 1830 Log.d(TAG, " Frames: " + frames + ". Duration: " + duration + 1831 ". Total size: " + totalSize + ". Key frames: " + keyFrames); 1832 Log.d(TAG, keyFrameList); 1833 Log.d(TAG, bitrateList); 1834 Log.d(TAG, framesList); 1835 Log.d(TAG, " Bitrate average: " + statistics.mAverageBitrate); 1836 Log.d(TAG, " Maximum frame size: " + maxFrameSize); 1837 1838 return statistics; 1839 } 1840 computeEncodingStatistics( ArrayList<MediaCodec.BufferInfo> bufferInfos )1841 protected Vp8EncodingStatistics computeEncodingStatistics( 1842 ArrayList<MediaCodec.BufferInfo> bufferInfos ) { 1843 return computeEncodingStatistics(0, bufferInfos); 1844 } 1845 computeSimulcastEncodingStatistics( ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos)1846 protected ArrayList<Vp8EncodingStatistics> computeSimulcastEncodingStatistics( 1847 ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) { 1848 int numCodecs = bufferInfos.size(); 1849 ArrayList<Vp8EncodingStatistics> statistics = new ArrayList<Vp8EncodingStatistics>(); 1850 1851 for (int i = 0; i < numCodecs; i++) { 1852 Vp8EncodingStatistics currentStatistics = 1853 computeEncodingStatistics(i, bufferInfos.get(i)); 1854 statistics.add(currentStatistics); 1855 } 1856 return statistics; 1857 } 1858 1859 /** 1860 * Calculates maximum latency for encoder/decoder based on buffer info array 1861 * generated either by encoder or decoder. 1862 */ maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos)1863 protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) { 1864 int maxValue = 0; 1865 for (MediaCodec.BufferInfo bufferInfo : bufferInfos) { 1866 maxValue = Math.max(maxValue, bufferInfo.offset); 1867 } 1868 maxValue = (maxValue + 500) / 1000; // mcs -> ms 1869 return maxValue; 1870 } 1871 1872 /** 1873 * Decoding PSNR statistics. 1874 */ 1875 protected class Vp8DecodingStatistics { Vp8DecodingStatistics()1876 Vp8DecodingStatistics() { 1877 mMinimumPSNR = Integer.MAX_VALUE; 1878 } 1879 public double mAveragePSNR; 1880 public double mMinimumPSNR; 1881 } 1882 1883 /** 1884 * Calculates PSNR value between two video frames. 1885 */ computePSNR(byte[] data0, byte[] data1)1886 private double computePSNR(byte[] data0, byte[] data1) { 1887 long squareError = 0; 1888 assertTrue(data0.length == data1.length); 1889 int length = data0.length; 1890 for (int i = 0 ; i < length; i++) { 1891 int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff); 1892 squareError += diff * diff; 1893 } 1894 double meanSquareError = (double)squareError / length; 1895 double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError); 1896 return psnr; 1897 } 1898 1899 /** 1900 * Calculates average and minimum PSNR values between 1901 * set of reference and decoded video frames. 1902 * Runs PSNR calculation for the full duration of the decoded data. 1903 */ computeDecodingStatistics( String referenceYuvFilename, int referenceYuvRawId, String decodedYuvFilename, int width, int height)1904 protected Vp8DecodingStatistics computeDecodingStatistics( 1905 String referenceYuvFilename, 1906 int referenceYuvRawId, 1907 String decodedYuvFilename, 1908 int width, 1909 int height) throws Exception { 1910 Vp8DecodingStatistics statistics = new Vp8DecodingStatistics(); 1911 InputStream referenceStream = 1912 OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId); 1913 InputStream decodedStream = new FileInputStream(decodedYuvFilename); 1914 1915 int ySize = width * height; 1916 int uvSize = width * height / 4; 1917 byte[] yRef = new byte[ySize]; 1918 byte[] yDec = new byte[ySize]; 1919 byte[] uvRef = new byte[uvSize]; 1920 byte[] uvDec = new byte[uvSize]; 1921 1922 int frames = 0; 1923 double averageYPSNR = 0; 1924 double averageUPSNR = 0; 1925 double averageVPSNR = 0; 1926 double minimumYPSNR = Integer.MAX_VALUE; 1927 double minimumUPSNR = Integer.MAX_VALUE; 1928 double minimumVPSNR = Integer.MAX_VALUE; 1929 int minimumPSNRFrameIndex = 0; 1930 1931 while (true) { 1932 // Calculate Y PSNR. 1933 int bytesReadRef = referenceStream.read(yRef); 1934 int bytesReadDec = decodedStream.read(yDec); 1935 if (bytesReadDec == -1) { 1936 break; 1937 } 1938 if (bytesReadRef == -1) { 1939 // Reference file wrapping up 1940 referenceStream.close(); 1941 referenceStream = 1942 OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId); 1943 bytesReadRef = referenceStream.read(yRef); 1944 } 1945 double curYPSNR = computePSNR(yRef, yDec); 1946 averageYPSNR += curYPSNR; 1947 minimumYPSNR = Math.min(minimumYPSNR, curYPSNR); 1948 double curMinimumPSNR = curYPSNR; 1949 1950 // Calculate U PSNR. 1951 bytesReadRef = referenceStream.read(uvRef); 1952 bytesReadDec = decodedStream.read(uvDec); 1953 double curUPSNR = computePSNR(uvRef, uvDec); 1954 averageUPSNR += curUPSNR; 1955 minimumUPSNR = Math.min(minimumUPSNR, curUPSNR); 1956 curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR); 1957 1958 // Calculate V PSNR. 1959 bytesReadRef = referenceStream.read(uvRef); 1960 bytesReadDec = decodedStream.read(uvDec); 1961 double curVPSNR = computePSNR(uvRef, uvDec); 1962 averageVPSNR += curVPSNR; 1963 minimumVPSNR = Math.min(minimumVPSNR, curVPSNR); 1964 curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR); 1965 1966 // Frame index for minimum PSNR value - help to detect possible distortions 1967 if (curMinimumPSNR < statistics.mMinimumPSNR) { 1968 statistics.mMinimumPSNR = curMinimumPSNR; 1969 minimumPSNRFrameIndex = frames; 1970 } 1971 1972 String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f", 1973 frames, curYPSNR, curUPSNR, curVPSNR); 1974 Log.v(TAG, logStr); 1975 1976 frames++; 1977 } 1978 1979 averageYPSNR /= frames; 1980 averageUPSNR /= frames; 1981 averageVPSNR /= frames; 1982 statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6; 1983 1984 Log.d(TAG, "PSNR statistics for " + frames + " frames."); 1985 String logStr = String.format(Locale.US, 1986 "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f", 1987 averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR); 1988 Log.d(TAG, logStr); 1989 logStr = String.format(Locale.US, 1990 "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d", 1991 minimumYPSNR, minimumUPSNR, minimumVPSNR, 1992 statistics.mMinimumPSNR, minimumPSNRFrameIndex); 1993 Log.d(TAG, logStr); 1994 1995 referenceStream.close(); 1996 decodedStream.close(); 1997 return statistics; 1998 } 1999 } 2000 2001