/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.content.Context;
import android.content.res.Resources;
import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.platform.test.annotations.AppModeFull;
import android.test.AndroidTestCase;
import android.util.Log;

import com.android.compatibility.common.util.MediaUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Locale;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;

/**
 * Verification test for video encoder and decoder.
 *
 * A raw YV12 stream is encoded at various settings and written to an IVF
 * file. The encoded stream bitrate and key frame interval are checked against target values.
 * The stream is later decoded to verify that the frames are decodable and to
 * calculate PSNR values for various bitrates.
 */
@AppModeFull(reason = "Instant apps cannot access the SD card")
public class VideoCodecTestBase extends AndroidTestCase {

    protected static final String TAG = "VideoCodecTestBase";
    protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
    protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
    protected static final String AVC_MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
    protected static final String HEVC_MIME = MediaFormat.MIMETYPE_VIDEO_HEVC;
    protected static final String SDCARD_DIR =
            Environment.getExternalStorageDirectory().getAbsolutePath();
    static final String mInpPrefix = WorkDir.getMediaDirString();

    // Default timeout for MediaCodec buffer dequeue - 200 ms.
    protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
    // Default timeout for MediaEncoderAsync - 30 sec.
    protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
    // Default sync frame interval in frames.
    private static final int SYNC_FRAME_INTERVAL = 30;
    // Video bitrate control modes - match the OMX_VIDEO_CONTROLRATETYPE values
    // from OMX_Video.h.
    protected static final int VIDEO_ControlRateVariable = 1;
    protected static final int VIDEO_ControlRateConstant = 2;
    // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
    // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
    private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
    // Allowable color formats supported by codec - in order of preference.
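    // The list is ordered so that a planar layout is preferred when a codec
    // offers several formats; for semi-planar formats the test converts frames
    // with YUV420ToNV()/NV12ToYUV420() as needed.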
    private static final int[] mSupportedColorList = {
            CodecCapabilities.COLOR_FormatYUV420Planar,
            CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
            COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
    };
    // Scaled image cache list - contains the scale factors for which up-scaled
    // frames were calculated and written to a yuv file.
    ArrayList<Integer> mScaledImages = new ArrayList<Integer>();

    private Resources mResources;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
        mResources = mContext.getResources();
    }

    /**
     * Video codec properties generated by the getVideoCodecProperties() function.
     */
    private class CodecProperties {
        CodecProperties(String codecName, int colorFormat) {
            this.codecName = codecName;
            this.colorFormat = colorFormat;
        }
        public final String codecName; // OpenMax component name for the video codec.
        public final int colorFormat;  // Color format supported by the codec.
    }

    /**
     * Function to find a video codec.
     *
     * Iterates through the list of available codecs and tries to find a
     * video codec that supports either the YUV420 planar or the NV12 color format.
     * If the forceGoogleCodec parameter is set to true, the function always returns
     * a Google (software) video codec.
     * If the forceGoogleCodec parameter is set to false, the function looks for a
     * platform-specific video codec first. If no platform-specific codec exists,
     * it falls back to a Google video codec.
     *
     * @param isEncoder        Flag indicating whether an encoder is requested.
     * @param format           Media format the codec must support.
     * @param forceGoogleCodec Forces use of a Google codec.
     */
    private CodecProperties getVideoCodecProperties(
            boolean isEncoder,
            MediaFormat format,
            boolean forceGoogleCodec) throws Exception {
        CodecProperties codecProperties = null;
        String mime = format.getString(MediaFormat.KEY_MIME);

        // Loop through the list of codec components in case a platform-specific
        // codec is requested.
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
            if (isEncoder != codecInfo.isEncoder()) {
                continue;
            }
            Log.v(TAG, codecInfo.getName());
            // TODO: remove the test's dependence on Google codecs.
            // Skip vendor codecs when a Google (software) codec is forced.
            if (codecInfo.isVendor() && forceGoogleCodec) {
                continue;
            }

            for (String type : codecInfo.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(mime)) {
                    continue;
                }
                CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
                if (!capabilities.isFormatSupported(format)) {
                    continue;
                }

                // Get candidate codec properties.
                Log.v(TAG, "Found candidate codec " + codecInfo.getName());
                for (int colorFormat : capabilities.colorFormats) {
                    Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
                }

                // Check supported color formats.
                for (int supportedColorFormat : mSupportedColorList) {
                    for (int codecColorFormat : capabilities.colorFormats) {
                        if (codecColorFormat == supportedColorFormat) {
                            codecProperties = new CodecProperties(codecInfo.getName(),
                                    codecColorFormat);
                            Log.v(TAG, "Found target codec " + codecProperties.codecName +
                                    ". Color: 0x" + Integer.toHexString(codecColorFormat));
                            // Return the first vendor codec (hopefully HW) found.
                            if (codecInfo.isVendor()) {
                                return codecProperties;
                            }
                        }
                    }
                }
            }
        }
        if (codecProperties == null) {
            Log.i(TAG, "no suitable " + (forceGoogleCodec ? "google " : "")
                    + (isEncoder ? "encoder " : "decoder ") + "found for " + format);
        }
        return codecProperties;
    }

    /**
     * Parameters for an encoded video stream.
     */
    protected class EncoderOutputStreamParameters {
        // Name of the raw YUV420 input file. When this parameter is null,
        // the resource from the inputResource parameter is used instead.
        public String inputYuvFilename;
        // Name of the scaled YUV420 input file.
        public String scaledYuvFilename;
        // Name of the raw input resource (YUV420). Used only if the
        // inputYuvFilename parameter is null.
        public String inputResource;
        // Name of the IVF file to write the encoded bitstream to.
        public String outputIvfFilename;
        // Mime type of the encoded content.
        public String codecMimeType;
        // Force use of the Google video encoder.
        boolean forceGoogleEncoder;
        // Number of frames to encode.
        int frameCount;
        // Frame rate of the input file in frames per second.
        int frameRate;
        // Encoded frame width.
        public int frameWidth;
        // Encoded frame height.
        public int frameHeight;
        // Encoding bitrate array in bits/second for every frame. If the array length
        // is shorter than the total number of frames, the last value is re-used for
        // all remaining frames. For constant bitrate encoding a single-element
        // array can be used, with its first element set to the target bitrate value.
        public int[] bitrateSet;
        // Encoding bitrate type - VBR or CBR.
        public int bitrateType;
        // Number of temporal layers.
        public int temporalLayers;
        // Desired key frame interval - the codec is asked to generate key frames
        // at a period defined by this parameter.
        public int syncFrameInterval;
        // Optional parameter - forced key frame interval. Used to
        // explicitly request the codec to generate key frames using the
        // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
        public int syncForceFrameInterval;
        // Buffer dequeue timeout.
        long timeoutDequeue;
        // Flag indicating whether the encoder should run in a Looper thread.
        boolean runInLooperThread;
        // Flag indicating whether to use NdkMediaCodec.
        boolean useNdk;
    }

    private String getCodecSuffix(String codecMimeType) {
        switch (codecMimeType) {
            case VP8_MIME:
                return "vp8";
            case VP9_MIME:
                return "vp9";
            case AVC_MIME:
                return "avc";
            case HEVC_MIME:
                return "hevc";
            default:
                Log.w(TAG, "getCodecSuffix got an unexpected codecMimeType.");
        }
        return "video";
    }

    /**
     * Generates an array of default parameters for encoder output streams based on
     * the upscaling values.
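     *
     * For each scale factor in resolutionScales a separate parameter set is
     * generated, with the frame dimensions multiplied by that factor (capped at
     * 1280x720) and the matching bitrate taken from the bitrates array.
     *
     * A typical single-stream use from a subclass test is sketched below; the
     * output base name and bitrate are illustrative only:
     * <pre>{@code
     * EncoderOutputStreamParameters params = getDefaultEncodingParameters(
     *         null,                       // use the default input resource
     *         "football",                 // output IVF base name
     *         VP8_MIME,
     *         9,                          // encode 9 seconds
     *         320, 240, 30,               // QVGA at 30 fps
     *         VIDEO_ControlRateVariable,
     *         400000,                     // 400 kbps
     *         true);                      // synchronous encoding
     * ArrayList<MediaCodec.BufferInfo> infos = encode(params);
     * }</pre>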
     */
    protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
            String inputYuvName,
            String outputIvfBaseName,
            String codecMimeType,
            int encodeSeconds,
            int[] resolutionScales,
            int frameWidth,
            int frameHeight,
            int frameRate,
            int bitrateMode,
            int[] bitrates,
            boolean syncEncoding) {
        assertTrue(resolutionScales.length == bitrates.length);
        int numCodecs = resolutionScales.length;
        ArrayList<EncoderOutputStreamParameters> outputParameters =
                new ArrayList<EncoderOutputStreamParameters>(numCodecs);
        for (int i = 0; i < numCodecs; i++) {
            EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
            if (inputYuvName != null) {
                params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
            } else {
                params.inputYuvFilename = null;
            }
            params.scaledYuvFilename = SDCARD_DIR + File.separator +
                    outputIvfBaseName + resolutionScales[i] + ".yuv";
            params.inputResource = "football_qvga.yuv";
            params.codecMimeType = codecMimeType;
            String codecSuffix = getCodecSuffix(codecMimeType);
            params.outputIvfFilename = SDCARD_DIR + File.separator +
                    outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
            params.forceGoogleEncoder = false;
            params.frameCount = encodeSeconds * frameRate;
            params.frameRate = frameRate;
            params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
            params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
            params.bitrateSet = new int[1];
            params.bitrateSet[0] = bitrates[i];
            params.bitrateType = bitrateMode;
            params.temporalLayers = 0;
            params.syncFrameInterval = SYNC_FRAME_INTERVAL;
            params.syncForceFrameInterval = 0;
            if (syncEncoding) {
                params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
                params.runInLooperThread = false;
            } else {
                params.timeoutDequeue = 0;
                params.runInLooperThread = true;
            }
            outputParameters.add(params);
        }
        return outputParameters;
    }

    protected EncoderOutputStreamParameters getDefaultEncodingParameters(
            String inputYuvName,
            String outputIvfBaseName,
            String codecMimeType,
            int encodeSeconds,
            int frameWidth,
            int frameHeight,
            int frameRate,
            int bitrateMode,
            int bitrate,
            boolean syncEncoding) {
        int[] scaleValues = { 1 };
        int[] bitrates = { bitrate };
        return getDefaultEncodingParameterList(
                inputYuvName,
                outputIvfBaseName,
                codecMimeType,
                encodeSeconds,
                scaleValues,
                frameWidth,
                frameHeight,
                frameRate,
                bitrateMode,
                bitrates,
                syncEncoding).get(0);
    }

    /**
     * Converts (interleaves) YUV420 planar to NV12.
     * Assumes a packed, macroblock-aligned frame with no cropping
     * (visible/coded row length == stride).
     */
    private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
        byte[] nv = new byte[yuv.length];
        // Y plane we just copy.
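        // (A YUV420 frame is width * height * 3 / 2 bytes: a full-resolution Y
        // plane followed by quarter-size U and V planes. NV12 keeps the Y plane
        // and stores the U and V samples as interleaved pairs after it.)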
        System.arraycopy(yuv, 0, nv, 0, width * height);

        // U & V planes we interleave.
        int u_offset = width * height;
        int v_offset = u_offset + u_offset / 4;
        int nv_offset = width * height;
        for (int i = 0; i < width * height / 4; i++) {
            nv[nv_offset++] = yuv[u_offset++];
            nv[nv_offset++] = yuv[v_offset++];
        }
        return nv;
    }

    /**
     * Converts (de-interleaves) NV12 to YUV420 planar.
     * Stride may be greater than width, slice height may be greater than height.
     */
    private static byte[] NV12ToYUV420(int width, int height,
            int stride, int sliceHeight, byte[] nv12) {
        byte[] yuv = new byte[width * height * 3 / 2];

        // Y plane we just copy.
        for (int i = 0; i < height; i++) {
            System.arraycopy(nv12, i * stride, yuv, i * width, width);
        }

        // U & V planes - de-interleave.
        int u_offset = width * height;
        int v_offset = u_offset + u_offset / 4;
        int nv_offset;
        for (int i = 0; i < height / 2; i++) {
            nv_offset = stride * (sliceHeight + i);
            for (int j = 0; j < width / 2; j++) {
                yuv[u_offset++] = nv12[nv_offset++];
                yuv[v_offset++] = nv12[nv_offset++];
            }
        }
        return yuv;
    }

    /**
     * Packs a YUV420 frame by moving it to a smaller buffer with stride and slice
     * height equal to the crop window.
     */
    private static byte[] PackYUV420(int left, int top, int width, int height,
            int stride, int sliceHeight, byte[] src) {
        byte[] dst = new byte[width * height * 3 / 2];
        // Y copy.
        for (int i = 0; i < height; i++) {
            System.arraycopy(src, (i + top) * stride + left, dst, i * width, width);
        }
        // U and V copy.
        int u_src_offset = stride * sliceHeight;
        int v_src_offset = u_src_offset + u_src_offset / 4;
        int u_dst_offset = width * height;
        int v_dst_offset = u_dst_offset + u_dst_offset / 4;
        // Downsample and align to floor-2 for the crop origin.
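        // (the chroma planes are subsampled 2x2, so the luma crop origin is
        // halved before indexing into the U and V planes)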
        left /= 2;
        top /= 2;
        for (int i = 0; i < height / 2; i++) {
            System.arraycopy(src, u_src_offset + (i + top) * (stride / 2) + left,
                    dst, u_dst_offset + i * (width / 2), width / 2);
            System.arraycopy(src, v_src_offset + (i + top) * (stride / 2) + left,
                    dst, v_dst_offset + i * (width / 2), width / 2);
        }
        return dst;
    }

    private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
            byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
        for (int i = 0; i < dstHeight / 2 - 1; i++) {
            int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
            int dstOffset1 = dstOffset0 + dstWidth;
            int srcOffset0 = i * srcStride + srcByteOffset;
            int srcOffset1 = srcOffset0 + srcStride;
            int pixel00 = (int) src[srcOffset0++] & 0xff;
            int pixel10 = (int) src[srcOffset1++] & 0xff;
            for (int j = 0; j < dstWidth / 2 - 1; j++) {
                int pixel01 = (int) src[srcOffset0++] & 0xff;
                int pixel11 = (int) src[srcOffset1++] & 0xff;
                dst[dstOffset0++] = (byte) pixel00;
                dst[dstOffset0++] = (byte) ((pixel00 + pixel01 + 1) / 2);
                dst[dstOffset1++] = (byte) ((pixel00 + pixel10 + 1) / 2);
                dst[dstOffset1++] = (byte) ((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
                pixel00 = pixel01;
                pixel10 = pixel11;
            }
            // last column
            dst[dstOffset0++] = (byte) pixel00;
            dst[dstOffset0++] = (byte) pixel00;
            dst[dstOffset1++] = (byte) ((pixel00 + pixel10 + 1) / 2);
            dst[dstOffset1++] = (byte) ((pixel00 + pixel10 + 1) / 2);
        }

        // last row
        int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
        int dstOffset1 = dstOffset0 + dstWidth;
        int srcOffset0 = (dstHeight / 2 - 1) * srcStride + srcByteOffset;
        int pixel00 = (int) src[srcOffset0++] & 0xff;
        for (int j = 0; j < dstWidth / 2 - 1; j++) {
            int pixel01 = (int) src[srcOffset0++] & 0xff;
            dst[dstOffset0++] = (byte) pixel00;
            dst[dstOffset0++] = (byte) ((pixel00 + pixel01 + 1) / 2);
            dst[dstOffset1++] = (byte) pixel00;
            dst[dstOffset1++] = (byte) ((pixel00 + pixel01 + 1) / 2);
            pixel00 = pixel01;
        }
        // the very last pixel - bottom right
        dst[dstOffset0++] = (byte) pixel00;
        dst[dstOffset0++] = (byte) pixel00;
        dst[dstOffset1++] = (byte) pixel00;
        dst[dstOffset1++] = (byte) pixel00;
    }

    /**
     * Up-scales an image.
     * The scale factor is defined by the source and destination width ratio.
     * Only 1:2 and 1:4 up-scaling is supported for now.
     * For a 640x480 -> 1280x720 conversion, only the top 640x360 part of the
     * original image is scaled.
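     * A destination height smaller than the full 1:2 or 1:4 scale means that
     * only the top source rows contribute, which is how the 640x360 sub-window
     * above is produced.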
     */
    private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
            int dstWidth, int dstHeight) throws Exception {
        int srcYSize = srcWidth * srcHeight;
        int dstYSize = dstWidth * dstHeight;
        byte[] dst = null;
        if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
            // 1:2 upscale
            dst = new byte[dstWidth * dstHeight * 3 / 2];
            imageUpscale1To2(src, 0, srcWidth,
                    dst, 0, dstWidth, dstHeight);                        // Y
            imageUpscale1To2(src, srcYSize, srcWidth / 2,
                    dst, dstYSize, dstWidth / 2, dstHeight / 2);         // U
            imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
                    dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2); // V
        } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
            // 1:4 upscale - in two steps
            int midWidth = 2 * srcWidth;
            int midHeight = 2 * srcHeight;
            byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
            dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
        } else {
            throw new RuntimeException("Can not find proper scaling function");
        }

        return dst;
    }

    private void cacheScaledImage(
            String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight,
            String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
        InputStream srcStream = OpenFileOrResource(srcYuvFilename, srcResource);
        FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
        int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];
        byte[] dstFrame = null;
        Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
        while (true) {
            int bytesRead = srcStream.read(srcFrame);
            if (bytesRead != srcFrame.length) {
                break;
            }
            if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
                dstFrame = srcFrame;
            } else {
                dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
                        dstFrameWidth, dstFrameHeight);
            }
            dstFile.write(dstFrame);
        }
        srcStream.close();
        dstFile.close();
    }

    /**
     * A basic check that an encoded stream is decodable.
     *
     * The most basic confirmation we can get about a frame
     * being properly encoded is trying to decode it.
     * (Especially in realtime mode, encoder output is non-
     * deterministic, so a more thorough check like an
     * md5 sum comparison wouldn't work.)
     *
     * Indeed, MediaCodec will raise an IllegalStateException
     * whenever the video decoder fails to decode a frame, and
     * this test uses that fact to verify the bitstream.
     *
     * @param inputIvfFilename The name of the IVF file containing the encoded bitstream.
     * @param outputYuvFilename The name of the output YUV file (optional).
     * @param codecMimeType Mime type of the encoded content.
     * @param frameRate Frame rate of the input file in frames per second.
     * @param forceGoogleDecoder Force use of the Google video decoder.
     * @param codecConfigs Codec config buffers to be added to the format.
     */
    protected ArrayList<MediaCodec.BufferInfo> decode(
            String inputIvfFilename,
            String outputYuvFilename,
            String codecMimeType,
            int frameRate,
            boolean forceGoogleDecoder,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {
        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();

        // Open input/output.
        IvfReader ivf = new IvfReader(inputIvfFilename);
        int frameWidth = ivf.getWidth();
        int frameHeight = ivf.getHeight();
        int frameCount = ivf.getFrameCount();
        int frameStride = frameWidth;
        int frameSliceHeight = frameHeight;
        int cropLeft = 0;
        int cropTop = 0;
        int cropWidth = frameWidth;
        int cropHeight = frameHeight;
        assertTrue(frameWidth > 0);
        assertTrue(frameHeight > 0);
        assertTrue(frameCount > 0);

        // Create decoder.
        MediaFormat format = MediaFormat.createVideoFormat(
                codecMimeType, ivf.getWidth(), ivf.getHeight());
        CodecProperties properties = getVideoCodecProperties(
                false /* encoder */, format, forceGoogleDecoder);
        if (properties == null) {
            ivf.close();
            return null;
        }
        int frameColorFormat = properties.colorFormat;
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        int csdIndex = 0;
        for (ByteBuffer config : codecConfigs) {
            format.setByteBuffer("csd-" + csdIndex, config);
            ++csdIndex;
        }

        FileOutputStream yuv = null;
        if (outputYuvFilename != null) {
            yuv = new FileOutputStream(outputYuvFilename, false);
        }

        Log.d(TAG, "Creating decoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
                ". " + frameWidth + " x " + frameHeight);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  In: " + inputIvfFilename + ". Out: " + outputYuvFilename);
        MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
        decoder.configure(format,
                null,  // surface
                null,  // crypto
                0);    // flags
        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();

        // decode loop
        int inputFrameIndex = 0;
        int outputFrameIndex = 0;
        long inPresentationTimeUs = 0;
        long outPresentationTimeUs = 0;
        boolean sawOutputEOS = false;
        boolean sawInputEOS = false;

        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
                if (inputBufIndex >= 0) {
                    byte[] frame = ivf.readFrame(inputFrameIndex);

                    if (inputFrameIndex == frameCount - 1) {
                        Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
                        sawInputEOS = true;
                    }

                    inputBuffers[inputBufIndex].clear();
                    inputBuffers[inputBufIndex].put(frame);
                    inputBuffers[inputBufIndex].rewind();
                    inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;

                    decoder.queueInputBuffer(
                            inputBufIndex,
                            0,  // offset
                            frame.length,
                            inPresentationTimeUs,
                            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                    inputFrameIndex++;
                }
            }

            int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = decoder.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // Process format change
                    format = decoder.getOutputFormat();
                    frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
                    frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
                    frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    Log.d(TAG, "Decoder output format change. Color: 0x" +
                            Integer.toHexString(frameColorFormat));
                    Log.d(TAG, "Format: " + format.toString());

                    // Parse frame stride and slice height from undocumented values
                    if (format.containsKey("stride")) {
                        frameStride = format.getInteger("stride");
                    } else {
                        frameStride = frameWidth;
                    }
                    if (format.containsKey("slice-height")) {
                        frameSliceHeight = format.getInteger("slice-height");
                    } else {
                        frameSliceHeight = frameHeight;
                    }
                    Log.d(TAG, "Frame stride and slice height: " + frameStride +
                            " x " + frameSliceHeight);
                    frameStride = Math.max(frameWidth, frameStride);
                    frameSliceHeight = Math.max(frameHeight, frameSliceHeight);

                    // Parse the crop window - the area of the decoded frame that
                    // holds the recorded data.
                    if (format.containsKey("crop-left")) {
                        cropLeft = format.getInteger("crop-left");
                    }
                    if (format.containsKey("crop-top")) {
                        cropTop = format.getInteger("crop-top");
                    }
                    if (format.containsKey("crop-right")) {
                        cropWidth = format.getInteger("crop-right") - cropLeft + 1;
                    } else {
                        cropWidth = frameWidth;
                    }
                    if (format.containsKey("crop-bottom")) {
                        cropHeight = format.getInteger("crop-bottom") - cropTop + 1;
                    } else {
                        cropHeight = frameHeight;
                    }
                    Log.d(TAG, "Frame crop window origin: " + cropLeft + " x " + cropTop
                            + ", size: " + cropWidth + " x " + cropHeight);
                    cropWidth = Math.min(frameWidth - cropLeft, cropWidth);
                    cropHeight = Math.min(frameHeight - cropTop, cropHeight);
                }
                result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
            }
            if (result >= 0) {
                int outputBufIndex = result;
                outPresentationTimeUs = bufferInfo.presentationTimeUs;
                Log.v(TAG, "Writing buffer # " + outputFrameIndex +
                        ". Size: " + bufferInfo.size +
                        ". InTime: " + (inPresentationTimeUs + 500) / 1000 +
                        ". OutTime: " + (outPresentationTimeUs + 500) / 1000);
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                    Log.d(TAG, "   Output EOS for frame # " + outputFrameIndex);
                }

                if (bufferInfo.size > 0) {
                    // Save decoder output to yuv file.
                    if (yuv != null) {
                        byte[] frame = new byte[bufferInfo.size];
                        outputBuffers[outputBufIndex].position(bufferInfo.offset);
                        outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
                        // Convert NV12 to YUV420 if necessary.
                        if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                            frame = NV12ToYUV420(frameWidth, frameHeight,
                                    frameStride, frameSliceHeight, frame);
                        }
                        int writeLength = Math.min(cropWidth * cropHeight * 3 / 2, frame.length);
                        // Pack the frame if necessary.
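                        // (PackYUV420 strips the stride and slice-height padding
                        // so only the visible crop window is written to the file)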
                        if (writeLength < frame.length &&
                                (frameStride > cropWidth || frameSliceHeight > cropHeight)) {
                            frame = PackYUV420(cropLeft, cropTop, cropWidth, cropHeight,
                                    frameStride, frameSliceHeight, frame);
                        }
                        yuv.write(frame, 0, writeLength);
                    }
                    outputFrameIndex++;

                    // Update statistics - store the presentation time delay in the offset field.
                    long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int) presentationTimeUsDelta, bufferInfo.size,
                            outPresentationTimeUs, bufferInfo.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
                decoder.releaseOutputBuffer(outputBufIndex, false);
            }
        }
        decoder.stop();
        decoder.release();
        ivf.close();
        if (yuv != null) {
            yuv.close();
        }

        return bufferInfos;
    }

    /**
     * Helper function to return an InputStream from either a fully specified filename (if set)
     * or a resource name within the test assets (if the filename is not set).
     */
    private InputStream OpenFileOrResource(String filename, final String resource)
            throws Exception {
        if (filename != null) {
            Preconditions.assertTestFileExists(filename);
            return new FileInputStream(filename);
        }
        Preconditions.assertTestFileExists(mInpPrefix + resource);
        return new FileInputStream(mInpPrefix + resource);
    }

    /**
     * Results of frame encoding.
     */
    protected class MediaEncoderOutput {
        public long inPresentationTimeUs;
        public long outPresentationTimeUs;
        public boolean outputGenerated;
        public int flags;
        public byte[] buffer;
    }

    protected class MediaEncoderAsyncHelper {
        private final EncoderOutputStreamParameters mStreamParams;
        private final CodecProperties mProperties;
        private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
        private final IvfWriter mIvf;
        private final ArrayList<ByteBuffer> mCodecConfigs;
        private final byte[] mSrcFrame;

        private InputStream mYuvStream;
        private int mInputFrameIndex;

        MediaEncoderAsyncHelper(
                EncoderOutputStreamParameters streamParams,
                CodecProperties properties,
                ArrayList<MediaCodec.BufferInfo> bufferInfos,
                IvfWriter ivf,
                ArrayList<ByteBuffer> codecConfigs)
                throws Exception {
            mStreamParams = streamParams;
            mProperties = properties;
            mBufferInfos = bufferInfos;
            mIvf = ivf;
            mCodecConfigs = codecConfigs;

            int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
            mSrcFrame = new byte[srcFrameSize];

            mYuvStream = OpenFileOrResource(
                    streamParams.inputYuvFilename, streamParams.inputResource);
        }

        public byte[] getInputFrame() {
            // Check EOS
            if (mStreamParams.frameCount == 0
                    || (mStreamParams.frameCount > 0
                            && mInputFrameIndex >= mStreamParams.frameCount)) {
                Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
                return null;
            }

            try {
                int bytesRead = mYuvStream.read(mSrcFrame);

                if (bytesRead == -1) {
                    // rewind to the beginning of the file
                    mYuvStream.close();
                    mYuvStream = OpenFileOrResource(
                            mStreamParams.inputYuvFilename, mStreamParams.inputResource);
                    bytesRead = mYuvStream.read(mSrcFrame);
                }
            } catch (Exception e) {
                Log.e(TAG, "Failed to read YUV file.");
                return null;
            }
            mInputFrameIndex++;

            // Convert YUV420 to NV12 if necessary
            if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
                        mSrcFrame);
            } else {
                return mSrcFrame;
            }
        }

        public boolean saveOutputFrame(MediaEncoderOutput out) {
            if (out.outputGenerated) {
                if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    Log.d(TAG, "Storing codec config separately");
                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                    csdBuffer.rewind();
                    mCodecConfigs.add(csdBuffer);
                    out.buffer = new byte[0];
                }
                if (out.buffer.length > 0) {
                    // Save frame
                    try {
                        mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
                    } catch (Exception e) {
                        Log.d(TAG, "Failed to write frame");
                        return true;
                    }

                    // Update statistics - store the presentation time delay in the offset field.
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int) presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    mBufferInfos.add(bufferInfoCopy);
                }
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    return true;
                }
            }
            return false;
        }
    }

    /**
     * Video encoder wrapper class.
     * Allows running the encoder either in the caller's thread or in a looper thread
     * using buffer dequeue ready notification callbacks.
     *
     * The feedInput() function is used to send a raw video frame to the encoder input.
     * When the encoder is configured to run in async mode the function will run in a
     * looper thread. Encoded frames can be retrieved by calling the getOutput() function.
     */
    protected class MediaEncoderAsync extends Thread {
        private int mId;
        private MediaCodecWrapper mCodec;
        private ByteBuffer[] mInputBuffers;
        private ByteBuffer[] mOutputBuffers;
        private int mInputFrameIndex;
        private int mOutputFrameIndex;
        private int mInputBufIndex;
        private int mFrameRate;
        private long mTimeout;
        private MediaCodec.BufferInfo mBufferInfo;
        private long mInPresentationTimeUs;
        private long mOutPresentationTimeUs;
        private boolean mAsync;
        // Flag indicating whether the input frame was consumed by the encoder in
        // the feedInput() call.
        private boolean mConsumedInput;
        // Result of frame encoding returned by the getOutput() call.
        private MediaEncoderOutput mOutput;
        // Object used to signal that the looper thread has started and the Handler
        // instance associated with the looper thread has been allocated.
        private final Object mThreadEvent = new Object();
        // Object used to signal that the MediaCodec buffer dequeue notification
        // callback was received.
        private final Object mCallbackEvent = new Object();
        private Handler mHandler;
        private boolean mCallbackReceived;
        private MediaEncoderAsyncHelper mHelper;
        private final Object mCompletionEvent = new Object();
        private boolean mCompleted;
        private boolean mInitialSyncFrameReceived;

        private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                byte[] encFrame = mHelper.getInputFrame();
                boolean inputEOS = (encFrame == null);

                int encFrameLength = 0;
                int flags = 0;
                if (inputEOS) {
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                } else {
                    encFrameLength = encFrame.length;

                    ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
                    byteBuffer.put(encFrame);
                    byteBuffer.rewind();

                    mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;

                    Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
                            ". InTime: " + (mInPresentationTimeUs + 500) / 1000);

                    mInputFrameIndex++;
                }

                mCodec.queueInputBuffer(
                        index,
                        0,  // offset
                        encFrameLength,  // size
                        mInPresentationTimeUs,
                        flags);
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec,
                    int index, MediaCodec.BufferInfo info) {
                if (mHelper == null) {
                    Log.e(TAG, "async helper not available");
                    return;
                }

                MediaEncoderOutput out = new MediaEncoderOutput();

                out.buffer = new byte[info.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
                outputBuffer.get(out.buffer, 0, info.size);
                mOutPresentationTimeUs = info.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                    if (!mInitialSyncFrameReceived) {
                        mInitialSyncFrameReceived = true;
                    }
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + info.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500) / 1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500) / 1000;
                Log.v(TAG, logStr);

                if (!mInitialSyncFrameReceived
                        && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    throw new RuntimeException("Non codec_config_frame before first sync.");
                }

                if (info.size > 0) {
                    mOutputFrameIndex++;
                    out.inPresentationTimeUs = mInPresentationTimeUs;
                    out.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(index, false);

                out.flags = info.flags;
                out.outputGenerated = true;

                if (mHelper.saveOutputFrame(out)) {
                    // output EOS
                    signalCompletion();
                }
            }

            @Override
            public void onError(MediaCodec codec, CodecException e) {
                Log.e(TAG, "onError: " + e
                        + ", transient " + e.isTransient()
                        + ", recoverable " + e.isRecoverable()
                        + ", error " + e.getErrorCode());
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                Log.i(TAG, "onOutputFormatChanged: " + format.toString());
            }
        };

        private synchronized void requestStart() throws Exception {
            mHandler = null;
            start();
            // Wait for the Handler allocation
            synchronized (mThreadEvent) {
                while (mHandler == null) {
                    mThreadEvent.wait();
                }
            }
        }

        public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
            mHelper = helper;
        }

        @Override
        public void run() {
            Looper.prepare();
            synchronized (mThreadEvent) {
                mHandler = new Handler();
                mThreadEvent.notify();
            }
            Looper.loop();
        }

        private void runCallable(final Callable<?> callable) throws Exception {
            if (mAsync) {
                final Exception[] exception = new Exception[1];
                final CountDownLatch countDownLatch = new CountDownLatch(1);
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            callable.call();
                        } catch (Exception e) {
                            exception[0] = e;
                        } finally {
                            countDownLatch.countDown();
                        }
                    }
                });

                // Wait for task completion
                countDownLatch.await();
                if (exception[0] != null) {
                    throw exception[0];
                }
            } else {
                callable.call();
            }
        }

        private synchronized void requestStop() throws Exception {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    // This will run on the Looper thread
                    Log.v(TAG, "MediaEncoder looper quitting");
                    Looper.myLooper().quitSafely();
                }
            });
            // Wait for completion
            join();
            mHandler = null;
        }

        private void createCodecInternal(final String name,
                final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
            mBufferInfo = new MediaCodec.BufferInfo();
            mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
            mTimeout = timeout;
            mInputFrameIndex = 0;
            mOutputFrameIndex = 0;
            mInPresentationTimeUs = 0;
            mOutPresentationTimeUs = 0;

            if (useNdk) {
                mCodec = new NdkMediaCodec(name);
            } else {
                mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
            }
            if (mAsync) {
                mCodec.setCallback(mCallback);
            }
            mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mCodec.start();
            // get the cached input/output buffers only in sync mode
            if (!mAsync) {
                mInputBuffers = mCodec.getInputBuffers();
                mOutputBuffers = mCodec.getOutputBuffers();
            }
        }

        public void createCodec(int id, final String name, final MediaFormat format,
                final long timeout, boolean async, final boolean useNdk) throws Exception {
            mId = id;
            mAsync = async;
            if (mAsync) {
                requestStart(); // start looper thread
            }
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    createCodecInternal(name, format, timeout, useNdk);
                    return null;
                }
            });
        }

        private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
            mConsumedInput = false;
            // Feed input
            mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);

            if (mInputBufIndex >= 0) {
                ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
                inputBuffer.clear();
                inputBuffer.put(encFrame);
                inputBuffer.rewind();
                int encFrameLength = encFrame.length;
                int flags = 0;
                if (inputEOS) {
                    encFrameLength = 0;
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                }
                if (!inputEOS) {
                    // Compute the timestamp before logging, so the log shows the
                    // time of the current frame rather than the previous one.
                    mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
                    Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
                            ". InTime: " + (mInPresentationTimeUs + 500) / 1000);
                    mInputFrameIndex++;
                }

                mCodec.queueInputBuffer(
                        mInputBufIndex,
                        0,  // offset
                        encFrameLength,  // size
                        mInPresentationTimeUs,
                        flags);

                mConsumedInput = true;
            } else {
                Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
            }
            mCallbackReceived = false;
        }

        public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    feedInputInternal(encFrame, inputEOS);
                    return null;
                }
            });
            return mConsumedInput;
        }

        private void getOutputInternal() {
            mOutput = new MediaEncoderOutput();
            mOutput.inPresentationTimeUs = mInPresentationTimeUs;
            mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
            mOutput.outputGenerated = false;

            // Get output from the encoder
            int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    mOutputBuffers = mCodec.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
                }
                result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            }
            if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
            }

            if (result >= 0) {
                int outputBufIndex = result;
                mOutput.buffer = new byte[mBufferInfo.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
                outputBuffer.position(mBufferInfo.offset);
                outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
                mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                    if (!mInitialSyncFrameReceived) {
                        mInitialSyncFrameReceived = true;
                    }
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + mBufferInfo.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500) / 1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500) / 1000;
                Log.v(TAG, logStr);

                if (!mInitialSyncFrameReceived
                        && (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    throw new RuntimeException("Non codec_config_frame before first sync.");
                }

                if (mBufferInfo.size > 0) {
                    mOutputFrameIndex++;
                    mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(outputBufIndex, false);

                mOutput.flags = mBufferInfo.flags;
                mOutput.outputGenerated = true;
            }
            mCallbackReceived = false;
        }

        public MediaEncoderOutput getOutput() throws Exception {
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    getOutputInternal();
                    return null;
                }
            });
            return mOutput;
        }

        public void forceSyncFrame() throws Exception {
            final Bundle syncFrame = new Bundle();
            syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.setParameters(syncFrame);
                    return null;
                }
            });
        }

        public void updateBitrate(int bitrate) throws Exception {
            final Bundle bitrateUpdate = new Bundle();
            bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.setParameters(bitrateUpdate);
                    return null;
                }
            });
        }

        public void waitForBufferEvent() throws Exception {
            Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
            if (mAsync) {
                synchronized (mCallbackEvent) {
                    if (!mCallbackReceived) {
                        mCallbackEvent.wait(1000); // wait up to 1 sec for a callback
                        // throw an exception if the callback was not received
                        if (!mCallbackReceived) {
                            throw new RuntimeException("MediaCodec callback was not received");
                        }
                    }
                }
            } else {
                Thread.sleep(5);
            }
            Log.v(TAG, "----Waiting for bufferEvent done");
        }

        public void waitForCompletion(long timeoutMs) throws Exception {
            synchronized (mCompletionEvent) {
                long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;

                while (!mCompleted) {
                    mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
                    if (System.currentTimeMillis() >= timeoutExpiredMs) {
                        throw new RuntimeException("encoding has timed out!");
                    }
                }
            }
        }

        public void signalCompletion() {
            synchronized (mCompletionEvent) {
                mCompleted = true;
                mCompletionEvent.notify();
            }
        }

        public void deleteCodec() throws Exception {
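            // In async mode the stop()/release() calls are posted to the looper
            // thread via runCallable(), the same thread the codec was created on.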
            runCallable(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    mCodec.stop();
                    mCodec.release();
                    return null;
                }
            });
            if (mAsync) {
                requestStop(); // Stop looper thread
            }
        }
    }

    /**
     * @see #encode(EncoderOutputStreamParameters, ArrayList)
     */
    protected ArrayList<MediaCodec.BufferInfo> encode(
            EncoderOutputStreamParameters streamParams) throws Exception {
        return encode(streamParams, new ArrayList<ByteBuffer>());
    }

    /**
     * Video encoding loop supporting encoding of single streams, with an option
     * to run in a looper thread and use buffer-ready notification callbacks.
     *
     * The output stream is described by the streamParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever the video encoder fails to encode a frame.
     *
     * The color format of the input file should be YUV420, and frameWidth and
     * frameHeight should be supplied correctly, as a raw input file doesn't
     * include any header data.
     *
     * @param streamParams Structure with encoder parameters
     * @param codecConfigs List to be filled with codec config buffers
     * @return Returns an array of encoded frame information for each frame.
     */
    protected ArrayList<MediaCodec.BufferInfo> encode(
            EncoderOutputStreamParameters streamParams,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: " + streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create a minimal media format signifying the desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVideoCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            return null;
        }

        // Open input/output
        InputStream yuvStream = OpenFileOrResource(
                streamParams.inputYuvFilename, streamParams.inputResource);
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Fill in the media format with the remaining desired output parameters.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // temporal layers
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate / 2) /
                streamParams.frameRate; // in sec
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps: " + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame: " + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf: " + streamParams.outputIvfFilename);
        MediaEncoderAsync codec = new MediaEncoderAsync();
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);

        // encode loop
        boolean sawInputEOS = false;        // no more data
        boolean consumedInputEOS = false;   // EOS flag has been consumed by the encoder
        boolean sawOutputEOS = false;
        boolean inputConsumed = true;
        int inputFrameIndex = 0;
        int lastBitrate = bitrate;
        int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];

        while (!sawOutputEOS) {

            // Read and feed input frame
            if (!consumedInputEOS) {

                // Read a new input frame - if the previous input was consumed and no EOS
                if (inputConsumed && !sawInputEOS) {
                    int bytesRead = yuvStream.read(srcFrame);

                    // Check EOS
                    if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
                        sawInputEOS = true;
                        Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                    }

                    if (!sawInputEOS && bytesRead == -1) {
                        if (streamParams.frameCount == 0) {
                            sawInputEOS = true;
                            Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                        } else {
                            // rewind to the beginning of the file
                            yuvStream.close();
                            yuvStream = OpenFileOrResource(
                                    streamParams.inputYuvFilename, streamParams.inputResource);
                            bytesRead = yuvStream.read(srcFrame);
                        }
                    }

                    // Force a sync frame if syncForceFrameInterval is set.
                    if (!sawInputEOS && inputFrameIndex > 0 &&
                            streamParams.syncForceFrameInterval > 0 &&
                            (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
                        Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
                        codec.forceSyncFrame();
                    }

                    // Dynamic bitrate change.
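                    // (bitrateSet may hold a per-frame target; issue an update
                    // whenever it differs from the last applied value)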
                    if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
                        int newBitrate = streamParams.bitrateSet[inputFrameIndex];
                        if (newBitrate != lastBitrate) {
                            Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
                                    " for frame " + inputFrameIndex);
                            codec.updateBitrate(newBitrate);
                            lastBitrate = newBitrate;
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
                                srcFrame);
                    }
                }

                inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
                if (inputConsumed) {
                    inputFrameIndex++;
                    consumedInputEOS = sawInputEOS;
                }
            }

            // Get output from the encoder
            MediaEncoderOutput out = codec.getOutput();
            if (out.outputGenerated) {
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    sawOutputEOS = true;
                }
                if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    Log.d(TAG, "Storing codec config separately");
                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                    csdBuffer.rewind();
                    codecConfigs.add(csdBuffer);
                    out.buffer = new byte[0];
                }

                if (out.buffer.length > 0) {
                    // Save frame
                    ivf.writeFrame(out.buffer, out.outPresentationTimeUs);

                    // Update statistics - store the presentation time delay in the offset field.
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int) presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
            }

            // If the codec is not ready to accept input/output - wait for a buffer-ready callback
            if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
                codec.waitForBufferEvent();
            }
        }

        codec.deleteCodec();
        ivf.close();
        yuvStream.close();

        return bufferInfos;
    }

    /**
     * Video encoding run in a looper thread using buffer-ready callbacks.
     *
     * The output stream is described by the streamParams parameters.
     *
     * MediaCodec will raise an IllegalStateException
     * whenever the video encoder fails to encode a frame.
     *
     * The color format of the input file should be YUV420, and frameWidth and
     * frameHeight should be supplied correctly, as a raw input file doesn't
     * include any header data.
     *
     * @param streamParams Structure with encoder parameters
     * @param codecConfigs List to be filled with codec config buffers
     * @return Returns an array of encoded frame information for each frame.
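     *
     * Unlike encode(), all input feeding and output draining happens in the
     * MediaCodec callbacks via MediaEncoderAsyncHelper; this function only
     * configures the codec and waits for the completion signal.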
     */
    protected ArrayList<MediaCodec.BufferInfo> encodeAsync(
            EncoderOutputStreamParameters streamParams,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {
        if (!streamParams.runInLooperThread) {
            throw new RuntimeException("encodeAsync should run with a looper thread!");
        }

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        Log.d(TAG, "Source resolution: " + streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create a minimal media format signifying the desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getVideoCodecProperties(
                true, format, streamParams.forceGoogleEncoder);
        if (properties == null) {
            return null;
        }

        // Open input/output
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Fill in the media format with the remaining desired output parameters.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // temporal layers
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate / 2) /
                streamParams.frameRate; // in sec
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps: " + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame: " + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf: " + streamParams.outputIvfFilename);

        MediaEncoderAsync codec = new MediaEncoderAsync();
        MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
                streamParams, properties, bufferInfos, ivf, codecConfigs);

        codec.setAsyncHelper(helper);
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
        codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);

        codec.deleteCodec();
        ivf.close();

        return bufferInfos;
    }

    /**
     * Video encoding loop supporting encoding of multiple streams at a time.
     * Each output stream is described by its encodingParams entry, allowing
     * simultaneous encoding of various resolutions and bitrates, with an option to
     * control key frames and dynamic bitrate for each output stream independently.
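     * Input for each stream is produced by up-scaling the source yuv file with
     * cacheScaledImage(), so every encoder reads its own scaled copy.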
1610     *
1611     * MediaCodec will raise an IllegalStateException
1612     * whenever video encoder fails to encode a frame.
1613     *
1614     * Color format of input file should be YUV420, and frameWidth,
1615     * frameHeight should be supplied correctly as raw input file doesn't
1616     * include any header data.
1617     *
1618     * @param srcFrameWidth   Frame width of input yuv file
1619     * @param srcFrameHeight  Frame height of input yuv file
1620     * @param encodingParams  Encoder parameters
1621     * @param codecConfigs    List to be filled with codec config buffers
1622     * @return Returns a 2D array of encoded frame information for each stream and
1623     *         for each frame.
1624     */ encodeSimulcast( int srcFrameWidth, int srcFrameHeight, ArrayList<EncoderOutputStreamParameters> encodingParams, ArrayList<ArrayList<ByteBuffer>> codecConfigs)1625    protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
1626            int srcFrameWidth,
1627            int srcFrameHeight,
1628            ArrayList<EncoderOutputStreamParameters> encodingParams,
1629            ArrayList<ArrayList<ByteBuffer>> codecConfigs) throws Exception {
1630        int numEncoders = encodingParams.size();
1631
1632        // Create arrays of input/output streams, formats, bitrates etc.
1633        ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
1634                new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
1635        InputStream[] yuvStream = new InputStream[numEncoders];
1636        IvfWriter[] ivf = new IvfWriter[numEncoders];
1637        FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
1638        MediaFormat[] format = new MediaFormat[numEncoders];
1639        MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
1640        int[] inputFrameIndex = new int[numEncoders];
1641        boolean[] sawInputEOS = new boolean[numEncoders];
1642        boolean[] consumedInputEOS = new boolean[numEncoders];
1643        boolean[] inputConsumed = new boolean[numEncoders];
1644        boolean[] bufferConsumed = new boolean[numEncoders];
1645        boolean[] sawOutputEOS = new boolean[numEncoders];
1646        byte[][] srcFrame = new byte[numEncoders][];
1647        boolean sawOutputEOSTotal = false;
1648        boolean bufferConsumedTotal = false;
1649        CodecProperties[] codecProperties = new CodecProperties[numEncoders];
1650
1651        numEncoders = 0;
1652        for (EncoderOutputStreamParameters params : encodingParams) {
1653            int i = numEncoders;
1654            Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
1655                    params.frameHeight);
1656            int bitrate = params.bitrateSet[0];
1657
1658            // Create minimal media format signifying desired output.
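            // (Only KEY_BIT_RATE is set at this point; color format, frame rate and
            // I-frame interval are filled in further below, once the codec's
            // capabilities are known.)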
1659            format[i] = MediaFormat.createVideoFormat(
1660                    params.codecMimeType, params.frameWidth,
1661                    params.frameHeight);
1662            format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
1663            CodecProperties properties = getVideoCodecProperties(
1664                    true, format[i], params.forceGoogleEncoder);
1665            if (properties == null) {
1666                continue;
1667            }
1668
1669            // Check if scaled image was created
1670            int scale = params.frameWidth / srcFrameWidth;
1671            if (!mScaledImages.contains(scale)) {
1672                // resize image
1673                cacheScaledImage(params.inputYuvFilename, params.inputResource,
1674                        srcFrameWidth, srcFrameHeight,
1675                        params.scaledYuvFilename, params.frameWidth, params.frameHeight);
1676                mScaledImages.add(scale);
1677            }
1678
1679            // Create buffer info storage
1680            bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());
1681
1682            // Create YUV reader
1683            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
1684
1685            // Create IVF writer
1686            ivf[i] = new IvfWriter(
1687                    params.outputIvfFilename, params.codecMimeType,
1688                    params.frameWidth, params.frameHeight);
1689
1690            // Frame buffer
1691            int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
1692            srcFrame[i] = new byte[frameSize];
1693
1694            // Complete the media format describing the desired output.
1695            if (params.bitrateType == VIDEO_ControlRateConstant) {
1696                format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
1697            }
1698            if (params.temporalLayers > 0) {
1699                format[i].setInteger("ts-layers", params.temporalLayers); // number of temporal layers
1700            }
1701            format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
1702            format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
1703            int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
1704                    params.frameRate; // in sec
1705            format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
1706            // Log encoder configuration
1707            Log.d(TAG, "Creating encoder #" + i + " : " + properties.codecName +
1708                    ". Color format: 0x" + Integer.toHexString(properties.colorFormat) + " : " +
1709                    params.frameWidth + " x " + params.frameHeight +
1710                    ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
1711                    ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
1712                    ". Key frame:" + syncFrameInterval * params.frameRate +
1713                    ". 
Force keyFrame: " + params.syncForceFrameInterval); 1714 Log.d(TAG, " Format: " + format[i]); 1715 Log.d(TAG, " Output ivf:" + params.outputIvfFilename); 1716 1717 // Create encoder 1718 codec[i] = new MediaEncoderAsync(); 1719 codec[i].createCodec(i, properties.codecName, format[i], 1720 params.timeoutDequeue, params.runInLooperThread, params.useNdk); 1721 codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat); 1722 1723 inputConsumed[i] = true; 1724 ++numEncoders; 1725 } 1726 if (numEncoders == 0) { 1727 Log.i(TAG, "no suitable encoders found for any of the streams"); 1728 return null; 1729 } 1730 1731 while (!sawOutputEOSTotal) { 1732 // Feed input buffer to all encoders 1733 for (int i = 0; i < numEncoders; i++) { 1734 bufferConsumed[i] = false; 1735 if (consumedInputEOS[i]) { 1736 continue; 1737 } 1738 1739 EncoderOutputStreamParameters params = encodingParams.get(i); 1740 // Read new input buffers - if previous input was consumed and no EOS 1741 if (inputConsumed[i] && !sawInputEOS[i]) { 1742 int bytesRead = yuvStream[i].read(srcFrame[i]); 1743 1744 // Check EOS 1745 if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) { 1746 sawInputEOS[i] = true; 1747 Log.d(TAG, "---Enc" + i + 1748 ". Sending EOS empty frame for frame # " + inputFrameIndex[i]); 1749 } 1750 1751 if (!sawInputEOS[i] && bytesRead == -1) { 1752 if (params.frameCount == 0) { 1753 sawInputEOS[i] = true; 1754 Log.d(TAG, "---Enc" + i + 1755 ". Sending EOS empty frame for frame # " + inputFrameIndex[i]); 1756 } else { 1757 yuvStream[i].close(); 1758 yuvStream[i] = new FileInputStream(params.scaledYuvFilename); 1759 bytesRead = yuvStream[i].read(srcFrame[i]); 1760 } 1761 } 1762 1763 // Convert YUV420 to NV12 if necessary 1764 if (codecProperties[i].colorFormat != 1765 CodecCapabilities.COLOR_FormatYUV420Planar) { 1766 srcFrame[i] = 1767 YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]); 1768 } 1769 } 1770 1771 inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]); 1772 if (inputConsumed[i]) { 1773 inputFrameIndex[i]++; 1774 consumedInputEOS[i] = sawInputEOS[i]; 1775 bufferConsumed[i] = true; 1776 } 1777 1778 } 1779 1780 // Get output from all encoders 1781 for (int i = 0; i < numEncoders; i++) { 1782 if (sawOutputEOS[i]) { 1783 continue; 1784 } 1785 1786 MediaEncoderOutput out = codec[i].getOutput(); 1787 if (out.outputGenerated) { 1788 bufferConsumed[i] = true; 1789 // Detect output EOS 1790 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 1791 Log.d(TAG, "----Enc" + i + ". Output EOS "); 1792 sawOutputEOS[i] = true; 1793 } 1794 if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 1795 Log.d(TAG, "----Enc" + i + ". 
Storing codec config separately");
1796                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
1797                    csdBuffer.rewind();
1798                    codecConfigs.get(i).add(csdBuffer);
1799                    out.buffer = new byte[0];
1800                }
1801
1802                if (out.buffer.length > 0) {
1803                    // Save frame
1804                    ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);
1805
1806                    // Update statistics - store presentation time delay in offset
1807                    long presentationTimeUsDelta = out.inPresentationTimeUs -
1808                            out.outPresentationTimeUs;
1809                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
1810                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
1811                            out.outPresentationTimeUs, out.flags);
1812                    bufferInfos.get(i).add(bufferInfoCopy);
1813                }
1814            }
1815        }
1816
1817        // If codec is not ready to accept input/output - wait for buffer ready callback
1818        bufferConsumedTotal = false;
1819        for (boolean bufferConsumedCurrent : bufferConsumed) {
1820            bufferConsumedTotal |= bufferConsumedCurrent;
1821        }
1822        if (!bufferConsumedTotal) {
1823            // Pick the encoder to wait for
1824            for (int i = 0; i < numEncoders; i++) {
1825                if (!bufferConsumed[i] && !sawOutputEOS[i]) {
1826                    codec[i].waitForBufferEvent();
1827                    break;
1828                }
1829            }
1830        }
1831
1832        // Check if EOS happened for all encoders
1833        sawOutputEOSTotal = true;
1834        for (boolean sawOutputEOSStream : sawOutputEOS) {
1835            sawOutputEOSTotal &= sawOutputEOSStream;
1836        }
1837    }
1838
1839    for (int i = 0; i < numEncoders; i++) {
1840        codec[i].deleteCodec();
1841        ivf[i].close();
1842        yuvStream[i].close();
1843        if (yuvScaled[i] != null) {
1844            yuvScaled[i].close();
1845        }
1846    }
1847
1848    return bufferInfos;
1849    }
1850
1851    /**
1852     * Some encoding statistics.
1853     */
1854    protected class VideoEncodingStatistics { VideoEncodingStatistics()1855        VideoEncodingStatistics() {
1856            mBitrates = new ArrayList<Integer>();
1857            mFrames = new ArrayList<Integer>();
1858            mKeyFrames = new ArrayList<Integer>();
1859            mMinimumKeyFrameInterval = Integer.MAX_VALUE;
1860        }
1861
1862        public ArrayList<Integer> mBitrates;  // Bitrate values for each second of the encoded stream.
1863        public ArrayList<Integer> mFrames;    // Number of frames in each second of the encoded stream.
1864        public int mAverageBitrate;           // Average stream bitrate.
1865        public ArrayList<Integer> mKeyFrames; // Stores the position of key frames in a stream.
1866        public int mAverageKeyFrameInterval;  // Average key frame interval.
1867        public int mMaximumKeyFrameInterval;  // Maximum key frame interval.
1868        public int mMinimumKeyFrameInterval;  // Minimum key frame interval.
1869    }
1870
1871    /**
1872     * Calculates average bitrate and key frame interval for the encoded streams.
1873     * Output mBitrates field will contain bitrate values for every second
1874     * of the encoded stream.
1875     * Average stream bitrate will be stored in the mAverageBitrate field.
1876     * The mKeyFrames array will contain the positions of key frames in the encoded stream,
1877     * and mAverageKeyFrameInterval will hold the average key frame interval.
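     *
     * <p>Per-second bitrate is derived directly from the encoded frame sizes:
     * all frames whose presentation time falls within second {@code s} are
     * summed and multiplied by 8, i.e.
     * {@code mBitrates[s] = 8 * sum(info.size for all info with
     * info.presentationTimeUs / 1000000 == s)}.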
1878 */ computeEncodingStatistics(int encoderId, ArrayList<MediaCodec.BufferInfo> bufferInfos )1879 protected VideoEncodingStatistics computeEncodingStatistics(int encoderId, 1880 ArrayList<MediaCodec.BufferInfo> bufferInfos ) { 1881 VideoEncodingStatistics statistics = new VideoEncodingStatistics(); 1882 1883 int totalSize = 0; 1884 int frames = 0; 1885 int framesPerSecond = 0; 1886 int totalFrameSizePerSecond = 0; 1887 int maxFrameSize = 0; 1888 int currentSecond; 1889 int nextSecond = 0; 1890 String keyFrameList = " IFrame List: "; 1891 String bitrateList = " Bitrate list: "; 1892 String framesList = " FPS list: "; 1893 1894 1895 for (int j = 0; j < bufferInfos.size(); j++) { 1896 MediaCodec.BufferInfo info = bufferInfos.get(j); 1897 currentSecond = (int)(info.presentationTimeUs / 1000000); 1898 boolean lastFrame = (j == bufferInfos.size() - 1); 1899 if (!lastFrame) { 1900 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000); 1901 } 1902 1903 totalSize += info.size; 1904 totalFrameSizePerSecond += info.size; 1905 maxFrameSize = Math.max(maxFrameSize, info.size); 1906 framesPerSecond++; 1907 frames++; 1908 1909 // Update the bitrate statistics if the next frame will 1910 // be for the next second 1911 if (lastFrame || nextSecond > currentSecond) { 1912 int currentBitrate = totalFrameSizePerSecond * 8; 1913 bitrateList += (currentBitrate + " "); 1914 framesList += (framesPerSecond + " "); 1915 statistics.mBitrates.add(currentBitrate); 1916 statistics.mFrames.add(framesPerSecond); 1917 totalFrameSizePerSecond = 0; 1918 framesPerSecond = 0; 1919 } 1920 1921 // Update key frame statistics. 1922 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) { 1923 statistics.mKeyFrames.add(j); 1924 keyFrameList += (j + " "); 1925 } 1926 } 1927 int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000); 1928 duration = (duration + 500) / 1000; 1929 statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration); 1930 Log.d(TAG, "Statistics for encoder # " + encoderId); 1931 // Calculate average key frame interval in frames. 1932 int keyFrames = statistics.mKeyFrames.size(); 1933 if (keyFrames > 1) { 1934 statistics.mAverageKeyFrameInterval = 1935 statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0); 1936 statistics.mAverageKeyFrameInterval = 1937 Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1)); 1938 for (int j = 1; j < keyFrames; j++) { 1939 int keyFrameInterval = 1940 statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1); 1941 statistics.mMaximumKeyFrameInterval = 1942 Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval); 1943 statistics.mMinimumKeyFrameInterval = 1944 Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval); 1945 } 1946 Log.d(TAG, " Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval + 1947 ". Min: " + statistics.mMinimumKeyFrameInterval + 1948 ". Avg: " + statistics.mAverageKeyFrameInterval); 1949 } 1950 Log.d(TAG, " Frames: " + frames + ". Duration: " + duration + 1951 ". Total size: " + totalSize + ". 
Key frames: " + keyFrames);
1952        Log.d(TAG, keyFrameList);
1953        Log.d(TAG, bitrateList);
1954        Log.d(TAG, framesList);
1955        Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
1956        Log.d(TAG, "  Maximum frame size: " + maxFrameSize);
1957
1958        return statistics;
1959    }
1960
 computeEncodingStatistics( ArrayList<MediaCodec.BufferInfo> bufferInfos )1961    protected VideoEncodingStatistics computeEncodingStatistics(
1962            ArrayList<MediaCodec.BufferInfo> bufferInfos) {
1963        return computeEncodingStatistics(0, bufferInfos);
1964    }
1965
 computeSimulcastEncodingStatistics( ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos)1966    protected ArrayList<VideoEncodingStatistics> computeSimulcastEncodingStatistics(
1967            ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
1968        int numCodecs = bufferInfos.size();
1969        ArrayList<VideoEncodingStatistics> statistics = new ArrayList<VideoEncodingStatistics>();
1970
1971        for (int i = 0; i < numCodecs; i++) {
1972            VideoEncodingStatistics currentStatistics =
1973                    computeEncodingStatistics(i, bufferInfos.get(i));
1974            statistics.add(currentStatistics);
1975        }
1976        return statistics;
1977    }
1978
1979    /**
1980     * Calculates the maximum latency for an encoder/decoder based on the buffer info
1981     * array generated by either the encoder or the decoder.
1982     */ maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos)1983    protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
1984        int maxValue = 0;
1985        for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
1986            maxValue = Math.max(maxValue, bufferInfo.offset);
1987        }
1988        maxValue = (maxValue + 500) / 1000; // us -> ms
1989        return maxValue;
1990    }
1991
1992    /**
1993     * Decoding PSNR statistics.
1994     */
1995    protected class VideoDecodingStatistics { VideoDecodingStatistics()1996        VideoDecodingStatistics() {
1997            mMinimumPSNR = Integer.MAX_VALUE;
1998        }
1999        public double mAveragePSNR;
2000        public double mMinimumPSNR;
2001    }
2002
2003    /**
2004     * Calculates the PSNR value between two video frames.
2005     */ computePSNR(byte[] data0, byte[] data1)2006    private double computePSNR(byte[] data0, byte[] data1) {
2007        long squareError = 0;
2008        assertTrue(data0.length == data1.length);
2009        int length = data0.length;
2010        for (int i = 0; i < length; i++) {
2011            int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
2012            squareError += diff * diff;
2013        }
2014        double meanSquareError = (double)squareError / length;
2015        double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
2016        return psnr;
2017    }
2018
2019    /**
2020     * Calculates average and minimum PSNR values between
2021     * a set of reference and decoded video frames.
2022     * Runs PSNR calculation for the full duration of the decoded data.
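     *
     * <p>Each frame is compared plane by plane in YUV420 planar order: a Y plane of
     * {@code width * height} bytes followed by U and V planes of
     * {@code width * height / 4} bytes each. The overall average weights luma
     * more heavily than chroma:
     * {@code mAveragePSNR = (4 * avgYPSNR + avgUPSNR + avgVPSNR) / 6}.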
2023     */ computeDecodingStatistics( String referenceYuvFilename, String referenceYuvRaw, String decodedYuvFilename, int width, int height)2024    protected VideoDecodingStatistics computeDecodingStatistics(
2025            String referenceYuvFilename,
2026            String referenceYuvRaw,
2027            String decodedYuvFilename,
2028            int width,
2029            int height) throws Exception {
2030        VideoDecodingStatistics statistics = new VideoDecodingStatistics();
2031        InputStream referenceStream =
2032                OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2033        InputStream decodedStream = new FileInputStream(decodedYuvFilename);
2034
2035        int ySize = width * height;
2036        int uvSize = width * height / 4;
2037        byte[] yRef = new byte[ySize];
2038        byte[] yDec = new byte[ySize];
2039        byte[] uvRef = new byte[uvSize];
2040        byte[] uvDec = new byte[uvSize];
2041
2042        int frames = 0;
2043        double averageYPSNR = 0;
2044        double averageUPSNR = 0;
2045        double averageVPSNR = 0;
2046        double minimumYPSNR = Integer.MAX_VALUE;
2047        double minimumUPSNR = Integer.MAX_VALUE;
2048        double minimumVPSNR = Integer.MAX_VALUE;
2049        int minimumPSNRFrameIndex = 0;
2050
2051        while (true) {
2052            // Calculate Y PSNR.
2053            int bytesReadRef = referenceStream.read(yRef);
2054            int bytesReadDec = decodedStream.read(yDec);
2055            if (bytesReadDec == -1) {
2056                break;
2057            }
2058            if (bytesReadRef == -1) {
2059                // Reference file reached EOS - wrap around and reopen it
2060                referenceStream.close();
2061                referenceStream =
2062                        OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2063                bytesReadRef = referenceStream.read(yRef);
2064            }
2065            double curYPSNR = computePSNR(yRef, yDec);
2066            averageYPSNR += curYPSNR;
2067            minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
2068            double curMinimumPSNR = curYPSNR;
2069
2070            // Calculate U PSNR.
2071            bytesReadRef = referenceStream.read(uvRef);
2072            bytesReadDec = decodedStream.read(uvDec);
2073            double curUPSNR = computePSNR(uvRef, uvDec);
2074            averageUPSNR += curUPSNR;
2075            minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
2076            curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
2077
2078            // Calculate V PSNR.
2079            bytesReadRef = referenceStream.read(uvRef);
2080            bytesReadDec = decodedStream.read(uvDec);
2081            double curVPSNR = computePSNR(uvRef, uvDec);
2082            averageVPSNR += curVPSNR;
2083            minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
2084            curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
2085
2086            // Frame index for minimum PSNR value - helps to detect possible distortions
2087            if (curMinimumPSNR < statistics.mMinimumPSNR) {
2088                statistics.mMinimumPSNR = curMinimumPSNR;
2089                minimumPSNRFrameIndex = frames;
2090            }
2091
2092            String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
2093                    frames, curYPSNR, curUPSNR, curVPSNR);
2094            Log.v(TAG, logStr);
2095
2096            frames++;
2097        }
2098
2099        averageYPSNR /= frames;
2100        averageUPSNR /= frames;
2101        averageVPSNR /= frames;
2102        statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
2103
2104        Log.d(TAG, "PSNR statistics for " + frames + " frames.");
2105        String logStr = String.format(Locale.US,
2106                "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
2107                averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
2108        Log.d(TAG, logStr);
2109        logStr = String.format(Locale.US,
2110                "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. 
Overall: %.1f at frame %d", 2111 minimumYPSNR, minimumUPSNR, minimumVPSNR, 2112 statistics.mMinimumPSNR, minimumPSNRFrameIndex); 2113 Log.d(TAG, logStr); 2114 2115 referenceStream.close(); 2116 decodedStream.close(); 2117 return statistics; 2118 } 2119 } 2120