/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.IntDef;
import android.annotation.NonNull;
import android.compat.annotation.UnsupportedAppUsage;
import android.media.MediaCodec.BufferInfo;
import android.os.Build;

import dalvik.system.CloseGuard;

import java.io.FileDescriptor;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.Map;

/**
 * MediaMuxer facilitates muxing elementary streams. Currently MediaMuxer supports MP4, WebM,
 * and 3GP files as the output. It also supports muxing B-frames in MP4 since Android Nougat.
 * <p>
 * It is generally used like this:
 *
 * <pre>
 * MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
 * // More often, the MediaFormat will be retrieved from MediaCodec.getOutputFormat()
 * // or MediaExtractor.getTrackFormat().
 * MediaFormat audioFormat = new MediaFormat(...);
 * MediaFormat videoFormat = new MediaFormat(...);
 * int audioTrackIndex = muxer.addTrack(audioFormat);
 * int videoTrackIndex = muxer.addTrack(videoFormat);
 * ByteBuffer inputBuffer = ByteBuffer.allocate(bufferSize);
 * boolean finished = false;
 * BufferInfo bufferInfo = new BufferInfo();
 *
 * muxer.start();
 * while(!finished) {
 *   // getInputBuffer() will fill the inputBuffer with one frame of encoded
 *   // sample from either MediaCodec or MediaExtractor, set isAudioSample to
 *   // true when the sample is audio data, set up all the fields of bufferInfo,
 *   // and return true if there are no more samples.
 *   finished = getInputBuffer(inputBuffer, isAudioSample, bufferInfo);
 *   if (!finished) {
 *     int currentTrackIndex = isAudioSample ? audioTrackIndex : videoTrackIndex;
 *     muxer.writeSampleData(currentTrackIndex, inputBuffer, bufferInfo);
 *   }
 * };
 * muxer.stop();
 * muxer.release();
 * </pre>
 *

 <h4>Metadata Track</h4>
 <p>
 Per-frame metadata carries information that correlates with video or audio to facilitate offline
 processing. For example, gyro signals from the sensor can help video stabilization when doing
 offline processing. Metadata tracks are only supported when multiplexing to the MP4 container
 format. When adding a new metadata track, the MIME type format must start with the prefix
 "application/" (for example, "application/gyro"). The format of the metadata is
 application-defined. Metadata timestamps must be in the same time base as video and audio
 timestamps. The generated MP4 file uses TextMetaDataSampleEntry (defined in section 12.3.3.2 of
 the ISOBMFF specification) to signal the metadata's MIME type.

 <pre class=prettyprint>
 MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
 // Set up the video/audio tracks.
 MediaFormat audioFormat = new MediaFormat(...);
 MediaFormat videoFormat = new MediaFormat(...);
 int audioTrackIndex = muxer.addTrack(audioFormat);
 int videoTrackIndex = muxer.addTrack(videoFormat);

 // Set up the metadata track.
 MediaFormat metadataFormat = new MediaFormat(...);
 metadataFormat.setString(KEY_MIME, "application/gyro");
 int metadataTrackIndex = muxer.addTrack(metadataFormat);

 muxer.start();
 while(..) {
     // Allocate a ByteBuffer and write the gyro data (x, y, z) into it.
     ByteBuffer metaData = ByteBuffer.allocate(bufferSize);
     metaData.putFloat(x);
     metaData.putFloat(y);
     metaData.putFloat(z);
     BufferInfo metaInfo = new BufferInfo();
     // Associate this metadata with the video frame by setting
     // the same timestamp as the video frame.
     metaInfo.presentationTimeUs = currentVideoTrackTimeUs;
     metaInfo.offset = 0;
     metaInfo.flags = 0;
     metaInfo.size = bufferSize;
     muxer.writeSampleData(metadataTrackIndex, metaData, metaInfo);
 };
 muxer.stop();
 muxer.release();
 </pre>

 <h2 id=History><a name="History"></a>Features and API History</h2>
 <p>
 The following table summarizes feature support across API versions and containers.
 For API version numbers, see {@link android.os.Build.VERSION_CODES}.

 <style>
 .api > tr > th, .api > tr > td { text-align: center; padding: 4px 4px; }
 .api > tr > th   { vertical-align: bottom; }
 .api > tr > td   { vertical-align: middle; }
 .sml > tr > th, .sml > tr > td { text-align: center; padding: 2px 4px; }
 .fn { text-align: center; }
 </style>

 <table align="right" style="width: 0%">
  <thead>
   <tr><th>Symbol</th><th>Meaning</th></tr>
  </thead>
  <tbody class=sml>
   <tr><td>●</td><td>Supported</td></tr>
   <tr><td>○</td><td>Not supported</td></tr>
   <tr><td>▧</td><td>Supported in MP4/WebM/3GP</td></tr>
   <tr><td>⁕</td><td>Only supported in MP4</td></tr>
  </tbody>
 </table>
 <table align="center" style="width: 100%;">
  <thead class=api>
   <tr>
    <th rowspan=2>Feature</th>
    <th colspan="9">SDK Version</th>
   </tr>
   <tr>
    <th>18</th>
    <th>19</th>
    <th>20</th>
    <th>21</th>
    <th>22</th>
    <th>23</th>
    <th>24</th>
    <th>25</th>
    <th>26+</th>
   </tr>
  </thead>
  <tbody class=api>
   <tr>
    <td align="center">MP4 container</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
   </tr>
   <tr>
    <td align="center">WebM container</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
    <td>●</td>
   </tr>
   <tr>
    <td align="center">3GP container</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>●</td>
   </tr>
   <tr>
    <td align="center">Muxing B-Frames (bi-directional predicted frames)</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>⁕</td>
    <td>⁕</td>
    <td>⁕</td>
   </tr>
   <tr>
    <td align="center">Muxing Single Video/Audio Track</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
    <td>▧</td>
   </tr>
   <tr>
    <td align="center">Muxing Multiple Video/Audio Tracks</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>⁕</td>
   </tr>
   <tr>
    <td align="center">Muxing Metadata Tracks</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>○</td>
    <td>⁕</td>
   </tr>
  </tbody>
 </table>
*/

final public class MediaMuxer {

    static {
        System.loadLibrary("media_jni");
    }

    /**
     * Defines the output format. These constants are used with the constructor.
     */
    public static final class OutputFormat {
        /* Do not change these values without updating their counterparts
         * in include/media/stagefright/MediaMuxer.h!
         */
        private OutputFormat() {}
        /** @hide */
        public static final int MUXER_OUTPUT_FIRST = 0;
        /** MPEG4 media file format */
        public static final int MUXER_OUTPUT_MPEG_4 = MUXER_OUTPUT_FIRST;
        /** WEBM media file format */
        public static final int MUXER_OUTPUT_WEBM = MUXER_OUTPUT_FIRST + 1;
        /** 3GPP media file format */
        public static final int MUXER_OUTPUT_3GPP = MUXER_OUTPUT_FIRST + 2;
        /** HEIF media file format */
        public static final int MUXER_OUTPUT_HEIF = MUXER_OUTPUT_FIRST + 3;
        /** Ogg media file format */
        public static final int MUXER_OUTPUT_OGG = MUXER_OUTPUT_FIRST + 4;
        /** @hide */
        public static final int MUXER_OUTPUT_LAST = MUXER_OUTPUT_OGG;
    };

    /** @hide */
    @IntDef({
        OutputFormat.MUXER_OUTPUT_MPEG_4,
        OutputFormat.MUXER_OUTPUT_WEBM,
        OutputFormat.MUXER_OUTPUT_3GPP,
        OutputFormat.MUXER_OUTPUT_HEIF,
        OutputFormat.MUXER_OUTPUT_OGG,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface Format {}

    // All the native functions are listed here.
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static native long nativeSetup(@NonNull FileDescriptor fd, int format)
            throws IllegalArgumentException, IOException;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static native void nativeRelease(long nativeObject);
    private static native void nativeStart(long nativeObject);
    private static native void nativeStop(long nativeObject);
    private static native int nativeAddTrack(
            long nativeObject, @NonNull String[] keys, @NonNull Object[] values);
    private static native void nativeSetOrientationHint(
            long nativeObject, int degrees);
    private static native void nativeSetLocation(long nativeObject, int latitude, int longitude);
    private static native void nativeWriteSampleData(
            long nativeObject, int trackIndex, @NonNull ByteBuffer byteBuf,
            int offset, int size, long presentationTimeUs, @MediaCodec.BufferFlag int flags);

    // Muxer internal states.
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_UNINITIALIZED = -1;
    private static final int MUXER_STATE_INITIALIZED = 0;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_STARTED = 1;
    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private static final int MUXER_STATE_STOPPED = 2;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private int mState = MUXER_STATE_UNINITIALIZED;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private final CloseGuard mCloseGuard = CloseGuard.get();
    private int mLastTrackIndex = -1;

    @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
    private long mNativeObject;

    private String convertMuxerStateCodeToString(int aState) {
        switch (aState) {
            case MUXER_STATE_UNINITIALIZED:
                return "UNINITIALIZED";
            case MUXER_STATE_INITIALIZED:
                return "INITIALIZED";
            case MUXER_STATE_STARTED:
                return "STARTED";
            case MUXER_STATE_STOPPED:
                return "STOPPED";
            default:
                return "UNKNOWN";
        }
    }

    /**
     * Creates a media muxer that writes to the specified path.
     * <p>The caller must not use the file {@code path} before calling {@link #stop}.
     * @param path The path of the output media file.
     * @param format The format of the output media file.
     * @see android.media.MediaMuxer.OutputFormat
     * @throws IllegalArgumentException if path is invalid or format is not supported.
     * @throws IOException if an error occurs while opening or creating the output file.
     */
    public MediaMuxer(@NonNull String path, @Format int format) throws IOException {
        if (path == null) {
            throw new IllegalArgumentException("path must not be null");
        }
        // Use RandomAccessFile so we can open the file with RW access;
        // RW access allows the native writer to memory map the output file.
        RandomAccessFile file = null;
        try {
            file = new RandomAccessFile(path, "rws");
            file.setLength(0);
            FileDescriptor fd = file.getFD();
            setUpMediaMuxer(fd, format);
        } finally {
            if (file != null) {
                file.close();
            }
        }
    }

    /**
     * Creates a media muxer that writes to the specified FileDescriptor.
     * <p>The caller must not use the file referenced by the specified {@code fd} before calling
     * {@link #stop}.
     * <p>It is the caller's responsibility to close the file descriptor, which is safe to do
     * as soon as this call returns.
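     *
     * <p>For illustration only, a minimal sketch of this pattern; the output file name used
     * here is a hypothetical example, not part of this API:
     * <pre>
     * // Open the output with read-write access so that every container format is accepted.
     * RandomAccessFile outputFile = new RandomAccessFile("out.mp4", "rws");
     * MediaMuxer muxer = new MediaMuxer(outputFile.getFD(), OutputFormat.MUXER_OUTPUT_MPEG_4);
     * // Per the note above, the descriptor may be closed once the constructor has returned.
     * outputFile.close();
     * </pre>
     *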
     * @param fd The FileDescriptor of the output media file. If {@code format} is
     * {@link OutputFormat#MUXER_OUTPUT_WEBM}, {@code fd} must be open in read-write mode.
     * Otherwise, write mode is sufficient, but read-write is also accepted.
     * @param format The format of the output media file.
     * @see android.media.MediaMuxer.OutputFormat
     * @throws IllegalArgumentException if {@code format} is not supported, or if {@code fd} is
     * not open in the expected mode.
     * @throws IOException if an error occurs while performing an IO operation.
     */
    public MediaMuxer(@NonNull FileDescriptor fd, @Format int format) throws IOException {
        setUpMediaMuxer(fd, format);
    }

    private void setUpMediaMuxer(@NonNull FileDescriptor fd, @Format int format)
            throws IOException {
        if (format < OutputFormat.MUXER_OUTPUT_FIRST || format > OutputFormat.MUXER_OUTPUT_LAST) {
            throw new IllegalArgumentException("format: " + format + " is invalid");
        }
        mNativeObject = nativeSetup(fd, format);
        mState = MUXER_STATE_INITIALIZED;
        mCloseGuard.open("release");
    }

    /**
     * Sets the orientation hint for output video playback.
     * <p>This method should be called before {@link #start}. Calling this
     * method will not rotate the video frame when the muxer is generating the file,
     * but it will add a composition matrix containing the rotation angle to the output
     * video if the output format is
     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4}, so that a video player can
     * choose the proper orientation for playback. Note that some video players
     * may choose to ignore the composition matrix in a video during playback.
     * By default, the rotation degree is 0.</p>
     * @param degrees the angle to be rotated clockwise in degrees.
     * The supported angles are 0, 90, 180, and 270 degrees.
     * @throws IllegalArgumentException if the angle is not supported.
     * @throws IllegalStateException if this method is called after {@link #start}.
     */
    public void setOrientationHint(int degrees) {
        if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
            throw new IllegalArgumentException("Unsupported angle: " + degrees);
        }
        if (mState == MUXER_STATE_INITIALIZED) {
            nativeSetOrientationHint(mNativeObject, degrees);
        } else {
            throw new IllegalStateException("Can't set rotation degrees due" +
                    " to wrong state(" + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Sets and stores the geodata (latitude and longitude) in the output file.
     * This method should be called before {@link #start}. The geodata is stored
     * in the udta box if the output format is
     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4}, and is ignored for other output
     * formats. The geodata is stored according to the ISO-6709 standard.
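     *
     * <p>For illustration only, a minimal sketch of the expected call order; the coordinates
     * used here are arbitrary example values:
     * <pre>
     * MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
     * muxer.setLocation(37.4220f, -122.0841f); // must be called before start()
     * // ... addTrack(...) calls, then:
     * muxer.start();
     * </pre>
     *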
     * @param latitude Latitude in degrees. Its value must be in the range [-90,
     * 90].
     * @param longitude Longitude in degrees. Its value must be in the range
     * [-180, 180].
     * @throws IllegalArgumentException if the given latitude or longitude is out
     * of range.
     * @throws IllegalStateException if this method is called after {@link #start}.
     */
    public void setLocation(float latitude, float longitude) {
        int latitudex10000 = Math.round(latitude * 10000);
        int longitudex10000 = Math.round(longitude * 10000);

        if (latitudex10000 > 900000 || latitudex10000 < -900000) {
            String msg = "Latitude: " + latitude + " out of range.";
            throw new IllegalArgumentException(msg);
        }
        if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
            String msg = "Longitude: " + longitude + " out of range";
            throw new IllegalArgumentException(msg);
        }

        if (mState == MUXER_STATE_INITIALIZED && mNativeObject != 0) {
            nativeSetLocation(mNativeObject, latitudex10000, longitudex10000);
        } else {
            throw new IllegalStateException("Can't set location due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Starts the muxer.
     * <p>Make sure this is called after {@link #addTrack} and before
     * {@link #writeSampleData}.</p>
     * @throws IllegalStateException if this method is called after {@link #start}
     * or the muxer has been released.
     */
    public void start() {
        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }
        if (mState == MUXER_STATE_INITIALIZED) {
            nativeStart(mNativeObject);
            mState = MUXER_STATE_STARTED;
        } else {
            throw new IllegalStateException("Can't start due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    /**
     * Stops the muxer.
     * <p>Once the muxer stops, it cannot be restarted.</p>
     * @throws IllegalStateException if the muxer is in the wrong state.
     */
    public void stop() {
        if (mState == MUXER_STATE_STARTED) {
            try {
                nativeStop(mNativeObject);
            } catch (Exception e) {
                throw e;
            } finally {
                mState = MUXER_STATE_STOPPED;
            }
        } else {
            throw new IllegalStateException("Can't stop due to wrong state("
                    + convertMuxerStateCodeToString(mState) + ")");
        }
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            if (mCloseGuard != null) {
                mCloseGuard.warnIfOpen();
            }
            if (mNativeObject != 0) {
                nativeRelease(mNativeObject);
                mNativeObject = 0;
            }
        } finally {
            super.finalize();
        }
    }

    /**
     * Adds a track with the specified format.
     * <p>
     * The following table summarizes support for specific format keys across android releases.
     * Keys marked with '+:' are required.
     *
     * <table>
     *  <thead>
     *   <tr>
     *    <th rowspan=2>OS Version(s)</th>
     *    <th colspan=3>{@code MediaFormat} keys used for</th>
     *   </tr><tr>
     *    <th>All Tracks</th>
     *    <th>Audio Tracks</th>
     *    <th>Video Tracks</th>
     *   </tr>
     *  </thead>
     *  <tbody>
     *   <tr>
     *    <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
     *    <td rowspan=7>+: {@link MediaFormat#KEY_MIME}</td>
     *    <td rowspan=3>+: {@link MediaFormat#KEY_SAMPLE_RATE},<br>
     *        +: {@link MediaFormat#KEY_CHANNEL_COUNT},<br>
     *        +: <strong>codec-specific data<sup>AAC</sup></strong></td>
     *    <td rowspan=5>+: {@link MediaFormat#KEY_WIDTH},<br>
     *        +: {@link MediaFormat#KEY_HEIGHT},<br>
     *        no {@code KEY_ROTATION},
     *        use {@link #setOrientationHint setOrientationHint()}<sup>.mp4</sup>,<br>
     *        +: <strong>codec-specific data<sup>AVC, MPEG4</sup></strong></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
     *    <td rowspan=4>as above, plus<br>
     *        +: <strong>codec-specific data<sup>Vorbis & .webm</sup></strong></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#KEY_BIT_RATE}<sup>AAC</sup></td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
     *    <td>as above, plus<br>
     *        <!-- {link MediaFormat#KEY_MAX_BIT_RATE}<sup>AAC, MPEG4</sup>,<br> -->
     *        {@link MediaFormat#KEY_BIT_RATE}<sup>MPEG4</sup>,<br>
     *        {@link MediaFormat#KEY_HDR_STATIC_INFO}<sup>#, .webm</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_STANDARD}<sup>#</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_TRANSFER}<sup>#</sup>,<br>
     *        {@link MediaFormat#KEY_COLOR_RANGE}<sup>#</sup>,<br>
     *        +: <strong>codec-specific data<sup>HEVC</sup></strong>,<br>
     *        codec-specific data<sup>VP9</sup></td>
     *   </tr>
     *   <tr>
     *    <td colspan=4>
     *     <p class=note><strong>Notes:</strong><br>
     *      #: storing into container metadata.<br>
     *      .mp4, .webm…: for listed containers<br>
     *      MPEG4, AAC…: for listed codecs
     *    </td>
     *   </tr><tr>
     *    <td colspan=4>
     *     <p class=note>Note that the codec-specific data for the track must be specified using
     *     this method. Furthermore, codec-specific data must not be passed/specified via the
     *     {@link #writeSampleData writeSampleData()} call.
     *    </td>
     *   </tr>
     *  </tbody>
     * </table>
     *
     * <p>
     * The following table summarizes codec support for containers across android releases:
     *
     * <table>
     *  <thead>
     *   <tr>
     *    <th rowspan=2>OS Version(s)</th>
     *    <th colspan=2>Codec support</th>
     *   </tr><tr>
     *    <th>{@linkplain OutputFormat#MUXER_OUTPUT_MPEG_4 MP4}</th>
     *    <th>{@linkplain OutputFormat#MUXER_OUTPUT_WEBM WEBM}</th>
     *   </tr>
     *  </thead>
     *  <tbody>
     *   <tr>
     *    <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
     *    <td rowspan=6>{@link MediaFormat#MIMETYPE_AUDIO_AAC AAC},<br>
     *        {@link MediaFormat#MIMETYPE_AUDIO_AMR_NB NB-AMR},<br>
     *        {@link MediaFormat#MIMETYPE_AUDIO_AMR_WB WB-AMR},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_H263 H.263},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_MPEG4 MPEG-4},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_AVC AVC} (H.264)</td>
     *    <td rowspan=3>Not supported</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
     *    <td rowspan=3>{@link MediaFormat#MIMETYPE_AUDIO_VORBIS Vorbis},<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_VP8 VP8}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
     *   </tr><tr>
     *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_HEVC HEVC} (H.265)</td>
     *    <td>as above, plus<br>
     *        {@link MediaFormat#MIMETYPE_VIDEO_VP9 VP9}</td>
     *   </tr>
     *  </tbody>
     * </table>
     *
     * @param format The media format for the track. This must not be an empty
     *               MediaFormat.
     * @return The track index of the newly added track; use it when calling
     *         {@link #writeSampleData}.
     * @throws IllegalArgumentException if format is invalid.
     * @throws IllegalStateException if the muxer is in the wrong state.
     */
    public int addTrack(@NonNull MediaFormat format) {
        if (format == null) {
            throw new IllegalArgumentException("format must not be null.");
        }
        if (mState != MUXER_STATE_INITIALIZED) {
            throw new IllegalStateException("Muxer is not initialized.");
        }
        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }
        int trackIndex = -1;
        // Convert the MediaFormat into key-value pairs and send them to the native layer.
        Map<String, Object> formatMap = format.getMap();

        String[] keys = null;
        Object[] values = null;
        int mapSize = formatMap.size();
        if (mapSize > 0) {
            keys = new String[mapSize];
            values = new Object[mapSize];
            int i = 0;
            for (Map.Entry<String, Object> entry : formatMap.entrySet()) {
                keys[i] = entry.getKey();
                values[i] = entry.getValue();
                ++i;
            }
            trackIndex = nativeAddTrack(mNativeObject, keys, values);
        } else {
            throw new IllegalArgumentException("format must not be empty.");
        }

        // The track index is expected to increase each time addTrack succeeds.
        // However, if the format is invalid, a negative track index is returned.
        if (mLastTrackIndex >= trackIndex) {
            throw new IllegalArgumentException("Invalid format.");
        }
        mLastTrackIndex = trackIndex;
        return trackIndex;
    }

    /**
     * Writes an encoded sample into the muxer.
     * <p>The application needs to make sure that the samples are written into
     * the right tracks. Also, it needs to make sure the samples for each track
     * are written in chronological order (e.g. in the order they are provided
     * by the encoder.)</p>
     * <p>For the MPEG4 media format, the duration of the last sample in a track can be set by
     * passing an additional empty buffer ({@code bufferInfo.size = 0}) with the
     * {@link MediaCodec#BUFFER_FLAG_END_OF_STREAM} flag and a suitable presentation timestamp
     * set in the {@code bufferInfo} parameter as the last sample of that track. This last
     * sample's presentation timestamp shall be the sum of the presentation timestamp of the
     * original last sample and its desired duration. If no explicit END_OF_STREAM sample is
     * passed, the duration of the last sample will be the same as that of the sample before
     * it.</p>
     * @param trackIndex The track index for this sample.
     * @param byteBuf The encoded sample.
     * @param bufferInfo The buffer information related to this sample.
     * @throws IllegalArgumentException if trackIndex, byteBuf or bufferInfo is invalid.
     * @throws IllegalStateException if the muxer is in the wrong state.
     * MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo}
     * to signal sync frames.
     */
    public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf,
            @NonNull BufferInfo bufferInfo) {
        if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
            throw new IllegalArgumentException("trackIndex is invalid");
        }

        if (byteBuf == null) {
            throw new IllegalArgumentException("byteBuffer must not be null");
        }

        if (bufferInfo == null) {
            throw new IllegalArgumentException("bufferInfo must not be null");
        }
        if (bufferInfo.size < 0 || bufferInfo.offset < 0
                || (bufferInfo.offset + bufferInfo.size) > byteBuf.capacity()) {
            throw new IllegalArgumentException("bufferInfo must specify a" +
                    " valid buffer offset and size");
        }

        if (mNativeObject == 0) {
            throw new IllegalStateException("Muxer has been released!");
        }

        if (mState != MUXER_STATE_STARTED) {
            throw new IllegalStateException("Can't write, muxer is not started");
        }

        nativeWriteSampleData(mNativeObject, trackIndex, byteBuf,
                bufferInfo.offset, bufferInfo.size,
                bufferInfo.presentationTimeUs, bufferInfo.flags);
    }

    /**
     * Make sure you call this when you're done to free up any resources
     * instead of relying on the garbage collector to do this for you at
     * some point in the future.
     */
    public void release() {
        if (mState == MUXER_STATE_STARTED) {
            stop();
        }
        if (mNativeObject != 0) {
            nativeRelease(mNativeObject);
            mNativeObject = 0;
            mCloseGuard.close();
        }
        mState = MUXER_STATE_UNINITIALIZED;
    }
}