1 /* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media; 18 19 import android.annotation.IntDef; 20 import android.annotation.NonNull; 21 import android.annotation.Nullable; 22 import android.graphics.ImageFormat; 23 import android.graphics.Rect; 24 import android.graphics.SurfaceTexture; 25 import android.media.MediaCodecInfo.CodecCapabilities; 26 import android.os.Bundle; 27 import android.os.Handler; 28 import android.os.Looper; 29 import android.os.Message; 30 import android.view.Surface; 31 32 import java.io.IOException; 33 import java.lang.annotation.Retention; 34 import java.lang.annotation.RetentionPolicy; 35 import java.nio.ByteBuffer; 36 import java.nio.ByteOrder; 37 import java.nio.ReadOnlyBufferException; 38 import java.util.Arrays; 39 import java.util.HashMap; 40 import java.util.Map; 41 42 /** 43 MediaCodec class can be used to access low-level media codecs, i.e. encoder/decoder components. 44 It is part of the Android low-level multimedia support infrastructure (normally used together 45 with {@link MediaExtractor}, {@link MediaSync}, {@link MediaMuxer}, {@link MediaCrypto}, 46 {@link MediaDrm}, {@link Image}, {@link Surface}, and {@link AudioTrack}.) 
47 <p> 48 <center><object style="width: 540px; height: 205px;" type="image/svg+xml" 49 data="../../../images/media/mediacodec_buffers.svg"><img 50 src="../../../images/media/mediacodec_buffers.png" style="width: 540px; height: 205px" 51 alt="MediaCodec buffer flow diagram"></object></center> 52 <p> 53 In broad terms, a codec processes input data to generate output data. It processes data 54 asynchronously and uses a set of input and output buffers. At a simplistic level, you request 55 (or receive) an empty input buffer, fill it up with data and send it to the codec for 56 processing. The codec uses up the data and transforms it into one of its empty output buffers. 57 Finally, you request (or receive) a filled output buffer, consume its contents and release it 58 back to the codec. 59 60 <h3>Data Types</h3> 61 <p> 62 Codecs operate on three kinds of data: compressed data, raw audio data and raw video data. 63 All three kinds of data can be processed using {@link ByteBuffer ByteBuffers}, but you should use 64 a {@link Surface} for raw video data to improve codec performance. Surface uses native video 65 buffers without mapping or copying them to ByteBuffers; thus, it is much more efficient. 66 You normally cannot access the raw video data when using a Surface, but you can use the 67 {@link ImageReader} class to access unsecured decoded (raw) video frames. This may still be more 68 efficient than using ByteBuffers, as some native buffers may be mapped into {@linkplain 69 ByteBuffer#isDirect direct} ByteBuffers. When using ByteBuffer mode, you can access raw video 70 frames using the {@link Image} class and {@link #getInputImage getInput}/{@link #getOutputImage 71 OutputImage(int)}. 72 73 <h4>Compressed Buffers</h4> 74 <p> 75 Input buffers (for decoders) and output buffers (for encoders) contain compressed data according 76 to the {@linkplain MediaFormat#KEY_MIME format's type}. For video types this is a single 77 compressed video frame. 
For audio data this is normally a single access unit (an encoded audio 78 segment typically containing a few milliseconds of audio as dictated by the format type), but 79 this requirement is slightly relaxed in that a buffer may contain multiple encoded access units 80 of audio. In either case, buffers do not start or end on arbitrary byte boundaries, but rather on 81 frame/access unit boundaries. 82 83 <h4>Raw Audio Buffers</h4> 84 <p> 85 Raw audio buffers contain entire frames of PCM audio data, which is one sample for each channel 86 in channel order. Each sample is a {@linkplain AudioFormat#ENCODING_PCM_16BIT 16-bit signed 87 integer in native byte order}. 88 89 <pre class=prettyprint> 90 short[] getSamplesForChannel(MediaCodec codec, int bufferId, int channelIx) { 91 ByteBuffer outputBuffer = codec.getOutputBuffer(bufferId); 92 MediaFormat format = codec.getOutputFormat(bufferId); 93 ShortBuffer samples = outputBuffer.order(ByteOrder.nativeOrder()).asShortBuffer(); 94 int numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); 95 if (channelIx < 0 || channelIx >= numChannels) { 96 return null; 97 } 98 short[] res = new short[samples.remaining() / numChannels]; 99 for (int i = 0; i < res.length; ++i) { 100 res[i] = samples.get(i * numChannels + channelIx); 101 } 102 return res; 103 }</pre> 104 105 <h4>Raw Video Buffers</h4> 106 <p> 107 In ByteBuffer mode video buffers are laid out according to their {@linkplain 108 MediaFormat#KEY_COLOR_FORMAT color format}. You can get the supported color formats as an array 109 from {@link #getCodecInfo}{@code .}{@link MediaCodecInfo#getCapabilitiesForType 110 getCapabilitiesForType(…)}{@code .}{@link CodecCapabilities#colorFormats colorFormats}. 
111 Video codecs may support three kinds of color formats: 112 <ul> 113 <li><strong>native raw video format:</strong> This is marked by {@link 114 CodecCapabilities#COLOR_FormatSurface} and it can be used with an input or output Surface.</li> 115 <li><strong>flexible YUV buffers</strong> (such as {@link 116 CodecCapabilities#COLOR_FormatYUV420Flexible}): These can be used with an input/output Surface, 117 as well as in ByteBuffer mode, by using {@link #getInputImage getInput}/{@link #getOutputImage 118 OutputImage(int)}.</li> 119 <li><strong>other, specific formats:</strong> These are normally only supported in ByteBuffer 120 mode. Some color formats are vendor specific. Others are defined in {@link CodecCapabilities}. 121 For color formats that are equivalent to a flexible format, you can still use {@link 122 #getInputImage getInput}/{@link #getOutputImage OutputImage(int)}.</li> 123 </ul> 124 <p> 125 All video codecs support flexible YUV 4:2:0 buffers since {@link 126 android.os.Build.VERSION_CODES#LOLLIPOP_MR1}. 127 128 <h3>States</h3> 129 <p> 130 During its life a codec conceptually exists in one of three states: Stopped, Executing or 131 Released. The Stopped collective state is actually the conglomeration of three states: 132 Uninitialized, Configured and Error, whereas the Executing state conceptually progresses through 133 three sub-states: Flushed, Running and End-of-Stream. 134 <p> 135 <center><object style="width: 516px; height: 353px;" type="image/svg+xml" 136 data="../../../images/media/mediacodec_states.svg"><img 137 src="../../../images/media/mediacodec_states.png" style="width: 519px; height: 356px" 138 alt="MediaCodec state diagram"></object></center> 139 <p> 140 When you create a codec using one of the factory methods, the codec is in the Uninitialized 141 state. First, you need to configure it via {@link #configure configure(…)}, which brings 142 it to the Configured state, then call {@link #start} to move it to the Executing state. 
In this 143 state you can process data through the buffer queue manipulation described above. 144 <p> 145 The Executing state has three sub-states: Flushed, Running and End-of-Stream. Immediately after 146 {@link #start} the codec is in the Flushed sub-state, where it holds all the buffers. As soon 147 as the first input buffer is dequeued, the codec moves to the Running sub-state, where it spends 148 most of its life. When you queue an input buffer with the {@linkplain #BUFFER_FLAG_END_OF_STREAM 149 end-of-stream marker}, the codec transitions to the End-of-Stream sub-state. In this state the 150 codec no longer accepts further input buffers, but still generates output buffers until the 151 end-of-stream is reached on the output. You can move back to the Flushed sub-state at any time 152 while in the Executing state using {@link #flush}. 153 <p> 154 Call {@link #stop} to return the codec to the Uninitialized state, whereupon it may be configured 155 again. When you are done using a codec, you must release it by calling {@link #release}. 156 <p> 157 On rare occasions the codec may encounter an error and move to the Error state. This is 158 communicated using an invalid return value from a queuing operation, or sometimes via an 159 exception. Call {@link #reset} to make the codec usable again. You can call it from any state to 160 move the codec back to the Uninitialized state. Otherwise, call {@link #release} to move to the 161 terminal Released state. 162 163 <h3>Creation</h3> 164 <p> 165 Use {@link MediaCodecList} to create a MediaCodec for a specific {@link MediaFormat}. When 166 decoding a file or a stream, you can get the desired format from {@link 167 MediaExtractor#getTrackFormat MediaExtractor.getTrackFormat}. 
Inject any specific features that 168 you want to add using {@link MediaFormat#setFeatureEnabled MediaFormat.setFeatureEnabled}, then 169 call {@link MediaCodecList#findDecoderForFormat MediaCodecList.findDecoderForFormat} to get the 170 name of a codec that can handle that specific media format. Finally, create the codec using 171 {@link #createByCodecName}. 172 <p class=note> 173 <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the format to 174 {@code MediaCodecList.findDecoder}/{@code EncoderForFormat} must not contain a {@linkplain 175 MediaFormat#KEY_FRAME_RATE frame rate}. Use 176 <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code> 177 to clear any existing frame rate setting in the format. 178 <p> 179 You can also create the preferred codec for a specific MIME type using {@link 180 #createDecoderByType createDecoder}/{@link #createEncoderByType EncoderByType(String)}. 181 This, however, cannot be used to inject features, and may create a codec that cannot handle the 182 specific desired media format. 183 184 <h4>Creating secure decoders</h4> 185 <p> 186 On versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and earlier, secure codecs might 187 not be listed in {@link MediaCodecList}, but may still be available on the system. Secure codecs 188 that exist can be instantiated by name only, by appending {@code ".secure"} to the name of a 189 regular codec (the name of all secure codecs must end in {@code ".secure"}.) {@link 190 #createByCodecName} will throw an {@code IOException} if the codec is not present on the system. 191 <p> 192 From {@link android.os.Build.VERSION_CODES#LOLLIPOP} onwards, you should use the {@link 193 CodecCapabilities#FEATURE_SecurePlayback} feature in the media format to create a secure decoder. 194 195 <h3>Initialization</h3> 196 <p> 197 After creating the codec, you can set a callback using {@link #setCallback setCallback} if you 198 want to process data asynchronously. 
Then, {@linkplain #configure configure} the codec using the 199 specific media format. This is when you can specify the output {@link Surface} for video 200 producers – codecs that generate raw video data (e.g. video decoders). This is also when 201 you can set the decryption parameters for secure codecs (see {@link MediaCrypto}). Finally, since 202 some codecs can operate in multiple modes, you must specify whether you want it to work as a 203 decoder or an encoder. 204 <p> 205 Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you can query the resulting input and 206 output format in the Configured state. You can use this to verify the resulting configuration, 207 e.g. color formats, before starting the codec. 208 <p> 209 If you want to process raw input video buffers natively with a video consumer – a codec 210 that processes raw video input, such as a video encoder – create a destination Surface for 211 your input data using {@link #createInputSurface} after configuration. Alternately, set up the 212 codec to use a previously created {@linkplain #createPersistentInputSurface persistent input 213 surface} by calling {@link #setInputSurface}. 214 215 <h4 id=CSD><a name="CSD"></a>Codec-specific Data</h4> 216 <p> 217 Some formats, notably AAC audio and MPEG4, H.264 and H.265 video formats require the actual data 218 to be prefixed by a number of buffers containing setup data, or codec specific data. When 219 processing such compressed formats, this data must be submitted to the codec after {@link 220 #start} and before any frame data. Such data must be marked using the flag {@link 221 #BUFFER_FLAG_CODEC_CONFIG} in a call to {@link #queueInputBuffer queueInputBuffer}. 222 <p> 223 Codec-specific data can also be included in the format passed to {@link #configure configure} in 224 ByteBuffer entries with keys "csd-0", "csd-1", etc. 
These keys are always included in the track 225 {@link MediaFormat} obtained from the {@link MediaExtractor#getTrackFormat MediaExtractor}. 226 Codec-specific data in the format is automatically submitted to the codec upon {@link #start}; 227 you <strong>MUST NOT</strong> submit this data explicitly. If the format did not contain codec 228 specific data, you can choose to submit it using the specified number of buffers in the correct 229 order, according to the format requirements. Alternately, you can concatenate all codec-specific 230 data and submit it as a single codec-config buffer. 231 <p> 232 Android uses the following codec-specific data buffers. These are also required to be set in 233 the track format for proper {@link MediaMuxer} track configuration. Each parameter set and the 234 codec-specific-data sections marked with (<sup>*</sup>) must start with a start code of 235 {@code "\x00\x00\x00\x01"}. 236 <p> 237 <style>td.NA { background: #ccc; } .mid > tr > td { vertical-align: middle; }</style> 238 <table> 239 <thead> 240 <th>Format</th> 241 <th>CSD buffer #0</th> 242 <th>CSD buffer #1</th> 243 <th>CSD buffer #2</th> 244 </thead> 245 <tbody class=mid> 246 <tr> 247 <td>AAC</td> 248 <td>Decoder-specific information from ESDS<sup>*</sup></td> 249 <td class=NA>Not Used</td> 250 <td class=NA>Not Used</td> 251 </tr> 252 <tr> 253 <td>VORBIS</td> 254 <td>Identification header</td> 255 <td>Setup header</td> 256 <td class=NA>Not Used</td> 257 </tr> 258 <tr> 259 <td>OPUS</td> 260 <td>Identification header</td> 261 <td>Pre-skip in nanosecs<br> 262 (unsigned 64-bit {@linkplain ByteOrder#nativeOrder native-order} integer.)<br> 263 This overrides the pre-skip value in the identification header.</td> 264 <td>Seek Pre-roll in nanosecs<br> 265 (unsigned 64-bit {@linkplain ByteOrder#nativeOrder native-order} integer.)</td> 266 </tr> 267 <tr> 268 <td>MPEG-4</td> 269 <td>Decoder-specific information from ESDS<sup>*</sup></td> 270 <td class=NA>Not Used</td> 271 <td 
class=NA>Not Used</td> 272 </tr> 273 <tr> 274 <td>H.264 AVC</td> 275 <td>SPS (Sequence Parameter Sets<sup>*</sup>)</td> 276 <td>PPS (Picture Parameter Sets<sup>*</sup>)</td> 277 <td class=NA>Not Used</td> 278 </tr> 279 <tr> 280 <td>H.265 HEVC</td> 281 <td>VPS (Video Parameter Sets<sup>*</sup>) +<br> 282 SPS (Sequence Parameter Sets<sup>*</sup>) +<br> 283 PPS (Picture Parameter Sets<sup>*</sup>)</td> 284 <td class=NA>Not Used</td> 285 <td class=NA>Not Used</td> 286 </tr> 287 </tbody> 288 </table> 289 290 <p class=note> 291 <strong>Note:</strong> care must be taken if the codec is flushed immediately or shortly 292 after start, before any output buffer or output format change has been returned, as the codec 293 specific data may be lost during the flush. You must resubmit the data using buffers marked with 294 {@link #BUFFER_FLAG_CODEC_CONFIG} after such flush to ensure proper codec operation. 295 <p> 296 Encoders (or codecs that generate compressed data) will create and return the codec specific data 297 before any valid output buffer in output buffers marked with the {@linkplain 298 #BUFFER_FLAG_CODEC_CONFIG codec-config flag}. Buffers containing codec-specific-data have no 299 meaningful timestamps. 300 301 <h3>Data Processing</h3> 302 <p> 303 Each codec maintains a set of input and output buffers that are referred to by a buffer-ID in 304 API calls. After a successful call to {@link #start} the client "owns" neither input nor output 305 buffers. In synchronous mode, call {@link #dequeueInputBuffer dequeueInput}/{@link 306 #dequeueOutputBuffer OutputBuffer(…)} to obtain (get ownership of) an input or output 307 buffer from the codec. In asynchronous mode, you will automatically receive available buffers via 308 the {@link Callback#onInputBufferAvailable MediaCodec.Callback.onInput}/{@link 309 Callback#onOutputBufferAvailable OutputBufferAvailable(…)} callbacks. 
310 <p> 311 Upon obtaining an input buffer, fill it with data and submit it to the codec using {@link 312 #queueInputBuffer queueInputBuffer} &ndash; or {@link #queueSecureInputBuffer 313 queueSecureInputBuffer} if using decryption. Do not submit multiple input buffers with the same 314 timestamp (unless it is <a href="#CSD">codec-specific data</a> marked as such). 315 <p> 316 The codec in turn will return a read-only output buffer via the {@link 317 Callback#onOutputBufferAvailable onOutputBufferAvailable} callback in asynchronous mode, or in 318 response to a {@link #dequeueOutputBuffer dequeueOutputBuffer} call in synchronous mode. After the 319 output buffer has been processed, call one of the {@link #releaseOutputBuffer 320 releaseOutputBuffer} methods to return the buffer to the codec. 321 <p> 322 While you are not required to resubmit/release buffers immediately to the codec, holding onto 323 input and/or output buffers may stall the codec, and this behavior is device dependent. 324 <strong>Specifically, it is possible that a codec may hold off on generating output buffers until 325 <em>all</em> outstanding buffers have been released/resubmitted.</strong> Therefore, try to 326 hold onto available buffers as little as possible. 
327 <p> 328 Depending on the API version, you can process data in three ways: 329 <table> 330 <thead> 331 <tr> 332 <th>Processing Mode</th> 333 <th>API version <= 20<br>Jelly Bean/KitKat</th> 334 <th>API version >= 21<br>Lollipop and later</th> 335 </tr> 336 </thead> 337 <tbody> 338 <tr> 339 <td>Synchronous API using buffer arrays</td> 340 <td>Supported</td> 341 <td>Deprecated</td> 342 </tr> 343 <tr> 344 <td>Synchronous API using buffers</td> 345 <td class=NA>Not Available</td> 346 <td>Supported</td> 347 </tr> 348 <tr> 349 <td>Asynchronous API using buffers</td> 350 <td class=NA>Not Available</td> 351 <td>Supported</td> 352 </tr> 353 </tbody> 354 </table> 355 356 <h4>Asynchronous Processing using Buffers</h4> 357 <p> 358 Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the preferred method is to process data 359 asynchronously by setting a callback before calling {@link #configure configure}. Asynchronous 360 mode changes the state transitions slightly, because you must call {@link #start} after {@link 361 #flush} to transition the codec to the Running sub-state and start receiving input buffers. 362 Similarly, upon an initial call to {@code start} the codec will move directly to the Running 363 sub-state and start passing available input buffers via the callback. 
364 <p> 365 <center><object style="width: 516px; height: 353px;" type="image/svg+xml" 366 data="../../../images/media/mediacodec_async_states.svg"><img 367 src="../../../images/media/mediacodec_async_states.png" style="width: 516px; height: 353px" 368 alt="MediaCodec state diagram for asynchronous operation"></object></center> 369 <p> 370 MediaCodec is typically used like this in asynchronous mode: 371 <pre class=prettyprint> 372 MediaCodec codec = MediaCodec.createByCodecName(name); 373 MediaFormat mOutputFormat; // member variable 374 codec.setCallback(new MediaCodec.Callback() { 375 {@literal @Override} 376 void onInputBufferAvailable(MediaCodec mc, int inputBufferId) { 377 ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId); 378 // fill inputBuffer with valid data 379 … 380 codec.queueInputBuffer(inputBufferId, …); 381 } 382 383 {@literal @Override} 384 void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) { 385 ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId); 386 MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A 387 // bufferFormat is equivalent to mOutputFormat 388 // outputBuffer is ready to be processed or rendered. 389 … 390 codec.releaseOutputBuffer(outputBufferId, …); 391 } 392 393 {@literal @Override} 394 void onOutputFormatChanged(MediaCodec mc, MediaFormat format) { 395 // Subsequent data will conform to new format. 
396 // Can ignore if using getOutputFormat(outputBufferId) 397 mOutputFormat = format; // option B 398 } 399 400 {@literal @Override} 401 void onError(…) { 402 … 403 } 404 }); 405 codec.configure(format, …); 406 mOutputFormat = codec.getOutputFormat(); // option B 407 codec.start(); 408 // wait for processing to complete 409 codec.stop(); 410 codec.release();</pre> 411 412 <h4>Synchronous Processing using Buffers</h4> 413 <p> 414 Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you should retrieve input and output 415 buffers using {@link #getInputBuffer getInput}/{@link #getOutputBuffer OutputBuffer(int)} and/or 416 {@link #getInputImage getInput}/{@link #getOutputImage OutputImage(int)} even when using the 417 codec in synchronous mode. This allows certain optimizations by the framework, e.g. when 418 processing dynamic content. This optimization is disabled if you call {@link #getInputBuffers 419 getInput}/{@link #getOutputBuffers OutputBuffers()}. 420 421 <p class=note> 422 <strong>Note:</strong> do not mix the methods of using buffers and buffer arrays at the same 423 time. Specifically, only call {@code getInput}/{@code OutputBuffers} directly after {@link 424 #start} or after having dequeued an output buffer ID with the value of {@link 425 #INFO_OUTPUT_FORMAT_CHANGED}. 
426 <p> 427 MediaCodec is typically used like this in synchronous mode: 428 <pre> 429 MediaCodec codec = MediaCodec.createByCodecName(name); 430 codec.configure(format, …); 431 MediaFormat outputFormat = codec.getOutputFormat(); // option B 432 codec.start(); 433 for (;;) { 434 int inputBufferId = codec.dequeueInputBuffer(timeoutUs); 435 if (inputBufferId >= 0) { 436 ByteBuffer inputBuffer = codec.getInputBuffer(…); 437 // fill inputBuffer with valid data 438 … 439 codec.queueInputBuffer(inputBufferId, …); 440 } 441 int outputBufferId = codec.dequeueOutputBuffer(…); 442 if (outputBufferId >= 0) { 443 ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId); 444 MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A 445 // bufferFormat is identical to outputFormat 446 // outputBuffer is ready to be processed or rendered. 447 … 448 codec.releaseOutputBuffer(outputBufferId, …); 449 } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 450 // Subsequent data will conform to new format. 451 // Can ignore if using getOutputFormat(outputBufferId) 452 outputFormat = codec.getOutputFormat(); // option B 453 } 454 } 455 codec.stop(); 456 codec.release();</pre> 457 458 <h4>Synchronous Processing using Buffer Arrays (deprecated)</h4> 459 <p> 460 In versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and before, the set of input and 461 output buffers are represented by the {@code ByteBuffer[]} arrays. After a successful call to 462 {@link #start}, retrieve the buffer arrays using {@link #getInputBuffers getInput}/{@link 463 #getOutputBuffers OutputBuffers()}. Use the buffer ID-s as indices into these arrays (when 464 non-negative), as demonstrated in the sample below. Note that there is no inherent correlation 465 between the size of the arrays and the number of input and output buffers used by the system, 466 although the array size provides an upper bound. 
467 <pre> 468 MediaCodec codec = MediaCodec.createByCodecName(name); 469 codec.configure(format, …); 470 codec.start(); 471 ByteBuffer[] inputBuffers = codec.getInputBuffers(); 472 ByteBuffer[] outputBuffers = codec.getOutputBuffers(); 473 for (;;) { 474 int inputBufferId = codec.dequeueInputBuffer(…); 475 if (inputBufferId >= 0) { 476 // fill inputBuffers[inputBufferId] with valid data 477 … 478 codec.queueInputBuffer(inputBufferId, …); 479 } 480 int outputBufferId = codec.dequeueOutputBuffer(…); 481 if (outputBufferId >= 0) { 482 // outputBuffers[outputBufferId] is ready to be processed or rendered. 483 … 484 codec.releaseOutputBuffer(outputBufferId, …); 485 } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 486 outputBuffers = codec.getOutputBuffers(); 487 } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 488 // Subsequent data will conform to new format. 489 MediaFormat format = codec.getOutputFormat(); 490 } 491 } 492 codec.stop(); 493 codec.release();</pre> 494 495 <h4>End-of-stream Handling</h4> 496 <p> 497 When you reach the end of the input data, you must signal it to the codec by specifying the 498 {@link #BUFFER_FLAG_END_OF_STREAM} flag in the call to {@link #queueInputBuffer 499 queueInputBuffer}. You can do this on the last valid input buffer, or by submitting an additional 500 empty input buffer with the end-of-stream flag set. If using an empty buffer, the timestamp will 501 be ignored. 502 <p> 503 The codec will continue to return output buffers until it eventually signals the end of the 504 output stream by specifying the same end-of-stream flag in the {@link BufferInfo} set in {@link 505 #dequeueOutputBuffer dequeueOutputBuffer} or returned via {@link Callback#onOutputBufferAvailable 506 onOutputBufferAvailable}. This can be set on the last valid output buffer, or on an empty buffer 507 after the last valid output buffer. The timestamp of such empty buffer should be ignored. 
508 <p> 509 Do not submit additional input buffers after signaling the end of the input stream, unless the 510 codec has been flushed, or stopped and restarted. 511 512 <h4>Using an Output Surface</h4> 513 <p> 514 The data processing is nearly identical to the ByteBuffer mode when using an output {@link 515 Surface}; however, the output buffers will not be accessible, and are represented as {@code null} 516 values. E.g. {@link #getOutputBuffer getOutputBuffer}/{@link #getOutputImage Image(int)} will 517 return {@code null} and {@link #getOutputBuffers} will return an array containing only {@code 518 null}-s. 519 <p> 520 When using an output Surface, you can select whether or not to render each output buffer on the 521 surface. You have three choices: 522 <ul> 523 <li><strong>Do not render the buffer:</strong> Call {@link #releaseOutputBuffer(int, boolean) 524 releaseOutputBuffer(bufferId, false)}.</li> 525 <li><strong>Render the buffer with the default timestamp:</strong> Call {@link 526 #releaseOutputBuffer(int, boolean) releaseOutputBuffer(bufferId, true)}.</li> 527 <li><strong>Render the buffer with a specific timestamp:</strong> Call {@link 528 #releaseOutputBuffer(int, long) releaseOutputBuffer(bufferId, timestamp)}.</li> 529 </ul> 530 <p> 531 Since {@link android.os.Build.VERSION_CODES#M}, the default timestamp is the {@linkplain 532 BufferInfo#presentationTimeUs presentation timestamp} of the buffer (converted to nanoseconds). 533 It was not defined prior to that. 534 <p> 535 Also since {@link android.os.Build.VERSION_CODES#M}, you can change the output Surface 536 dynamically using {@link #setOutputSurface setOutputSurface}. 537 538 <h4>Using an Input Surface</h4> 539 <p> 540 When using an input Surface, there are no accessible input buffers, as buffers are automatically 541 passed from the input surface to the codec. 
Calling {@link #dequeueInputBuffer 542 dequeueInputBuffer} will throw an {@code IllegalStateException}, and {@link #getInputBuffers} 543 returns a bogus {@code ByteBuffer[]} array that <strong>MUST NOT</strong> be written into. 544 <p> 545 Call {@link #signalEndOfInputStream} to signal end-of-stream. The input surface will stop 546 submitting data to the codec immediately after this call. 547 <p> 548 549 <h3>Seeking &amp; Adaptive Playback Support</h3> 550 <p> 551 Video decoders (and in general codecs that consume compressed video data) behave differently 552 regarding seek and format change whether or not they support and are configured for adaptive 553 playback. You can check if a decoder supports {@linkplain 554 CodecCapabilities#FEATURE_AdaptivePlayback adaptive playback} via {@link 555 CodecCapabilities#isFeatureSupported CodecCapabilities.isFeatureSupported(String)}. Adaptive 556 playback support for video decoders is only activated if you configure the codec to decode onto a 557 {@link Surface}. 558 559 <h4 id=KeyFrames><a name="KeyFrames"></a>Stream Boundary and Key Frames</h4> 560 <p> 561 It is important that the input data after {@link #start} or {@link #flush} starts at a suitable 562 stream boundary: the first frame must be a key frame. A <em>key frame</em> can be decoded 563 completely on its own (for most codecs this means an I-frame), and no frames that are to be 564 displayed after a key frame refer to frames before the key frame. 565 <p> 566 The following table summarizes suitable key frames for various video formats. 
567 <table> 568 <thead> 569 <tr> 570 <th>Format</th> 571 <th>Suitable key frame</th> 572 </tr> 573 </thead> 574 <tbody class=mid> 575 <tr> 576 <td>VP9/VP8</td> 577 <td>a suitable intraframe where no subsequent frames refer to frames prior to this frame.<br> 578 <i>(There is no specific name for such key frame.)</i></td> 579 </tr> 580 <tr> 581 <td>H.265 HEVC</td> 582 <td>IDR or CRA</td> 583 </tr> 584 <tr> 585 <td>H.264 AVC</td> 586 <td>IDR</td> 587 </tr> 588 <tr> 589 <td>MPEG-4<br>H.263<br>MPEG-2</td> 590 <td>a suitable I-frame where no subsequent frames refer to frames prior to this frame.<br> 591 <i>(There is no specific name for such key frame.)</i></td> 592 </tr> 593 </tbody> 594 </table> 595 596 <h4>For decoders that do not support adaptive playback (including when not decoding onto a 597 Surface)</h4> 598 <p> 599 In order to start decoding data that is not adjacent to previously submitted data (i.e. after a 600 seek) you <strong>MUST</strong> flush the decoder. Since all output buffers are immediately 601 revoked at the point of the flush, you may want to first signal then wait for the end-of-stream 602 before you call {@code flush}. It is important that the input data after a flush starts at a 603 suitable stream boundary/key frame. 604 <p class=note> 605 <strong>Note:</strong> the format of the data submitted after a flush must not change; {@link 606 #flush} does not support format discontinuities; for that, a full {@link #stop} - {@link 607 #configure configure(&hellip;)} - {@link #start} cycle is necessary. 608 609 <p class=note> 610 <strong>Also note:</strong> if you flush the codec too soon after {@link #start} &ndash; 611 generally, before the first output buffer or output format change is received &ndash; you 612 will need to resubmit the codec-specific-data to the codec. See the <a 613 href="#CSD">codec-specific-data section</a> for more info. 
614 615 <h4>For decoders that support and are configured for adaptive playback</h4> 616 <p> 617 In order to start decoding data that is not adjacent to previously submitted data (i.e. after a 618 seek) it is <em>not necessary</em> to flush the decoder; however, input data after the 619 discontinuity must start at a suitable stream boundary/key frame. 620 <p> 621 For some video formats - namely H.264, H.265, VP8 and VP9 - it is also possible to change the 622 picture size or configuration mid-stream. To do this you must package the entire new 623 codec-specific configuration data together with the key frame into a single buffer (including 624 any start codes), and submit it as a <strong>regular</strong> input buffer. 625 <p> 626 You will receive an {@link #INFO_OUTPUT_FORMAT_CHANGED} return value from {@link 627 #dequeueOutputBuffer dequeueOutputBuffer} or a {@link Callback#onOutputFormatChanged 628 onOutputFormatChanged} callback just after the picture-size change takes place and before any 629 frames with the new size have been returned. 630 <p class=note> 631 <strong>Note:</strong> just as the case for codec-specific data, be careful when calling 632 {@link #flush} shortly after you have changed the picture size. If you have not received 633 confirmation of the picture size change, you will need to repeat the request for the new picture 634 size. 635 636 <h3>Error handling</h3> 637 <p> 638 The factory methods {@link #createByCodecName createByCodecName} and {@link #createDecoderByType 639 createDecoder}/{@link #createEncoderByType EncoderByType} throw {@code IOException} on failure 640 which you must catch or declare to pass up. MediaCodec methods throw {@code 641 IllegalStateException} when the method is called from a codec state that does not allow it; this 642 is typically due to incorrect application API usage. 
Methods involving secure buffers may throw 643 {@link CryptoException}, which has further error information obtainable from {@link 644 CryptoException#getErrorCode}. 645 <p> 646 Internal codec errors result in a {@link CodecException}, which may be due to media content 647 corruption, hardware failure, resource exhaustion, and so forth, even when the application is 648 correctly using the API. The recommended action when receiving a {@code CodecException} 649 can be determined by calling {@link CodecException#isRecoverable} and {@link 650 CodecException#isTransient}: 651 <ul> 652 <li><strong>recoverable errors:</strong> If {@code isRecoverable()} returns true, then call 653 {@link #stop}, {@link #configure configure(…)}, and {@link #start} to recover.</li> 654 <li><strong>transient errors:</strong> If {@code isTransient()} returns true, then resources are 655 temporarily unavailable and the method may be retried at a later time.</li> 656 <li><strong>fatal errors:</strong> If both {@code isRecoverable()} and {@code isTransient()} 657 return false, then the {@code CodecException} is fatal and the codec must be {@linkplain #reset 658 reset} or {@linkplain #release released}.</li> 659 </ul> 660 <p> 661 Both {@code isRecoverable()} and {@code isTransient()} do not return true at the same time. 662 663 <h2 id=History><a name="History"></a>Valid API Calls and API History</h2> 664 <p> 665 This section summarizes the valid API calls in each state and the API history of the MediaCodec 666 class. For API version numbers, see {@link android.os.Build.VERSION_CODES}.
667 668 <style> 669 .api > tr > th, .api > tr > td { text-align: center; padding: 4px 4px; } 670 .api > tr > th { vertical-align: bottom; } 671 .api > tr > td { vertical-align: middle; } 672 .sml > tr > th, .sml > tr > td { text-align: center; padding: 2px 4px; } 673 .fn { text-align: left; } 674 .fn > code > a { font: 14px/19px Roboto Condensed, sans-serif; } 675 .deg45 { 676 white-space: nowrap; background: none; border: none; vertical-align: bottom; 677 width: 30px; height: 83px; 678 } 679 .deg45 > div { 680 transform: skew(-45deg, 0deg) translate(1px, -67px); 681 transform-origin: bottom left 0; 682 width: 30px; height: 20px; 683 } 684 .deg45 > div > div { border: 1px solid #ddd; background: #999; height: 90px; width: 42px; } 685 .deg45 > div > div > div { transform: skew(45deg, 0deg) translate(-55px, 55px) rotate(-45deg); } 686 </style> 687 688 <table align="right" style="width: 0%"> 689 <thead> 690 <tr><th>Symbol</th><th>Meaning</th></tr> 691 </thead> 692 <tbody class=sml> 693 <tr><td>●</td><td>Supported</td></tr> 694 <tr><td>⁕</td><td>Semantics changed</td></tr> 695 <tr><td>○</td><td>Experimental support</td></tr> 696 <tr><td>[ ]</td><td>Deprecated</td></tr> 697 <tr><td>⎋</td><td>Restricted to surface input mode</td></tr> 698 <tr><td>⎆</td><td>Restricted to surface output mode</td></tr> 699 <tr><td>▧</td><td>Restricted to ByteBuffer input mode</td></tr> 700 <tr><td>↩</td><td>Restricted to synchronous mode</td></tr> 701 <tr><td>⇄</td><td>Restricted to asynchronous mode</td></tr> 702 <tr><td>( )</td><td>Can be called, but shouldn't</td></tr> 703 </tbody> 704 </table> 705 706 <table style="width: 100%;"> 707 <thead class=api> 708 <tr> 709 <th class=deg45><div><div style="background:#4285f4"><div>Uninitialized</div></div></div></th> 710 <th class=deg45><div><div style="background:#f4b400"><div>Configured</div></div></div></th> 711 <th class=deg45><div><div style="background:#e67c73"><div>Flushed</div></div></div></th> 712 <th class=deg45><div><div 
style="background:#0f9d58"><div>Running</div></div></div></th> 713 <th class=deg45><div><div style="background:#f7cb4d"><div>End of Stream</div></div></div></th> 714 <th class=deg45><div><div style="background:#db4437"><div>Error</div></div></div></th> 715 <th class=deg45><div><div style="background:#666"><div>Released</div></div></div></th> 716 <th></th> 717 <th colspan="8">SDK Version</th> 718 </tr> 719 <tr> 720 <th colspan="7">State</th> 721 <th>Method</th> 722 <th>16</th> 723 <th>17</th> 724 <th>18</th> 725 <th>19</th> 726 <th>20</th> 727 <th>21</th> 728 <th>22</th> 729 <th>23</th> 730 </tr> 731 </thead> 732 <tbody class=api> 733 <tr> 734 <td></td> 735 <td></td> 736 <td></td> 737 <td></td> 738 <td></td> 739 <td></td> 740 <td></td> 741 <td class=fn>{@link #createByCodecName createByCodecName}</td> 742 <td>●</td> 743 <td>●</td> 744 <td>●</td> 745 <td>●</td> 746 <td>●</td> 747 <td>●</td> 748 <td>●</td> 749 <td>●</td> 750 </tr> 751 <tr> 752 <td></td> 753 <td></td> 754 <td></td> 755 <td></td> 756 <td></td> 757 <td></td> 758 <td></td> 759 <td class=fn>{@link #createDecoderByType createDecoderByType}</td> 760 <td>●</td> 761 <td>●</td> 762 <td>●</td> 763 <td>●</td> 764 <td>●</td> 765 <td>●</td> 766 <td>●</td> 767 <td>●</td> 768 </tr> 769 <tr> 770 <td></td> 771 <td></td> 772 <td></td> 773 <td></td> 774 <td></td> 775 <td></td> 776 <td></td> 777 <td class=fn>{@link #createEncoderByType createEncoderByType}</td> 778 <td>●</td> 779 <td>●</td> 780 <td>●</td> 781 <td>●</td> 782 <td>●</td> 783 <td>●</td> 784 <td>●</td> 785 <td>●</td> 786 </tr> 787 <tr> 788 <td></td> 789 <td></td> 790 <td></td> 791 <td></td> 792 <td></td> 793 <td></td> 794 <td></td> 795 <td class=fn>{@link #createPersistentInputSurface createPersistentInputSurface}</td> 796 <td></td> 797 <td></td> 798 <td></td> 799 <td></td> 800 <td></td> 801 <td></td> 802 <td></td> 803 <td>●</td> 804 </tr> 805 <tr> 806 <td>16+</td> 807 <td>-</td> 808 <td>-</td> 809 <td>-</td> 810 <td>-</td> 811 <td>-</td> 812 <td>-</td> 813 
<td class=fn>{@link #configure configure}</td> 814 <td>●</td> 815 <td>●</td> 816 <td>●</td> 817 <td>●</td> 818 <td>●</td> 819 <td>⁕</td> 820 <td>●</td> 821 <td>●</td> 822 </tr> 823 <tr> 824 <td>-</td> 825 <td>18+</td> 826 <td>-</td> 827 <td>-</td> 828 <td>-</td> 829 <td>-</td> 830 <td>-</td> 831 <td class=fn>{@link #createInputSurface createInputSurface}</td> 832 <td></td> 833 <td></td> 834 <td>⎋</td> 835 <td>⎋</td> 836 <td>⎋</td> 837 <td>⎋</td> 838 <td>⎋</td> 839 <td>⎋</td> 840 </tr> 841 <tr> 842 <td>-</td> 843 <td>-</td> 844 <td>16+</td> 845 <td>16+</td> 846 <td>(16+)</td> 847 <td>-</td> 848 <td>-</td> 849 <td class=fn>{@link #dequeueInputBuffer dequeueInputBuffer}</td> 850 <td>●</td> 851 <td>●</td> 852 <td>▧</td> 853 <td>▧</td> 854 <td>▧</td> 855 <td>⁕▧↩</td> 856 <td>▧↩</td> 857 <td>▧↩</td> 858 </tr> 859 <tr> 860 <td>-</td> 861 <td>-</td> 862 <td>16+</td> 863 <td>16+</td> 864 <td>16+</td> 865 <td>-</td> 866 <td>-</td> 867 <td class=fn>{@link #dequeueOutputBuffer dequeueOutputBuffer}</td> 868 <td>●</td> 869 <td>●</td> 870 <td>●</td> 871 <td>●</td> 872 <td>●</td> 873 <td>⁕↩</td> 874 <td>↩</td> 875 <td>↩</td> 876 </tr> 877 <tr> 878 <td>-</td> 879 <td>-</td> 880 <td>16+</td> 881 <td>16+</td> 882 <td>16+</td> 883 <td>-</td> 884 <td>-</td> 885 <td class=fn>{@link #flush flush}</td> 886 <td>●</td> 887 <td>●</td> 888 <td>●</td> 889 <td>●</td> 890 <td>●</td> 891 <td>●</td> 892 <td>●</td> 893 <td>●</td> 894 </tr> 895 <tr> 896 <td>18+</td> 897 <td>18+</td> 898 <td>18+</td> 899 <td>18+</td> 900 <td>18+</td> 901 <td>18+</td> 902 <td>-</td> 903 <td class=fn>{@link #getCodecInfo getCodecInfo}</td> 904 <td></td> 905 <td></td> 906 <td>●</td> 907 <td>●</td> 908 <td>●</td> 909 <td>●</td> 910 <td>●</td> 911 <td>●</td> 912 </tr> 913 <tr> 914 <td>-</td> 915 <td>-</td> 916 <td>(21+)</td> 917 <td>21+</td> 918 <td>(21+)</td> 919 <td>-</td> 920 <td>-</td> 921 <td class=fn>{@link #getInputBuffer getInputBuffer}</td> 922 <td></td> 923 <td></td> 924 <td></td> 925 <td></td> 926 <td></td> 927 
<td>●</td> 928 <td>●</td> 929 <td>●</td> 930 </tr> 931 <tr> 932 <td>-</td> 933 <td>-</td> 934 <td>16+</td> 935 <td>(16+)</td> 936 <td>(16+)</td> 937 <td>-</td> 938 <td>-</td> 939 <td class=fn>{@link #getInputBuffers getInputBuffers}</td> 940 <td>●</td> 941 <td>●</td> 942 <td>●</td> 943 <td>●</td> 944 <td>●</td> 945 <td>[⁕↩]</td> 946 <td>[↩]</td> 947 <td>[↩]</td> 948 </tr> 949 <tr> 950 <td>-</td> 951 <td>21+</td> 952 <td>(21+)</td> 953 <td>(21+)</td> 954 <td>(21+)</td> 955 <td>-</td> 956 <td>-</td> 957 <td class=fn>{@link #getInputFormat getInputFormat}</td> 958 <td></td> 959 <td></td> 960 <td></td> 961 <td></td> 962 <td></td> 963 <td>●</td> 964 <td>●</td> 965 <td>●</td> 966 </tr> 967 <tr> 968 <td>-</td> 969 <td>-</td> 970 <td>(21+)</td> 971 <td>21+</td> 972 <td>(21+)</td> 973 <td>-</td> 974 <td>-</td> 975 <td class=fn>{@link #getInputImage getInputImage}</td> 976 <td></td> 977 <td></td> 978 <td></td> 979 <td></td> 980 <td></td> 981 <td>○</td> 982 <td>●</td> 983 <td>●</td> 984 </tr> 985 <tr> 986 <td>18+</td> 987 <td>18+</td> 988 <td>18+</td> 989 <td>18+</td> 990 <td>18+</td> 991 <td>18+</td> 992 <td>-</td> 993 <td class=fn>{@link #getName getName}</td> 994 <td></td> 995 <td></td> 996 <td>●</td> 997 <td>●</td> 998 <td>●</td> 999 <td>●</td> 1000 <td>●</td> 1001 <td>●</td> 1002 </tr> 1003 <tr> 1004 <td>-</td> 1005 <td>-</td> 1006 <td>(21+)</td> 1007 <td>21+</td> 1008 <td>21+</td> 1009 <td>-</td> 1010 <td>-</td> 1011 <td class=fn>{@link #getOutputBuffer getOutputBuffer}</td> 1012 <td></td> 1013 <td></td> 1014 <td></td> 1015 <td></td> 1016 <td></td> 1017 <td>●</td> 1018 <td>●</td> 1019 <td>●</td> 1020 </tr> 1021 <tr> 1022 <td>-</td> 1023 <td>-</td> 1024 <td>16+</td> 1025 <td>16+</td> 1026 <td>16+</td> 1027 <td>-</td> 1028 <td>-</td> 1029 <td class=fn>{@link #getOutputBuffers getOutputBuffers}</td> 1030 <td>●</td> 1031 <td>●</td> 1032 <td>●</td> 1033 <td>●</td> 1034 <td>●</td> 1035 <td>[⁕↩]</td> 1036 <td>[↩]</td> 1037 <td>[↩]</td> 1038 </tr> 1039 <tr> 1040 <td>-</td> 1041 
<td>21+</td> 1042 <td>16+</td> 1043 <td>16+</td> 1044 <td>16+</td> 1045 <td>-</td> 1046 <td>-</td> 1047 <td class=fn>{@link #getOutputFormat()}</td> 1048 <td>●</td> 1049 <td>●</td> 1050 <td>●</td> 1051 <td>●</td> 1052 <td>●</td> 1053 <td>●</td> 1054 <td>●</td> 1055 <td>●</td> 1056 </tr> 1057 <tr> 1058 <td>-</td> 1059 <td>-</td> 1060 <td>(21+)</td> 1061 <td>21+</td> 1062 <td>21+</td> 1063 <td>-</td> 1064 <td>-</td> 1065 <td class=fn>{@link #getOutputFormat(int)}</td> 1066 <td></td> 1067 <td></td> 1068 <td></td> 1069 <td></td> 1070 <td></td> 1071 <td>●</td> 1072 <td>●</td> 1073 <td>●</td> 1074 </tr> 1075 <tr> 1076 <td>-</td> 1077 <td>-</td> 1078 <td>(21+)</td> 1079 <td>21+</td> 1080 <td>21+</td> 1081 <td>-</td> 1082 <td>-</td> 1083 <td class=fn>{@link #getOutputImage getOutputImage}</td> 1084 <td></td> 1085 <td></td> 1086 <td></td> 1087 <td></td> 1088 <td></td> 1089 <td>○</td> 1090 <td>●</td> 1091 <td>●</td> 1092 </tr> 1093 <tr> 1094 <td>-</td> 1095 <td>-</td> 1096 <td>-</td> 1097 <td>16+</td> 1098 <td>(16+)</td> 1099 <td>-</td> 1100 <td>-</td> 1101 <td class=fn>{@link #queueInputBuffer queueInputBuffer}</td> 1102 <td>●</td> 1103 <td>●</td> 1104 <td>●</td> 1105 <td>●</td> 1106 <td>●</td> 1107 <td>⁕</td> 1108 <td>●</td> 1109 <td>●</td> 1110 </tr> 1111 <tr> 1112 <td>-</td> 1113 <td>-</td> 1114 <td>-</td> 1115 <td>16+</td> 1116 <td>(16+)</td> 1117 <td>-</td> 1118 <td>-</td> 1119 <td class=fn>{@link #queueSecureInputBuffer queueSecureInputBuffer}</td> 1120 <td>●</td> 1121 <td>●</td> 1122 <td>●</td> 1123 <td>●</td> 1124 <td>●</td> 1125 <td>⁕</td> 1126 <td>●</td> 1127 <td>●</td> 1128 </tr> 1129 <tr> 1130 <td>16+</td> 1131 <td>16+</td> 1132 <td>16+</td> 1133 <td>16+</td> 1134 <td>16+</td> 1135 <td>16+</td> 1136 <td>16+</td> 1137 <td class=fn>{@link #release release}</td> 1138 <td>●</td> 1139 <td>●</td> 1140 <td>●</td> 1141 <td>●</td> 1142 <td>●</td> 1143 <td>●</td> 1144 <td>●</td> 1145 <td>●</td> 1146 </tr> 1147 <tr> 1148 <td>-</td> 1149 <td>-</td> 1150 <td>-</td> 1151 
<td>16+</td> 1152 <td>16+</td> 1153 <td>-</td> 1154 <td>-</td> 1155 <td class=fn>{@link #releaseOutputBuffer(int, boolean)}</td> 1156 <td>●</td> 1157 <td>●</td> 1158 <td>●</td> 1159 <td>●</td> 1160 <td>●</td> 1161 <td>⁕</td> 1162 <td>●</td> 1163 <td>⁕</td> 1164 </tr> 1165 <tr> 1166 <td>-</td> 1167 <td>-</td> 1168 <td>-</td> 1169 <td>21+</td> 1170 <td>21+</td> 1171 <td>-</td> 1172 <td>-</td> 1173 <td class=fn>{@link #releaseOutputBuffer(int, long)}</td> 1174 <td></td> 1175 <td></td> 1176 <td></td> 1177 <td></td> 1178 <td></td> 1179 <td>⎆</td> 1180 <td>⎆</td> 1181 <td>⎆</td> 1182 </tr> 1183 <tr> 1184 <td>21+</td> 1185 <td>21+</td> 1186 <td>21+</td> 1187 <td>21+</td> 1188 <td>21+</td> 1189 <td>21+</td> 1190 <td>-</td> 1191 <td class=fn>{@link #reset reset}</td> 1192 <td></td> 1193 <td></td> 1194 <td></td> 1195 <td></td> 1196 <td></td> 1197 <td>●</td> 1198 <td>●</td> 1199 <td>●</td> 1200 </tr> 1201 <tr> 1202 <td>21+</td> 1203 <td>-</td> 1204 <td>-</td> 1205 <td>-</td> 1206 <td>-</td> 1207 <td>-</td> 1208 <td>-</td> 1209 <td class=fn>{@link #setCallback(Callback) setCallback}</td> 1210 <td></td> 1211 <td></td> 1212 <td></td> 1213 <td></td> 1214 <td></td> 1215 <td>●</td> 1216 <td>●</td> 1217 <td>{@link #setCallback(Callback, Handler) ⁕}</td> 1218 </tr> 1219 <tr> 1220 <td>-</td> 1221 <td>23+</td> 1222 <td>-</td> 1223 <td>-</td> 1224 <td>-</td> 1225 <td>-</td> 1226 <td>-</td> 1227 <td class=fn>{@link #setInputSurface setInputSurface}</td> 1228 <td></td> 1229 <td></td> 1230 <td></td> 1231 <td></td> 1232 <td></td> 1233 <td></td> 1234 <td></td> 1235 <td>⎋</td> 1236 </tr> 1237 <tr> 1238 <td>23+</td> 1239 <td>23+</td> 1240 <td>23+</td> 1241 <td>23+</td> 1242 <td>23+</td> 1243 <td>(23+)</td> 1244 <td>(23+)</td> 1245 <td class=fn>{@link #setOnFrameRenderedListener setOnFrameRenderedListener}</td> 1246 <td></td> 1247 <td></td> 1248 <td></td> 1249 <td></td> 1250 <td></td> 1251 <td></td> 1252 <td></td> 1253 <td>○ ⎆</td> 1254 </tr> 1255 <tr> 1256 <td>-</td> 1257 <td>23+</td> 1258 
<td>23+</td> 1259 <td>23+</td> 1260 <td>23+</td> 1261 <td>-</td> 1262 <td>-</td> 1263 <td class=fn>{@link #setOutputSurface setOutputSurface}</td> 1264 <td></td> 1265 <td></td> 1266 <td></td> 1267 <td></td> 1268 <td></td> 1269 <td></td> 1270 <td></td> 1271 <td>⎆</td> 1272 </tr> 1273 <tr> 1274 <td>19+</td> 1275 <td>19+</td> 1276 <td>19+</td> 1277 <td>19+</td> 1278 <td>19+</td> 1279 <td>(19+)</td> 1280 <td>-</td> 1281 <td class=fn>{@link #setParameters setParameters}</td> 1282 <td></td> 1283 <td></td> 1284 <td></td> 1285 <td>●</td> 1286 <td>●</td> 1287 <td>●</td> 1288 <td>●</td> 1289 <td>●</td> 1290 </tr> 1291 <tr> 1292 <td>-</td> 1293 <td>16+</td> 1294 <td>16+</td> 1295 <td>16+</td> 1296 <td>16+</td> 1297 <td>(16+)</td> 1298 <td>-</td> 1299 <td class=fn>{@link #setVideoScalingMode setVideoScalingMode}</td> 1300 <td>⎆</td> 1301 <td>⎆</td> 1302 <td>⎆</td> 1303 <td>⎆</td> 1304 <td>⎆</td> 1305 <td>⎆</td> 1306 <td>⎆</td> 1307 <td>⎆</td> 1308 </tr> 1309 <tr> 1310 <td>-</td> 1311 <td>-</td> 1312 <td>18+</td> 1313 <td>18+</td> 1314 <td>-</td> 1315 <td>-</td> 1316 <td>-</td> 1317 <td class=fn>{@link #signalEndOfInputStream signalEndOfInputStream}</td> 1318 <td></td> 1319 <td></td> 1320 <td>⎋</td> 1321 <td>⎋</td> 1322 <td>⎋</td> 1323 <td>⎋</td> 1324 <td>⎋</td> 1325 <td>⎋</td> 1326 </tr> 1327 <tr> 1328 <td>-</td> 1329 <td>16+</td> 1330 <td>21+(⇄)</td> 1331 <td>-</td> 1332 <td>-</td> 1333 <td>-</td> 1334 <td>-</td> 1335 <td class=fn>{@link #start start}</td> 1336 <td>●</td> 1337 <td>●</td> 1338 <td>●</td> 1339 <td>●</td> 1340 <td>●</td> 1341 <td>⁕</td> 1342 <td>●</td> 1343 <td>●</td> 1344 </tr> 1345 <tr> 1346 <td>-</td> 1347 <td>-</td> 1348 <td>16+</td> 1349 <td>16+</td> 1350 <td>16+</td> 1351 <td>-</td> 1352 <td>-</td> 1353 <td class=fn>{@link #stop stop}</td> 1354 <td>●</td> 1355 <td>●</td> 1356 <td>●</td> 1357 <td>●</td> 1358 <td>●</td> 1359 <td>●</td> 1360 <td>●</td> 1361 <td>●</td> 1362 </tr> 1363 </tbody> 1364 </table> 1365 */ 1366 final public class MediaCodec { 1367 /** 
1368 * Per buffer metadata includes an offset and size specifying 1369 * the range of valid data in the associated codec (output) buffer. 1370 */ 1371 public final static class BufferInfo { 1372 /** 1373 * Update the buffer metadata information. 1374 * 1375 * @param newOffset the start-offset of the data in the buffer. 1376 * @param newSize the amount of data (in bytes) in the buffer. 1377 * @param newTimeUs the presentation timestamp in microseconds. 1378 * @param newFlags buffer flags associated with the buffer. This 1379 * should be a combination of {@link #BUFFER_FLAG_KEY_FRAME} and 1380 * {@link #BUFFER_FLAG_END_OF_STREAM}. 1381 */ set( int newOffset, int newSize, long newTimeUs, @BufferFlag int newFlags)1382 public void set( 1383 int newOffset, int newSize, long newTimeUs, @BufferFlag int newFlags) { 1384 offset = newOffset; 1385 size = newSize; 1386 presentationTimeUs = newTimeUs; 1387 flags = newFlags; 1388 } 1389 1390 /** 1391 * The start-offset of the data in the buffer. 1392 */ 1393 public int offset; 1394 1395 /** 1396 * The amount of data (in bytes) in the buffer. If this is {@code 0}, 1397 * the buffer has no data in it and can be discarded. The only 1398 * use of a 0-size buffer is to carry the end-of-stream marker. 1399 */ 1400 public int size; 1401 1402 /** 1403 * The presentation timestamp in microseconds for the buffer. 1404 * This is derived from the presentation timestamp passed in 1405 * with the corresponding input buffer. This should be ignored for 1406 * a 0-sized buffer. 1407 */ 1408 public long presentationTimeUs; 1409 1410 /** 1411 * Buffer flags associated with the buffer. A combination of 1412 * {@link #BUFFER_FLAG_KEY_FRAME} and {@link #BUFFER_FLAG_END_OF_STREAM}. 1413 * 1414 * <p>Encoded buffers that are key frames are marked with 1415 * {@link #BUFFER_FLAG_KEY_FRAME}. 
1416 * 1417 * <p>The last output buffer corresponding to the input buffer 1418 * marked with {@link #BUFFER_FLAG_END_OF_STREAM} will also be marked 1419 * with {@link #BUFFER_FLAG_END_OF_STREAM}. In some cases this could 1420 * be an empty buffer, whose sole purpose is to carry the end-of-stream 1421 * marker. 1422 */ 1423 @BufferFlag 1424 public int flags; 1425 1426 /** @hide */ 1427 @NonNull dup()1428 public BufferInfo dup() { 1429 BufferInfo copy = new BufferInfo(); 1430 copy.set(offset, size, presentationTimeUs, flags); 1431 return copy; 1432 } 1433 }; 1434 1435 // The follow flag constants MUST stay in sync with their equivalents 1436 // in MediaCodec.h ! 1437 1438 /** 1439 * This indicates that the (encoded) buffer marked as such contains 1440 * the data for a key frame. 1441 * 1442 * @deprecated Use {@link #BUFFER_FLAG_KEY_FRAME} instead. 1443 */ 1444 public static final int BUFFER_FLAG_SYNC_FRAME = 1; 1445 1446 /** 1447 * This indicates that the (encoded) buffer marked as such contains 1448 * the data for a key frame. 1449 */ 1450 public static final int BUFFER_FLAG_KEY_FRAME = 1; 1451 1452 /** 1453 * This indicated that the buffer marked as such contains codec 1454 * initialization / codec specific data instead of media data. 1455 */ 1456 public static final int BUFFER_FLAG_CODEC_CONFIG = 2; 1457 1458 /** 1459 * This signals the end of stream, i.e. no buffers will be available 1460 * after this, unless of course, {@link #flush} follows. 
1461 */ 1462 public static final int BUFFER_FLAG_END_OF_STREAM = 4; 1463 1464 /** @hide */ 1465 @IntDef( 1466 flag = true, 1467 value = { 1468 BUFFER_FLAG_SYNC_FRAME, 1469 BUFFER_FLAG_KEY_FRAME, 1470 BUFFER_FLAG_CODEC_CONFIG, 1471 BUFFER_FLAG_END_OF_STREAM, 1472 }) 1473 @Retention(RetentionPolicy.SOURCE) 1474 public @interface BufferFlag {} 1475 1476 private EventHandler mEventHandler; 1477 private EventHandler mOnFrameRenderedHandler; 1478 private EventHandler mCallbackHandler; 1479 private Callback mCallback; 1480 private OnFrameRenderedListener mOnFrameRenderedListener; 1481 private Object mListenerLock = new Object(); 1482 1483 private static final int EVENT_CALLBACK = 1; 1484 private static final int EVENT_SET_CALLBACK = 2; 1485 private static final int EVENT_FRAME_RENDERED = 3; 1486 1487 private static final int CB_INPUT_AVAILABLE = 1; 1488 private static final int CB_OUTPUT_AVAILABLE = 2; 1489 private static final int CB_ERROR = 3; 1490 private static final int CB_OUTPUT_FORMAT_CHANGE = 4; 1491 1492 private class EventHandler extends Handler { 1493 private MediaCodec mCodec; 1494 EventHandler(@onNull MediaCodec codec, @NonNull Looper looper)1495 public EventHandler(@NonNull MediaCodec codec, @NonNull Looper looper) { 1496 super(looper); 1497 mCodec = codec; 1498 } 1499 1500 @Override handleMessage(@onNull Message msg)1501 public void handleMessage(@NonNull Message msg) { 1502 switch (msg.what) { 1503 case EVENT_CALLBACK: 1504 { 1505 handleCallback(msg); 1506 break; 1507 } 1508 case EVENT_SET_CALLBACK: 1509 { 1510 mCallback = (MediaCodec.Callback) msg.obj; 1511 break; 1512 } 1513 case EVENT_FRAME_RENDERED: 1514 synchronized (mListenerLock) { 1515 Map<String, Object> map = (Map<String, Object>)msg.obj; 1516 for (int i = 0; ; ++i) { 1517 Object mediaTimeUs = map.get(i + "-media-time-us"); 1518 Object systemNano = map.get(i + "-system-nano"); 1519 if (mediaTimeUs == null || systemNano == null 1520 || mOnFrameRenderedListener == null) { 1521 break; 1522 } 1523 
mOnFrameRenderedListener.onFrameRendered( 1524 mCodec, (long)mediaTimeUs, (long)systemNano); 1525 } 1526 break; 1527 } 1528 default: 1529 { 1530 break; 1531 } 1532 } 1533 } 1534 handleCallback(@onNull Message msg)1535 private void handleCallback(@NonNull Message msg) { 1536 if (mCallback == null) { 1537 return; 1538 } 1539 1540 switch (msg.arg1) { 1541 case CB_INPUT_AVAILABLE: 1542 { 1543 int index = msg.arg2; 1544 synchronized(mBufferLock) { 1545 validateInputByteBuffer(mCachedInputBuffers, index); 1546 } 1547 mCallback.onInputBufferAvailable(mCodec, index); 1548 break; 1549 } 1550 1551 case CB_OUTPUT_AVAILABLE: 1552 { 1553 int index = msg.arg2; 1554 BufferInfo info = (MediaCodec.BufferInfo) msg.obj; 1555 synchronized(mBufferLock) { 1556 validateOutputByteBuffer(mCachedOutputBuffers, index, info); 1557 } 1558 mCallback.onOutputBufferAvailable( 1559 mCodec, index, info); 1560 break; 1561 } 1562 1563 case CB_ERROR: 1564 { 1565 mCallback.onError(mCodec, (MediaCodec.CodecException) msg.obj); 1566 break; 1567 } 1568 1569 case CB_OUTPUT_FORMAT_CHANGE: 1570 { 1571 mCallback.onOutputFormatChanged(mCodec, 1572 new MediaFormat((Map<String, Object>) msg.obj)); 1573 break; 1574 } 1575 1576 default: 1577 { 1578 break; 1579 } 1580 } 1581 } 1582 } 1583 1584 private boolean mHasSurface = false; 1585 1586 /** 1587 * Instantiate the preferred decoder supporting input data of the given mime type. 1588 * 1589 * The following is a partial list of defined mime types and their semantics: 1590 * <ul> 1591 * <li>"video/x-vnd.on2.vp8" - VP8 video (i.e. video in .webm) 1592 * <li>"video/x-vnd.on2.vp9" - VP9 video (i.e. 
video in .webm) 1593 * <li>"video/avc" - H.264/AVC video 1594 * <li>"video/hevc" - H.265/HEVC video 1595 * <li>"video/mp4v-es" - MPEG4 video 1596 * <li>"video/3gpp" - H.263 video 1597 * <li>"audio/3gpp" - AMR narrowband audio 1598 * <li>"audio/amr-wb" - AMR wideband audio 1599 * <li>"audio/mpeg" - MPEG1/2 audio layer III 1600 * <li>"audio/mp4a-latm" - AAC audio (note, this is raw AAC packets, not packaged in LATM!) 1601 * <li>"audio/vorbis" - vorbis audio 1602 * <li>"audio/g711-alaw" - G.711 alaw audio 1603 * <li>"audio/g711-mlaw" - G.711 ulaw audio 1604 * </ul> 1605 * 1606 * <strong>Note:</strong> It is preferred to use {@link MediaCodecList#findDecoderForFormat} 1607 * and {@link #createByCodecName} to ensure that the resulting codec can handle a 1608 * given format. 1609 * 1610 * @param type The mime type of the input data. 1611 * @throws IOException if the codec cannot be created. 1612 * @throws IllegalArgumentException if type is not a valid mime type. 1613 * @throws NullPointerException if type is null. 1614 */ 1615 @NonNull createDecoderByType(@onNull String type)1616 public static MediaCodec createDecoderByType(@NonNull String type) 1617 throws IOException { 1618 return new MediaCodec(type, true /* nameIsType */, false /* encoder */); 1619 } 1620 1621 /** 1622 * Instantiate the preferred encoder supporting output data of the given mime type. 1623 * 1624 * <strong>Note:</strong> It is preferred to use {@link MediaCodecList#findEncoderForFormat} 1625 * and {@link #createByCodecName} to ensure that the resulting codec can handle a 1626 * given format. 1627 * 1628 * @param type The desired mime type of the output data. 1629 * @throws IOException if the codec cannot be created. 1630 * @throws IllegalArgumentException if type is not a valid mime type. 1631 * @throws NullPointerException if type is null. 
1632 */ 1633 @NonNull createEncoderByType(@onNull String type)1634 public static MediaCodec createEncoderByType(@NonNull String type) 1635 throws IOException { 1636 return new MediaCodec(type, true /* nameIsType */, true /* encoder */); 1637 } 1638 1639 /** 1640 * If you know the exact name of the component you want to instantiate 1641 * use this method to instantiate it. Use with caution. 1642 * Likely to be used with information obtained from {@link android.media.MediaCodecList} 1643 * @param name The name of the codec to be instantiated. 1644 * @throws IOException if the codec cannot be created. 1645 * @throws IllegalArgumentException if name is not valid. 1646 * @throws NullPointerException if name is null. 1647 */ 1648 @NonNull createByCodecName(@onNull String name)1649 public static MediaCodec createByCodecName(@NonNull String name) 1650 throws IOException { 1651 return new MediaCodec( 1652 name, false /* nameIsType */, false /* unused */); 1653 } 1654 MediaCodec( @onNull String name, boolean nameIsType, boolean encoder)1655 private MediaCodec( 1656 @NonNull String name, boolean nameIsType, boolean encoder) { 1657 Looper looper; 1658 if ((looper = Looper.myLooper()) != null) { 1659 mEventHandler = new EventHandler(this, looper); 1660 } else if ((looper = Looper.getMainLooper()) != null) { 1661 mEventHandler = new EventHandler(this, looper); 1662 } else { 1663 mEventHandler = null; 1664 } 1665 mCallbackHandler = mEventHandler; 1666 mOnFrameRenderedHandler = mEventHandler; 1667 1668 mBufferLock = new Object(); 1669 1670 native_setup(name, nameIsType, encoder); 1671 } 1672 1673 @Override finalize()1674 protected void finalize() { 1675 native_finalize(); 1676 } 1677 1678 /** 1679 * Returns the codec to its initial (Uninitialized) state. 1680 * 1681 * Call this if an {@link MediaCodec.CodecException#isRecoverable unrecoverable} 1682 * error has occured to reset the codec to its initial state after creation. 
1683 * 1684 * @throws CodecException if an unrecoverable error has occured and the codec 1685 * could not be reset. 1686 * @throws IllegalStateException if in the Released state. 1687 */ reset()1688 public final void reset() { 1689 freeAllTrackedBuffers(); // free buffers first 1690 native_reset(); 1691 } 1692 native_reset()1693 private native final void native_reset(); 1694 1695 /** 1696 * Free up resources used by the codec instance. 1697 * 1698 * Make sure you call this when you're done to free up any opened 1699 * component instance instead of relying on the garbage collector 1700 * to do this for you at some point in the future. 1701 */ release()1702 public final void release() { 1703 freeAllTrackedBuffers(); // free buffers first 1704 native_release(); 1705 } 1706 native_release()1707 private native final void native_release(); 1708 1709 /** 1710 * If this codec is to be used as an encoder, pass this flag. 1711 */ 1712 public static final int CONFIGURE_FLAG_ENCODE = 1; 1713 1714 /** @hide */ 1715 @IntDef(flag = true, value = { CONFIGURE_FLAG_ENCODE }) 1716 @Retention(RetentionPolicy.SOURCE) 1717 public @interface ConfigureFlag {} 1718 1719 /** 1720 * Configures a component. 1721 * 1722 * @param format The format of the input data (decoder) or the desired 1723 * format of the output data (encoder). Passing {@code null} 1724 * as {@code format} is equivalent to passing an 1725 * {@link MediaFormat#MediaFormat an empty mediaformat}. 1726 * @param surface Specify a surface on which to render the output of this 1727 * decoder. Pass {@code null} as {@code surface} if the 1728 * codec does not generate raw video output (e.g. not a video 1729 * decoder) and/or if you want to configure the codec for 1730 * {@link ByteBuffer} output. 1731 * @param crypto Specify a crypto object to facilitate secure decryption 1732 * of the media data. Pass {@code null} as {@code crypto} for 1733 * non-secure codecs. 
1734 * @param flags Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the 1735 * component as an encoder. 1736 * @throws IllegalArgumentException if the surface has been released (or is invalid), 1737 * or the format is unacceptable (e.g. missing a mandatory key), 1738 * or the flags are not set properly 1739 * (e.g. missing {@link #CONFIGURE_FLAG_ENCODE} for an encoder). 1740 * @throws IllegalStateException if not in the Uninitialized state. 1741 * @throws CryptoException upon DRM error. 1742 * @throws CodecException upon codec error. 1743 */ configure( @ullable MediaFormat format, @Nullable Surface surface, @Nullable MediaCrypto crypto, @ConfigureFlag int flags)1744 public void configure( 1745 @Nullable MediaFormat format, 1746 @Nullable Surface surface, @Nullable MediaCrypto crypto, 1747 @ConfigureFlag int flags) { 1748 String[] keys = null; 1749 Object[] values = null; 1750 1751 if (format != null) { 1752 Map<String, Object> formatMap = format.getMap(); 1753 keys = new String[formatMap.size()]; 1754 values = new Object[formatMap.size()]; 1755 1756 int i = 0; 1757 for (Map.Entry<String, Object> entry: formatMap.entrySet()) { 1758 if (entry.getKey().equals(MediaFormat.KEY_AUDIO_SESSION_ID)) { 1759 int sessionId = 0; 1760 try { 1761 sessionId = (Integer)entry.getValue(); 1762 } 1763 catch (Exception e) { 1764 throw new IllegalArgumentException("Wrong Session ID Parameter!"); 1765 } 1766 keys[i] = "audio-hw-sync"; 1767 values[i] = AudioSystem.getAudioHwSyncForSession(sessionId); 1768 } else { 1769 keys[i] = entry.getKey(); 1770 values[i] = entry.getValue(); 1771 } 1772 ++i; 1773 } 1774 } 1775 1776 mHasSurface = surface != null; 1777 1778 native_configure(keys, values, surface, crypto, flags); 1779 } 1780 1781 /** 1782 * Dynamically sets the output surface of a codec. 1783 * <p> 1784 * This can only be used if the codec was configured with an output surface. The 1785 * new output surface should have a compatible usage type to the original output surface. 
1786 * E.g. codecs may not support switching from a SurfaceTexture (GPU readable) output 1787 * to ImageReader (software readable) output. 1788 * @param surface the output surface to use. It must not be {@code null}. 1789 * @throws IllegalStateException if the codec does not support setting the output 1790 * surface in the current state. 1791 * @throws IllegalArgumentException if the new surface is not of a suitable type for the codec. 1792 */ setOutputSurface(@onNull Surface surface)1793 public void setOutputSurface(@NonNull Surface surface) { 1794 if (!mHasSurface) { 1795 throw new IllegalStateException("codec was not configured for an output surface"); 1796 } 1797 native_setSurface(surface); 1798 } 1799 native_setSurface(@onNull Surface surface)1800 private native void native_setSurface(@NonNull Surface surface); 1801 1802 /** 1803 * Create a persistent input surface that can be used with codecs that normally have an input 1804 * surface, such as video encoders. A persistent input can be reused by subsequent 1805 * {@link MediaCodec} or {@link MediaRecorder} instances, but can only be used by at 1806 * most one codec or recorder instance concurrently. 1807 * <p> 1808 * The application is responsible for calling release() on the Surface when done. 1809 * 1810 * @return an input surface that can be used with {@link #setInputSurface}. 1811 */ 1812 @NonNull createPersistentInputSurface()1813 public static Surface createPersistentInputSurface() { 1814 return native_createPersistentInputSurface(); 1815 } 1816 1817 static class PersistentSurface extends Surface { 1818 @SuppressWarnings("unused") PersistentSurface()1819 PersistentSurface() {} // used by native 1820 1821 @Override release()1822 public void release() { 1823 native_releasePersistentInputSurface(this); 1824 super.release(); 1825 } 1826 1827 private long mPersistentObject; 1828 }; 1829 1830 /** 1831 * Configures the codec (e.g. encoder) to use a persistent input surface in place of input 1832 * buffers. 
This may only be called after {@link #configure} and before {@link #start}, in 1833 * lieu of {@link #createInputSurface}. 1834 * @param surface a persistent input surface created by {@link #createPersistentInputSurface} 1835 * @throws IllegalStateException if not in the Configured state or does not require an input 1836 * surface. 1837 * @throws IllegalArgumentException if the surface was not created by 1838 * {@link #createPersistentInputSurface}. 1839 */ setInputSurface(@onNull Surface surface)1840 public void setInputSurface(@NonNull Surface surface) { 1841 if (!(surface instanceof PersistentSurface)) { 1842 throw new IllegalArgumentException("not a PersistentSurface"); 1843 } 1844 native_setInputSurface(surface); 1845 } 1846 1847 @NonNull native_createPersistentInputSurface()1848 private static native final PersistentSurface native_createPersistentInputSurface(); native_releasePersistentInputSurface(@onNull Surface surface)1849 private static native final void native_releasePersistentInputSurface(@NonNull Surface surface); native_setInputSurface(@onNull Surface surface)1850 private native final void native_setInputSurface(@NonNull Surface surface); 1851 native_setCallback(@ullable Callback cb)1852 private native final void native_setCallback(@Nullable Callback cb); 1853 native_configure( @ullable String[] keys, @Nullable Object[] values, @Nullable Surface surface, @Nullable MediaCrypto crypto, @ConfigureFlag int flags)1854 private native final void native_configure( 1855 @Nullable String[] keys, @Nullable Object[] values, 1856 @Nullable Surface surface, @Nullable MediaCrypto crypto, @ConfigureFlag int flags); 1857 1858 /** 1859 * Requests a Surface to use as the input to an encoder, in place of input buffers. This 1860 * may only be called after {@link #configure} and before {@link #start}. 1861 * <p> 1862 * The application is responsible for calling release() on the Surface when 1863 * done. 
1864 * <p> 1865 * The Surface must be rendered with a hardware-accelerated API, such as OpenGL ES. 1866 * {@link android.view.Surface#lockCanvas(android.graphics.Rect)} may fail or produce 1867 * unexpected results. 1868 * @throws IllegalStateException if not in the Configured state. 1869 */ 1870 @NonNull createInputSurface()1871 public native final Surface createInputSurface(); 1872 1873 /** 1874 * After successfully configuring the component, call {@code start}. 1875 * <p> 1876 * Call {@code start} also if the codec is configured in asynchronous mode, 1877 * and it has just been flushed, to resume requesting input buffers. 1878 * @throws IllegalStateException if not in the Configured state 1879 * or just after {@link #flush} for a codec that is configured 1880 * in asynchronous mode. 1881 * @throws MediaCodec.CodecException upon codec error. Note that some codec errors 1882 * for start may be attributed to future method calls. 1883 */ start()1884 public final void start() { 1885 native_start(); 1886 synchronized(mBufferLock) { 1887 cacheBuffers(true /* input */); 1888 cacheBuffers(false /* input */); 1889 } 1890 } native_start()1891 private native final void native_start(); 1892 1893 /** 1894 * Finish the decode/encode session, note that the codec instance 1895 * remains active and ready to be {@link #start}ed again. 1896 * To ensure that it is available to other client call {@link #release} 1897 * and don't just rely on garbage collection to eventually do this for you. 1898 * @throws IllegalStateException if in the Released state. 
1899 */ stop()1900 public final void stop() { 1901 native_stop(); 1902 freeAllTrackedBuffers(); 1903 1904 synchronized (mListenerLock) { 1905 if (mCallbackHandler != null) { 1906 mCallbackHandler.removeMessages(EVENT_SET_CALLBACK); 1907 mCallbackHandler.removeMessages(EVENT_CALLBACK); 1908 } 1909 if (mOnFrameRenderedHandler != null) { 1910 mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED); 1911 } 1912 } 1913 } 1914 native_stop()1915 private native final void native_stop(); 1916 1917 /** 1918 * Flush both input and output ports of the component. 1919 * <p> 1920 * Upon return, all indices previously returned in calls to {@link #dequeueInputBuffer 1921 * dequeueInputBuffer} and {@link #dequeueOutputBuffer dequeueOutputBuffer} — or obtained 1922 * via {@link Callback#onInputBufferAvailable onInputBufferAvailable} or 1923 * {@link Callback#onOutputBufferAvailable onOutputBufferAvailable} callbacks — become 1924 * invalid, and all buffers are owned by the codec. 1925 * <p> 1926 * If the codec is configured in asynchronous mode, call {@link #start} 1927 * after {@code flush} has returned to resume codec operations. The codec 1928 * will not request input buffers until this has happened. 1929 * <strong>Note, however, that there may still be outstanding {@code onOutputBufferAvailable} 1930 * callbacks that were not handled prior to calling {@code flush}. 1931 * The indices returned via these callbacks also become invalid upon calling {@code flush} and 1932 * should be discarded.</strong> 1933 * <p> 1934 * If the codec is configured in synchronous mode, codec will resume 1935 * automatically if it is configured with an input surface. Otherwise, it 1936 * will resume when {@link #dequeueInputBuffer dequeueInputBuffer} is called. 1937 * 1938 * @throws IllegalStateException if not in the Executing state. 1939 * @throws MediaCodec.CodecException upon codec error. 
1940 */ flush()1941 public final void flush() { 1942 synchronized(mBufferLock) { 1943 invalidateByteBuffers(mCachedInputBuffers); 1944 invalidateByteBuffers(mCachedOutputBuffers); 1945 mDequeuedInputBuffers.clear(); 1946 mDequeuedOutputBuffers.clear(); 1947 } 1948 native_flush(); 1949 } 1950 native_flush()1951 private native final void native_flush(); 1952 1953 /** 1954 * Thrown when an internal codec error occurs. 1955 */ 1956 public final static class CodecException extends IllegalStateException { CodecException(int errorCode, int actionCode, @Nullable String detailMessage)1957 CodecException(int errorCode, int actionCode, @Nullable String detailMessage) { 1958 super(detailMessage); 1959 mErrorCode = errorCode; 1960 mActionCode = actionCode; 1961 1962 // TODO get this from codec 1963 final String sign = errorCode < 0 ? "neg_" : ""; 1964 mDiagnosticInfo = 1965 "android.media.MediaCodec.error_" + sign + Math.abs(errorCode); 1966 } 1967 1968 /** 1969 * Returns true if the codec exception is a transient issue, 1970 * perhaps due to resource constraints, and that the method 1971 * (or encoding/decoding) may be retried at a later time. 1972 */ 1973 public boolean isTransient() { 1974 return mActionCode == ACTION_TRANSIENT; 1975 } 1976 1977 /** 1978 * Returns true if the codec cannot proceed further, 1979 * but can be recovered by stopping, configuring, 1980 * and starting again. 1981 */ 1982 public boolean isRecoverable() { 1983 return mActionCode == ACTION_RECOVERABLE; 1984 } 1985 1986 /** 1987 * Retrieve the error code associated with a CodecException 1988 */ 1989 public int getErrorCode() { 1990 return mErrorCode; 1991 } 1992 1993 /** 1994 * Retrieve a developer-readable diagnostic information string 1995 * associated with the exception. Do not show this to end-users, 1996 * since this string will not be localized or generally 1997 * comprehensible to end-users. 
1998 */ 1999 public @NonNull String getDiagnosticInfo() { 2000 return mDiagnosticInfo; 2001 } 2002 2003 /** 2004 * This indicates required resource was not able to be allocated. 2005 */ 2006 public static final int ERROR_INSUFFICIENT_RESOURCE = 1100; 2007 2008 /** 2009 * This indicates the resource manager reclaimed the media resource used by the codec. 2010 * <p> 2011 * With this exception, the codec must be released, as it has moved to terminal state. 2012 */ 2013 public static final int ERROR_RECLAIMED = 1101; 2014 2015 /** @hide */ 2016 @IntDef({ 2017 ERROR_INSUFFICIENT_RESOURCE, 2018 ERROR_RECLAIMED, 2019 }) 2020 @Retention(RetentionPolicy.SOURCE) 2021 public @interface ReasonCode {} 2022 2023 /* Must be in sync with android_media_MediaCodec.cpp */ 2024 private final static int ACTION_TRANSIENT = 1; 2025 private final static int ACTION_RECOVERABLE = 2; 2026 2027 private final String mDiagnosticInfo; 2028 private final int mErrorCode; 2029 private final int mActionCode; 2030 } 2031 2032 /** 2033 * Thrown when a crypto error occurs while queueing a secure input buffer. 2034 */ 2035 public final static class CryptoException extends RuntimeException { 2036 public CryptoException(int errorCode, @Nullable String detailMessage) { 2037 super(detailMessage); 2038 mErrorCode = errorCode; 2039 } 2040 2041 /** 2042 * This indicates that the requested key was not found when trying to 2043 * perform a decrypt operation. The operation can be retried after adding 2044 * the correct decryption key. 2045 */ 2046 public static final int ERROR_NO_KEY = 1; 2047 2048 /** 2049 * This indicates that the key used for decryption is no longer 2050 * valid due to license term expiration. The operation can be retried 2051 * after updating the expired keys. 2052 */ 2053 public static final int ERROR_KEY_EXPIRED = 2; 2054 2055 /** 2056 * This indicates that a required crypto resource was not able to be 2057 * allocated while attempting the requested operation. 
The operation 2058 * can be retried if the app is able to release resources. 2059 */ 2060 public static final int ERROR_RESOURCE_BUSY = 3; 2061 2062 /** 2063 * This indicates that the output protection levels supported by the 2064 * device are not sufficient to meet the requirements set by the 2065 * content owner in the license policy. 2066 */ 2067 public static final int ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4; 2068 2069 /** 2070 * This indicates that decryption was attempted on a session that is 2071 * not opened, which could be due to a failure to open the session, 2072 * closing the session prematurely, or the session being reclaimed 2073 * by the resource manager. 2074 */ 2075 public static final int ERROR_SESSION_NOT_OPENED = 5; 2076 2077 /** @hide */ 2078 @IntDef({ 2079 ERROR_NO_KEY, 2080 ERROR_KEY_EXPIRED, 2081 ERROR_RESOURCE_BUSY, 2082 ERROR_INSUFFICIENT_OUTPUT_PROTECTION, 2083 ERROR_SESSION_NOT_OPENED, 2084 }) 2085 @Retention(RetentionPolicy.SOURCE) 2086 public @interface CryptoErrorCode {} 2087 2088 /** 2089 * Retrieve the error code associated with a CryptoException 2090 */ 2091 @CryptoErrorCode 2092 public int getErrorCode() { 2093 return mErrorCode; 2094 } 2095 2096 private int mErrorCode; 2097 } 2098 2099 /** 2100 * After filling a range of the input buffer at the specified index 2101 * submit it to the component. Once an input buffer is queued to 2102 * the codec, it MUST NOT be used until it is later retrieved by 2103 * {@link #getInputBuffer} in response to a {@link #dequeueInputBuffer} 2104 * return value or a {@link Callback#onInputBufferAvailable} 2105 * callback. 2106 * <p> 2107 * Many decoders require the actual compressed data stream to be 2108 * preceded by "codec specific data", i.e. setup data used to initialize 2109 * the codec such as PPS/SPS in the case of AVC video or code tables 2110 * in the case of vorbis audio. 
2111 * The class {@link android.media.MediaExtractor} provides codec 2112 * specific data as part of 2113 * the returned track format in entries named "csd-0", "csd-1" ... 2114 * <p> 2115 * These buffers can be submitted directly after {@link #start} or 2116 * {@link #flush} by specifying the flag {@link 2117 * #BUFFER_FLAG_CODEC_CONFIG}. However, if you configure the 2118 * codec with a {@link MediaFormat} containing these keys, they 2119 * will be automatically submitted by MediaCodec directly after 2120 * start. Therefore, the use of {@link 2121 * #BUFFER_FLAG_CODEC_CONFIG} flag is discouraged and is 2122 * recommended only for advanced users. 2123 * <p> 2124 * To indicate that this is the final piece of input data (or rather that 2125 * no more input data follows unless the decoder is subsequently flushed) 2126 * specify the flag {@link #BUFFER_FLAG_END_OF_STREAM}. 2127 * <p class=note> 2128 * <strong>Note:</strong> Prior to {@link android.os.Build.VERSION_CODES#M}, 2129 * {@code presentationTimeUs} was not propagated to the frame timestamp of (rendered) 2130 * Surface output buffers, and the resulting frame timestamp was undefined. 2131 * Use {@link #releaseOutputBuffer(int, long)} to ensure a specific frame timestamp is set. 2132 * Similarly, since frame timestamps can be used by the destination surface for rendering 2133 * synchronization, <strong>care must be taken to normalize presentationTimeUs so as to not be 2134 * mistaken for a system time. (See {@linkplain #releaseOutputBuffer(int, long) 2135 * SurfaceView specifics}).</strong> 2136 * 2137 * @param index The index of a client-owned input buffer previously returned 2138 * in a call to {@link #dequeueInputBuffer}. 2139 * @param offset The byte offset into the input buffer at which the data starts. 2140 * @param size The number of bytes of valid input data. 2141 * @param presentationTimeUs The presentation timestamp in microseconds for this 2142 * buffer. 
This is normally the media time at which this 2143 * buffer should be presented (rendered). When using an output 2144 * surface, this will be propagated as the {@link 2145 * SurfaceTexture#getTimestamp timestamp} for the frame (after 2146 * conversion to nanoseconds). 2147 * @param flags A bitmask of flags 2148 * {@link #BUFFER_FLAG_CODEC_CONFIG} and {@link #BUFFER_FLAG_END_OF_STREAM}. 2149 * While not prohibited, most codecs do not use the 2150 * {@link #BUFFER_FLAG_KEY_FRAME} flag for input buffers. 2151 * @throws IllegalStateException if not in the Executing state. 2152 * @throws MediaCodec.CodecException upon codec error. 2153 * @throws CryptoException if a crypto object has been specified in 2154 * {@link #configure} 2155 */ 2156 public final void queueInputBuffer( 2157 int index, 2158 int offset, int size, long presentationTimeUs, int flags) 2159 throws CryptoException { 2160 synchronized(mBufferLock) { 2161 invalidateByteBuffer(mCachedInputBuffers, index); 2162 mDequeuedInputBuffers.remove(index); 2163 } 2164 try { 2165 native_queueInputBuffer( 2166 index, offset, size, presentationTimeUs, flags); 2167 } catch (CryptoException | IllegalStateException e) { 2168 revalidateByteBuffer(mCachedInputBuffers, index); 2169 throw e; 2170 } 2171 } 2172 2173 private native final void native_queueInputBuffer( 2174 int index, 2175 int offset, int size, long presentationTimeUs, int flags) 2176 throws CryptoException; 2177 2178 // The following mode constants MUST stay in sync with their equivalents 2179 // in media/hardware/CryptoAPI.h ! 2180 public static final int CRYPTO_MODE_UNENCRYPTED = 0; 2181 public static final int CRYPTO_MODE_AES_CTR = 1; 2182 2183 /** 2184 * Metadata describing the structure of a (at least partially) encrypted 2185 * input sample. 
2186 * A buffer's data is considered to be partitioned into "subSamples", 2187 * each subSample starts with a (potentially empty) run of plain, 2188 * unencrypted bytes followed by a (also potentially empty) run of 2189 * encrypted bytes. 2190 * numBytesOfClearData can be null to indicate that all data is encrypted. 2191 * This information encapsulates per-sample metadata as outlined in 2192 * ISO/IEC FDIS 23001-7:2011 "Common encryption in ISO base media file format files". 2193 */ 2194 public final static class CryptoInfo { 2195 public void set( 2196 int newNumSubSamples, 2197 @NonNull int[] newNumBytesOfClearData, 2198 @NonNull int[] newNumBytesOfEncryptedData, 2199 @NonNull byte[] newKey, 2200 @NonNull byte[] newIV, 2201 int newMode) { 2202 numSubSamples = newNumSubSamples; 2203 numBytesOfClearData = newNumBytesOfClearData; 2204 numBytesOfEncryptedData = newNumBytesOfEncryptedData; 2205 key = newKey; 2206 iv = newIV; 2207 mode = newMode; 2208 } 2209 2210 /** 2211 * The number of subSamples that make up the buffer's contents. 2212 */ 2213 public int numSubSamples; 2214 /** 2215 * The number of leading unencrypted bytes in each subSample. 2216 */ 2217 public int[] numBytesOfClearData; 2218 /** 2219 * The number of trailing encrypted bytes in each subSample. 2220 */ 2221 public int[] numBytesOfEncryptedData; 2222 /** 2223 * A 16-byte opaque key 2224 */ 2225 public byte[] key; 2226 /** 2227 * A 16-byte initialization vector 2228 */ 2229 public byte[] iv; 2230 /** 2231 * The type of encryption that has been applied, 2232 * see {@link #CRYPTO_MODE_UNENCRYPTED} and {@link #CRYPTO_MODE_AES_CTR}. 
2233 */ 2234 public int mode; 2235 2236 @Override 2237 public String toString() { 2238 StringBuilder builder = new StringBuilder(); 2239 builder.append(numSubSamples + " subsamples, key ["); 2240 String hexdigits = "0123456789abcdef"; 2241 for (int i = 0; i < key.length; i++) { 2242 builder.append(hexdigits.charAt((key[i] & 0xf0) >> 4)); 2243 builder.append(hexdigits.charAt(key[i] & 0x0f)); 2244 } 2245 builder.append("], iv ["); 2246 for (int i = 0; i < key.length; i++) { 2247 builder.append(hexdigits.charAt((iv[i] & 0xf0) >> 4)); 2248 builder.append(hexdigits.charAt(iv[i] & 0x0f)); 2249 } 2250 builder.append("], clear "); Arrays.toString(numBytesOfClearData)2251 builder.append(Arrays.toString(numBytesOfClearData)); 2252 builder.append(", encrypted "); Arrays.toString(numBytesOfEncryptedData)2253 builder.append(Arrays.toString(numBytesOfEncryptedData)); 2254 return builder.toString(); 2255 } 2256 }; 2257 2258 /** 2259 * Similar to {@link #queueInputBuffer queueInputBuffer} but submits a buffer that is 2260 * potentially encrypted. 2261 * <strong>Check out further notes at {@link #queueInputBuffer queueInputBuffer}.</strong> 2262 * 2263 * @param index The index of a client-owned input buffer previously returned 2264 * in a call to {@link #dequeueInputBuffer}. 2265 * @param offset The byte offset into the input buffer at which the data starts. 2266 * @param info Metadata required to facilitate decryption, the object can be 2267 * reused immediately after this call returns. 2268 * @param presentationTimeUs The presentation timestamp in microseconds for this 2269 * buffer. This is normally the media time at which this 2270 * buffer should be presented (rendered). 2271 * @param flags A bitmask of flags 2272 * {@link #BUFFER_FLAG_CODEC_CONFIG} and {@link #BUFFER_FLAG_END_OF_STREAM}. 2273 * While not prohibited, most codecs do not use the 2274 * {@link #BUFFER_FLAG_KEY_FRAME} flag for input buffers. 2275 * @throws IllegalStateException if not in the Executing state. 
2276 * @throws MediaCodec.CodecException upon codec error. 2277 * @throws CryptoException if an error occurs while attempting to decrypt the buffer. 2278 * An error code associated with the exception helps identify the 2279 * reason for the failure. 2280 */ queueSecureInputBuffer( int index, int offset, @NonNull CryptoInfo info, long presentationTimeUs, int flags)2281 public final void queueSecureInputBuffer( 2282 int index, 2283 int offset, 2284 @NonNull CryptoInfo info, 2285 long presentationTimeUs, 2286 int flags) throws CryptoException { 2287 synchronized(mBufferLock) { 2288 invalidateByteBuffer(mCachedInputBuffers, index); 2289 mDequeuedInputBuffers.remove(index); 2290 } 2291 try { 2292 native_queueSecureInputBuffer( 2293 index, offset, info, presentationTimeUs, flags); 2294 } catch (CryptoException | IllegalStateException e) { 2295 revalidateByteBuffer(mCachedInputBuffers, index); 2296 throw e; 2297 } 2298 } 2299 native_queueSecureInputBuffer( int index, int offset, @NonNull CryptoInfo info, long presentationTimeUs, int flags)2300 private native final void native_queueSecureInputBuffer( 2301 int index, 2302 int offset, 2303 @NonNull CryptoInfo info, 2304 long presentationTimeUs, 2305 int flags) throws CryptoException; 2306 2307 /** 2308 * Returns the index of an input buffer to be filled with valid data 2309 * or -1 if no such buffer is currently available. 2310 * This method will return immediately if timeoutUs == 0, wait indefinitely 2311 * for the availability of an input buffer if timeoutUs < 0 or wait up 2312 * to "timeoutUs" microseconds if timeoutUs > 0. 2313 * @param timeoutUs The timeout in microseconds, a negative timeout indicates "infinite". 2314 * @throws IllegalStateException if not in the Executing state, 2315 * or codec is configured in asynchronous mode. 2316 * @throws MediaCodec.CodecException upon codec error. 
2317 */ dequeueInputBuffer(long timeoutUs)2318 public final int dequeueInputBuffer(long timeoutUs) { 2319 int res = native_dequeueInputBuffer(timeoutUs); 2320 if (res >= 0) { 2321 synchronized(mBufferLock) { 2322 validateInputByteBuffer(mCachedInputBuffers, res); 2323 } 2324 } 2325 return res; 2326 } 2327 native_dequeueInputBuffer(long timeoutUs)2328 private native final int native_dequeueInputBuffer(long timeoutUs); 2329 2330 /** 2331 * If a non-negative timeout had been specified in the call 2332 * to {@link #dequeueOutputBuffer}, indicates that the call timed out. 2333 */ 2334 public static final int INFO_TRY_AGAIN_LATER = -1; 2335 2336 /** 2337 * The output format has changed, subsequent data will follow the new 2338 * format. {@link #getOutputFormat()} returns the new format. Note, that 2339 * you can also use the new {@link #getOutputFormat(int)} method to 2340 * get the format for a specific output buffer. This frees you from 2341 * having to track output format changes. 2342 */ 2343 public static final int INFO_OUTPUT_FORMAT_CHANGED = -2; 2344 2345 /** 2346 * The output buffers have changed, the client must refer to the new 2347 * set of output buffers returned by {@link #getOutputBuffers} from 2348 * this point on. 2349 * 2350 * @deprecated This return value can be ignored as {@link 2351 * #getOutputBuffers} has been deprecated. Client should 2352 * request a current buffer using on of the get-buffer or 2353 * get-image methods each time one has been dequeued. 2354 */ 2355 public static final int INFO_OUTPUT_BUFFERS_CHANGED = -3; 2356 2357 /** @hide */ 2358 @IntDef({ 2359 INFO_TRY_AGAIN_LATER, 2360 INFO_OUTPUT_FORMAT_CHANGED, 2361 INFO_OUTPUT_BUFFERS_CHANGED, 2362 }) 2363 @Retention(RetentionPolicy.SOURCE) 2364 public @interface OutputBufferInfo {} 2365 2366 /** 2367 * Dequeue an output buffer, block at most "timeoutUs" microseconds. 2368 * Returns the index of an output buffer that has been successfully 2369 * decoded or one of the INFO_* constants. 
2370 * @param info Will be filled with buffer meta data. 2371 * @param timeoutUs The timeout in microseconds, a negative timeout indicates "infinite". 2372 * @throws IllegalStateException if not in the Executing state, 2373 * or codec is configured in asynchronous mode. 2374 * @throws MediaCodec.CodecException upon codec error. 2375 */ 2376 @OutputBufferInfo dequeueOutputBuffer( @onNull BufferInfo info, long timeoutUs)2377 public final int dequeueOutputBuffer( 2378 @NonNull BufferInfo info, long timeoutUs) { 2379 int res = native_dequeueOutputBuffer(info, timeoutUs); 2380 synchronized(mBufferLock) { 2381 if (res == INFO_OUTPUT_BUFFERS_CHANGED) { 2382 cacheBuffers(false /* input */); 2383 } else if (res >= 0) { 2384 validateOutputByteBuffer(mCachedOutputBuffers, res, info); 2385 if (mHasSurface) { 2386 mDequeuedOutputInfos.put(res, info.dup()); 2387 } 2388 } 2389 } 2390 return res; 2391 } 2392 native_dequeueOutputBuffer( @onNull BufferInfo info, long timeoutUs)2393 private native final int native_dequeueOutputBuffer( 2394 @NonNull BufferInfo info, long timeoutUs); 2395 2396 /** 2397 * If you are done with a buffer, use this call to return the buffer to the codec 2398 * or to render it on the output surface. If you configured the codec with an 2399 * output surface, setting {@code render} to {@code true} will first send the buffer 2400 * to that output surface. The surface will release the buffer back to the codec once 2401 * it is no longer used/displayed. 2402 * 2403 * Once an output buffer is released to the codec, it MUST NOT 2404 * be used until it is later retrieved by {@link #getOutputBuffer} in response 2405 * to a {@link #dequeueOutputBuffer} return value or a 2406 * {@link Callback#onOutputBufferAvailable} callback. 2407 * 2408 * @param index The index of a client-owned output buffer previously returned 2409 * from a call to {@link #dequeueOutputBuffer}. 
2410 * @param render If a valid surface was specified when configuring the codec, 2411 * passing true renders this output buffer to the surface. 2412 * @throws IllegalStateException if not in the Executing state. 2413 * @throws MediaCodec.CodecException upon codec error. 2414 */ releaseOutputBuffer(int index, boolean render)2415 public final void releaseOutputBuffer(int index, boolean render) { 2416 BufferInfo info = null; 2417 synchronized(mBufferLock) { 2418 invalidateByteBuffer(mCachedOutputBuffers, index); 2419 mDequeuedOutputBuffers.remove(index); 2420 if (mHasSurface) { 2421 info = mDequeuedOutputInfos.remove(index); 2422 } 2423 } 2424 releaseOutputBuffer(index, render, false /* updatePTS */, 0 /* dummy */); 2425 } 2426 2427 /** 2428 * If you are done with a buffer, use this call to update its surface timestamp 2429 * and return it to the codec to render it on the output surface. If you 2430 * have not specified an output surface when configuring this video codec, 2431 * this call will simply return the buffer to the codec.<p> 2432 * 2433 * The timestamp may have special meaning depending on the destination surface. 2434 * 2435 * <table> 2436 * <tr><th>SurfaceView specifics</th></tr> 2437 * <tr><td> 2438 * If you render your buffer on a {@link android.view.SurfaceView}, 2439 * you can use the timestamp to render the buffer at a specific time (at the 2440 * VSYNC at or after the buffer timestamp). For this to work, the timestamp 2441 * needs to be <i>reasonably close</i> to the current {@link System#nanoTime}. 2442 * Currently, this is set as within one (1) second. A few notes: 2443 * 2444 * <ul> 2445 * <li>the buffer will not be returned to the codec until the timestamp 2446 * has passed and the buffer is no longer used by the {@link android.view.Surface}. 2447 * <li>buffers are processed sequentially, so you may block subsequent buffers to 2448 * be displayed on the {@link android.view.Surface}. 
This is important if you 2449 * want to react to user action, e.g. stop the video or seek. 2450 * <li>if multiple buffers are sent to the {@link android.view.Surface} to be 2451 * rendered at the same VSYNC, the last one will be shown, and the other ones 2452 * will be dropped. 2453 * <li>if the timestamp is <em>not</em> "reasonably close" to the current system 2454 * time, the {@link android.view.Surface} will ignore the timestamp, and 2455 * display the buffer at the earliest feasible time. In this mode it will not 2456 * drop frames. 2457 * <li>for best performance and quality, call this method when you are about 2458 * two VSYNCs' time before the desired render time. For 60Hz displays, this is 2459 * about 33 msec. 2460 * </ul> 2461 * </td></tr> 2462 * </table> 2463 * 2464 * Once an output buffer is released to the codec, it MUST NOT 2465 * be used until it is later retrieved by {@link #getOutputBuffer} in response 2466 * to a {@link #dequeueOutputBuffer} return value or a 2467 * {@link Callback#onOutputBufferAvailable} callback. 2468 * 2469 * @param index The index of a client-owned output buffer previously returned 2470 * from a call to {@link #dequeueOutputBuffer}. 2471 * @param renderTimestampNs The timestamp to associate with this buffer when 2472 * it is sent to the Surface. 2473 * @throws IllegalStateException if not in the Executing state. 2474 * @throws MediaCodec.CodecException upon codec error. 
2475 */ releaseOutputBuffer(int index, long renderTimestampNs)2476 public final void releaseOutputBuffer(int index, long renderTimestampNs) { 2477 BufferInfo info = null; 2478 synchronized(mBufferLock) { 2479 invalidateByteBuffer(mCachedOutputBuffers, index); 2480 mDequeuedOutputBuffers.remove(index); 2481 if (mHasSurface) { 2482 info = mDequeuedOutputInfos.remove(index); 2483 } 2484 } 2485 releaseOutputBuffer( 2486 index, true /* render */, true /* updatePTS */, renderTimestampNs); 2487 } 2488 releaseOutputBuffer( int index, boolean render, boolean updatePTS, long timeNs)2489 private native final void releaseOutputBuffer( 2490 int index, boolean render, boolean updatePTS, long timeNs); 2491 2492 /** 2493 * Signals end-of-stream on input. Equivalent to submitting an empty buffer with 2494 * {@link #BUFFER_FLAG_END_OF_STREAM} set. This may only be used with 2495 * encoders receiving input from a Surface created by {@link #createInputSurface}. 2496 * @throws IllegalStateException if not in the Executing state. 2497 * @throws MediaCodec.CodecException upon codec error. 2498 */ signalEndOfInputStream()2499 public native final void signalEndOfInputStream(); 2500 2501 /** 2502 * Call this after dequeueOutputBuffer signals a format change by returning 2503 * {@link #INFO_OUTPUT_FORMAT_CHANGED}. 2504 * You can also call this after {@link #configure} returns 2505 * successfully to get the output format initially configured 2506 * for the codec. Do this to determine what optional 2507 * configuration parameters were supported by the codec. 2508 * 2509 * @throws IllegalStateException if not in the Executing or 2510 * Configured state. 2511 * @throws MediaCodec.CodecException upon codec error. 
2512 */ 2513 @NonNull getOutputFormat()2514 public final MediaFormat getOutputFormat() { 2515 return new MediaFormat(getFormatNative(false /* input */)); 2516 } 2517 2518 /** 2519 * Call this after {@link #configure} returns successfully to 2520 * get the input format accepted by the codec. Do this to 2521 * determine what optional configuration parameters were 2522 * supported by the codec. 2523 * 2524 * @throws IllegalStateException if not in the Executing or 2525 * Configured state. 2526 * @throws MediaCodec.CodecException upon codec error. 2527 */ 2528 @NonNull getInputFormat()2529 public final MediaFormat getInputFormat() { 2530 return new MediaFormat(getFormatNative(true /* input */)); 2531 } 2532 2533 /** 2534 * Returns the output format for a specific output buffer. 2535 * 2536 * @param index The index of a client-owned input buffer previously 2537 * returned from a call to {@link #dequeueInputBuffer}. 2538 * 2539 * @return the format for the output buffer, or null if the index 2540 * is not a dequeued output buffer. 
 */
@NonNull
public final MediaFormat getOutputFormat(int index) {
    return new MediaFormat(getOutputFormatNative(index));
}

@NonNull
private native final Map<String, Object> getFormatNative(boolean input);

@NonNull
private native final Map<String, Object> getOutputFormatNative(int index);

// used to track dequeued buffers
private static class BufferMap {
    // various returned representations of the codec buffer
    private static class CodecBuffer {
        private Image mImage;
        private ByteBuffer mByteBuffer;

        // Releases whichever representation is currently held, if any.
        public void free() {
            if (mByteBuffer != null) {
                // all of our ByteBuffers are direct
                java.nio.NioUtils.freeDirectBuffer(mByteBuffer);
                mByteBuffer = null;
            }
            if (mImage != null) {
                mImage.close();
                mImage = null;
            }
        }

        // Replaces the held representation with an Image, freeing the old one first.
        public void setImage(@Nullable Image image) {
            free();
            mImage = image;
        }

        // Replaces the held representation with a ByteBuffer, freeing the old one first.
        public void setByteBuffer(@Nullable ByteBuffer buffer) {
            free();
            mByteBuffer = buffer;
        }
    }

    private final Map<Integer, CodecBuffer> mMap =
        new HashMap<Integer, CodecBuffer>();

    // Frees and forgets the buffer tracked under this index, if any.
    public void remove(int index) {
        CodecBuffer buffer = mMap.get(index);
        if (buffer != null) {
            buffer.free();
            mMap.remove(index);
        }
    }

    // Tracks a ByteBuffer under this index, freeing any previous representation.
    public void put(int index, @Nullable ByteBuffer newBuffer) {
        CodecBuffer buffer = mMap.get(index);
        if (buffer == null) { // likely
            buffer = new CodecBuffer();
            mMap.put(index, buffer);
        }
        buffer.setByteBuffer(newBuffer);
    }

    // Tracks an Image under this index, freeing any previous representation.
    public void put(int index, @Nullable Image newImage) {
        CodecBuffer buffer = mMap.get(index);
        if (buffer == null) { // likely
            buffer = new CodecBuffer();
            mMap.put(index, buffer);
        }
        buffer.setImage(newImage);
    }

    // Frees every tracked buffer and empties the map.
    public void clear() {
        for (CodecBuffer buffer: mMap.values()) {
            buffer.free();
        }
        mMap.clear();
    }
}

// Cached ByteBuffer views of the codec's input/output buffer arrays; left
// null when getBuffers() throws (see cacheBuffers below).
private ByteBuffer[] mCachedInputBuffers;
private ByteBuffer[] mCachedOutputBuffers;
// Buffers currently dequeued to (owned by) the client, keyed by index.
private final BufferMap mDequeuedInputBuffers = new BufferMap();
private final BufferMap mDequeuedOutputBuffers = new BufferMap();
// Metadata for dequeued output buffers, kept only when rendering to a surface.
private final Map<Integer, BufferInfo> mDequeuedOutputInfos =
    new HashMap<Integer, BufferInfo>();
// Guards all of the buffer-tracking state above.
final private Object mBufferLock;

// Marks the cached view at `index` inaccessible (buffer owned by the codec).
// NOTE(review): setAccessible is not part of the public java.nio API — this
// appears to rely on an Android-internal ByteBuffer extension.
private final void invalidateByteBuffer(
        @Nullable ByteBuffer[] buffers, int index) {
    if (buffers != null && index >= 0 && index < buffers.length) {
        ByteBuffer buffer = buffers[index];
        if (buffer != null) {
            buffer.setAccessible(false);
        }
    }
}

// Marks the cached input view at `index` accessible and clears it (resets
// position/limit) so the client can fill it from the start.
private final void validateInputByteBuffer(
        @Nullable ByteBuffer[] buffers, int index) {
    if (buffers != null && index >= 0 && index < buffers.length) {
        ByteBuffer buffer = buffers[index];
        if (buffer != null) {
            buffer.setAccessible(true);
            buffer.clear();
        }
    }
}

// Restores access to the view at `index` after a failed queue attempt;
// position/limit are deliberately left untouched. Unlike the helpers above,
// this one acquires mBufferLock itself (its callers invoke it outside the lock).
private final void revalidateByteBuffer(
        @Nullable ByteBuffer[] buffers, int index) {
    synchronized(mBufferLock) {
        if (buffers != null && index >= 0 && index < buffers.length) {
            ByteBuffer buffer = buffers[index];
            if (buffer != null) {
                buffer.setAccessible(true);
            }
        }
    }
}

// Marks the cached output view at `index` accessible and windows it to the
// valid data range described by `info` (offset..offset+size).
private final void validateOutputByteBuffer(
        @Nullable ByteBuffer[] buffers, int index, @NonNull BufferInfo info) {
    if (buffers != null && index >= 0 && index < buffers.length) {
        ByteBuffer buffer = buffers[index];
        if (buffer != null) {
            buffer.setAccessible(true);
            buffer.limit(info.offset + info.size).position(info.offset);
        }
    }
}

// Marks every cached view in the array inaccessible.
private final void invalidateByteBuffers(@Nullable ByteBuffer[] buffers) {
    if (buffers != null) {
        for (ByteBuffer buffer: buffers) {
            if (buffer != null) {
                buffer.setAccessible(false);
            }
        }
    }
}

// Frees the native memory behind a direct buffer view.
private final void freeByteBuffer(@Nullable ByteBuffer buffer) {
    if (buffer != null /* && buffer.isDirect() */) {
        // all of our ByteBuffers are direct
        java.nio.NioUtils.freeDirectBuffer(buffer);
    }
}

// Frees every buffer view in the array.
private final void freeByteBuffers(@Nullable ByteBuffer[] buffers) {
    if (buffers != null) {
        for (ByteBuffer buffer: buffers) {
            freeByteBuffer(buffer);
        }
    }
}

// Releases all cached views and all dequeued-buffer tracking (called from
// stop(), see above).
private final void freeAllTrackedBuffers() {
    synchronized(mBufferLock) {
        freeByteBuffers(mCachedInputBuffers);
        freeByteBuffers(mCachedOutputBuffers);
        mCachedInputBuffers = null;
        mCachedOutputBuffers = null;
        mDequeuedInputBuffers.clear();
        mDequeuedOutputBuffers.clear();
    }
}

// Re-fetches and caches the codec's input or output buffer array, with every
// view initially marked inaccessible. The cache is left null when
// getBuffers() throws (per the comment below, that happens in async mode).
private final void cacheBuffers(boolean input) {
    ByteBuffer[] buffers = null;
    try {
        buffers = getBuffers(input);
        invalidateByteBuffers(buffers);
    } catch (IllegalStateException e) {
        // we don't get buffers in async mode
    }
    if (input) {
        mCachedInputBuffers = buffers;
    } else {
        mCachedOutputBuffers = buffers;
    }
}

/**
 * Retrieve the set of input buffers. Call this after start()
 * returns.
After calling this method, any ByteBuffers 2726 * previously returned by an earlier call to this method MUST no 2727 * longer be used. 2728 * 2729 * @deprecated Use the new {@link #getInputBuffer} method instead 2730 * each time an input buffer is dequeued. 2731 * 2732 * <b>Note:</b> As of API 21, dequeued input buffers are 2733 * automatically {@link java.nio.Buffer#clear cleared}. 2734 * 2735 * <em>Do not use this method if using an input surface.</em> 2736 * 2737 * @throws IllegalStateException if not in the Executing state, 2738 * or codec is configured in asynchronous mode. 2739 * @throws MediaCodec.CodecException upon codec error. 2740 */ 2741 @NonNull getInputBuffers()2742 public ByteBuffer[] getInputBuffers() { 2743 if (mCachedInputBuffers == null) { 2744 throw new IllegalStateException(); 2745 } 2746 // FIXME: check codec status 2747 return mCachedInputBuffers; 2748 } 2749 2750 /** 2751 * Retrieve the set of output buffers. Call this after start() 2752 * returns and whenever dequeueOutputBuffer signals an output 2753 * buffer change by returning {@link 2754 * #INFO_OUTPUT_BUFFERS_CHANGED}. After calling this method, any 2755 * ByteBuffers previously returned by an earlier call to this 2756 * method MUST no longer be used. 2757 * 2758 * @deprecated Use the new {@link #getOutputBuffer} method instead 2759 * each time an output buffer is dequeued. This method is not 2760 * supported if codec is configured in asynchronous mode. 2761 * 2762 * <b>Note:</b> As of API 21, the position and limit of output 2763 * buffers that are dequeued will be set to the valid data 2764 * range. 2765 * 2766 * <em>Do not use this method if using an output surface.</em> 2767 * 2768 * @throws IllegalStateException if not in the Executing state, 2769 * or codec is configured in asynchronous mode. 2770 * @throws MediaCodec.CodecException upon codec error. 
2771 */ 2772 @NonNull getOutputBuffers()2773 public ByteBuffer[] getOutputBuffers() { 2774 if (mCachedOutputBuffers == null) { 2775 throw new IllegalStateException(); 2776 } 2777 // FIXME: check codec status 2778 return mCachedOutputBuffers; 2779 } 2780 2781 /** 2782 * Returns a {@link java.nio.Buffer#clear cleared}, writable ByteBuffer 2783 * object for a dequeued input buffer index to contain the input data. 2784 * 2785 * After calling this method any ByteBuffer or Image object 2786 * previously returned for the same input index MUST no longer 2787 * be used. 2788 * 2789 * @param index The index of a client-owned input buffer previously 2790 * returned from a call to {@link #dequeueInputBuffer}, 2791 * or received via an onInputBufferAvailable callback. 2792 * 2793 * @return the input buffer, or null if the index is not a dequeued 2794 * input buffer, or if the codec is configured for surface input. 2795 * 2796 * @throws IllegalStateException if not in the Executing state. 2797 * @throws MediaCodec.CodecException upon codec error. 2798 */ 2799 @Nullable getInputBuffer(int index)2800 public ByteBuffer getInputBuffer(int index) { 2801 ByteBuffer newBuffer = getBuffer(true /* input */, index); 2802 synchronized(mBufferLock) { 2803 invalidateByteBuffer(mCachedInputBuffers, index); 2804 mDequeuedInputBuffers.put(index, newBuffer); 2805 } 2806 return newBuffer; 2807 } 2808 2809 /** 2810 * Returns a writable Image object for a dequeued input buffer 2811 * index to contain the raw input video frame. 2812 * 2813 * After calling this method any ByteBuffer or Image object 2814 * previously returned for the same input index MUST no longer 2815 * be used. 2816 * 2817 * @param index The index of a client-owned input buffer previously 2818 * returned from a call to {@link #dequeueInputBuffer}, 2819 * or received via an onInputBufferAvailable callback. 
2820 * 2821 * @return the input image, or null if the index is not a 2822 * dequeued input buffer, or not a ByteBuffer that contains a 2823 * raw image. 2824 * 2825 * @throws IllegalStateException if not in the Executing state. 2826 * @throws MediaCodec.CodecException upon codec error. 2827 */ 2828 @Nullable getInputImage(int index)2829 public Image getInputImage(int index) { 2830 Image newImage = getImage(true /* input */, index); 2831 synchronized(mBufferLock) { 2832 invalidateByteBuffer(mCachedInputBuffers, index); 2833 mDequeuedInputBuffers.put(index, newImage); 2834 } 2835 return newImage; 2836 } 2837 2838 /** 2839 * Returns a read-only ByteBuffer for a dequeued output buffer 2840 * index. The position and limit of the returned buffer are set 2841 * to the valid output data. 2842 * 2843 * After calling this method, any ByteBuffer or Image object 2844 * previously returned for the same output index MUST no longer 2845 * be used. 2846 * 2847 * @param index The index of a client-owned output buffer previously 2848 * returned from a call to {@link #dequeueOutputBuffer}, 2849 * or received via an onOutputBufferAvailable callback. 2850 * 2851 * @return the output buffer, or null if the index is not a dequeued 2852 * output buffer, or the codec is configured with an output surface. 2853 * 2854 * @throws IllegalStateException if not in the Executing state. 2855 * @throws MediaCodec.CodecException upon codec error. 2856 */ 2857 @Nullable getOutputBuffer(int index)2858 public ByteBuffer getOutputBuffer(int index) { 2859 ByteBuffer newBuffer = getBuffer(false /* input */, index); 2860 synchronized(mBufferLock) { 2861 invalidateByteBuffer(mCachedOutputBuffers, index); 2862 mDequeuedOutputBuffers.put(index, newBuffer); 2863 } 2864 return newBuffer; 2865 } 2866 2867 /** 2868 * Returns a read-only Image object for a dequeued output buffer 2869 * index that contains the raw video frame. 
2870 * 2871 * After calling this method, any ByteBuffer or Image object previously 2872 * returned for the same output index MUST no longer be used. 2873 * 2874 * @param index The index of a client-owned output buffer previously 2875 * returned from a call to {@link #dequeueOutputBuffer}, 2876 * or received via an onOutputBufferAvailable callback. 2877 * 2878 * @return the output image, or null if the index is not a 2879 * dequeued output buffer, not a raw video frame, or if the codec 2880 * was configured with an output surface. 2881 * 2882 * @throws IllegalStateException if not in the Executing state. 2883 * @throws MediaCodec.CodecException upon codec error. 2884 */ 2885 @Nullable getOutputImage(int index)2886 public Image getOutputImage(int index) { 2887 Image newImage = getImage(false /* input */, index); 2888 synchronized(mBufferLock) { 2889 invalidateByteBuffer(mCachedOutputBuffers, index); 2890 mDequeuedOutputBuffers.put(index, newImage); 2891 } 2892 return newImage; 2893 } 2894 2895 /** 2896 * The content is scaled to the surface dimensions 2897 */ 2898 public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1; 2899 2900 /** 2901 * The content is scaled, maintaining its aspect ratio, the whole 2902 * surface area is used, content may be cropped 2903 */ 2904 public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2; 2905 2906 /** @hide */ 2907 @IntDef({ 2908 VIDEO_SCALING_MODE_SCALE_TO_FIT, 2909 VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING, 2910 }) 2911 @Retention(RetentionPolicy.SOURCE) 2912 public @interface VideoScalingMode {} 2913 2914 /** 2915 * If a surface has been specified in a previous call to {@link #configure} 2916 * specifies the scaling mode to use. The default is "scale to fit". 2917 * @throws IllegalArgumentException if mode is not recognized. 2918 * @throws IllegalStateException if in the Released state. 
2919 */ setVideoScalingMode(@ideoScalingMode int mode)2920 public native final void setVideoScalingMode(@VideoScalingMode int mode); 2921 2922 /** 2923 * Get the component name. If the codec was created by createDecoderByType 2924 * or createEncoderByType, what component is chosen is not known beforehand. 2925 * @throws IllegalStateException if in the Released state. 2926 */ 2927 @NonNull getName()2928 public native final String getName(); 2929 2930 /** 2931 * Change a video encoder's target bitrate on the fly. The value is an 2932 * Integer object containing the new bitrate in bps. 2933 */ 2934 public static final String PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate"; 2935 2936 /** 2937 * Temporarily suspend/resume encoding of input data. While suspended 2938 * input data is effectively discarded instead of being fed into the 2939 * encoder. This parameter really only makes sense to use with an encoder 2940 * in "surface-input" mode, as the client code has no control over the 2941 * input-side of the encoder in that case. 2942 * The value is an Integer object containing the value 1 to suspend 2943 * or the value 0 to resume. 2944 */ 2945 public static final String PARAMETER_KEY_SUSPEND = "drop-input-frames"; 2946 2947 /** 2948 * Request that the encoder produce a sync frame "soon". 2949 * Provide an Integer with the value 0. 2950 */ 2951 public static final String PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync"; 2952 2953 /** 2954 * Communicate additional parameter changes to the component instance. 2955 * <b>Note:</b> Some of these parameter changes may silently fail to apply. 2956 * 2957 * @param params The bundle of parameters to set. 2958 * @throws IllegalStateException if in the Released state. 
2959 */ setParameters(@ullable Bundle params)2960 public final void setParameters(@Nullable Bundle params) { 2961 if (params == null) { 2962 return; 2963 } 2964 2965 String[] keys = new String[params.size()]; 2966 Object[] values = new Object[params.size()]; 2967 2968 int i = 0; 2969 for (final String key: params.keySet()) { 2970 keys[i] = key; 2971 values[i] = params.get(key); 2972 ++i; 2973 } 2974 2975 setParameters(keys, values); 2976 } 2977 2978 /** 2979 * Sets an asynchronous callback for actionable MediaCodec events. 2980 * 2981 * If the client intends to use the component in asynchronous mode, 2982 * a valid callback should be provided before {@link #configure} is called. 2983 * 2984 * When asynchronous callback is enabled, the client should not call 2985 * {@link #getInputBuffers}, {@link #getOutputBuffers}, 2986 * {@link #dequeueInputBuffer(long)} or {@link #dequeueOutputBuffer(BufferInfo, long)}. 2987 * <p> 2988 * Also, {@link #flush} behaves differently in asynchronous mode. After calling 2989 * {@code flush}, you must call {@link #start} to "resume" receiving input buffers, 2990 * even if an input surface was created. 2991 * 2992 * @param cb The callback that will run. Use {@code null} to clear a previously 2993 * set callback (before {@link #configure configure} is called and run 2994 * in synchronous mode). 2995 * @param handler Callbacks will happen on the handler's thread. If {@code null}, 2996 * callbacks are done on the default thread (the caller's thread or the 2997 * main thread.) 2998 */ setCallback(@ullable Callback cb, @Nullable Handler handler)2999 public void setCallback(@Nullable /* MediaCodec. 
*/ Callback cb, @Nullable Handler handler) { 3000 if (cb != null) { 3001 synchronized (mListenerLock) { 3002 EventHandler newHandler = getEventHandlerOn(handler, mCallbackHandler); 3003 // NOTE: there are no callbacks on the handler at this time, but check anyways 3004 // even if we were to extend this to be callable dynamically, it must 3005 // be called when codec is flushed, so no messages are pending. 3006 if (newHandler != mCallbackHandler) { 3007 mCallbackHandler.removeMessages(EVENT_SET_CALLBACK); 3008 mCallbackHandler.removeMessages(EVENT_CALLBACK); 3009 mCallbackHandler = newHandler; 3010 } 3011 } 3012 } else if (mCallbackHandler != null) { 3013 mCallbackHandler.removeMessages(EVENT_SET_CALLBACK); 3014 mCallbackHandler.removeMessages(EVENT_CALLBACK); 3015 } 3016 3017 if (mCallbackHandler != null) { 3018 // set java callback on main handler 3019 Message msg = mCallbackHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb); 3020 mCallbackHandler.sendMessage(msg); 3021 3022 // set native handler here, don't post to handler because 3023 // it may cause the callback to be delayed and set in a wrong state. 3024 // Note that native codec may start sending events to the callback 3025 // handler after this returns. 3026 native_setCallback(cb); 3027 } 3028 } 3029 3030 /** 3031 * Sets an asynchronous callback for actionable MediaCodec events on the default 3032 * looper. 3033 * <p> 3034 * Same as {@link #setCallback(Callback, Handler)} with handler set to null. 3035 * @param cb The callback that will run. Use {@code null} to clear a previously 3036 * set callback (before {@link #configure configure} is called and run 3037 * in synchronous mode). 3038 * @see #setCallback(Callback, Handler) 3039 */ setCallback(@ullable Callback cb)3040 public void setCallback(@Nullable /* MediaCodec. 
*/ Callback cb) { 3041 setCallback(cb, null /* handler */); 3042 } 3043 3044 /** 3045 * Listener to be called when an output frame has rendered on the output surface 3046 * 3047 * @see MediaCodec#setOnFrameRenderedListener 3048 */ 3049 public interface OnFrameRenderedListener { 3050 3051 /** 3052 * Called when an output frame has rendered on the output surface. 3053 * <p> 3054 * <strong>Note:</strong> This callback is for informational purposes only: to get precise 3055 * render timing samples, and can be significantly delayed and batched. Some frames may have 3056 * been rendered even if there was no callback generated. 3057 * 3058 * @param codec the MediaCodec instance 3059 * @param presentationTimeUs the presentation time (media time) of the frame rendered. 3060 * This is usually the same as specified in {@link #queueInputBuffer}; however, 3061 * some codecs may alter the media time by applying some time-based transformation, 3062 * such as frame rate conversion. In that case, presentation time corresponds 3063 * to the actual output frame rendered. 3064 * @param nanoTime The system time when the frame was rendered. 3065 * 3066 * @see System#nanoTime 3067 */ onFrameRendered( @onNull MediaCodec codec, long presentationTimeUs, long nanoTime)3068 public void onFrameRendered( 3069 @NonNull MediaCodec codec, long presentationTimeUs, long nanoTime); 3070 } 3071 3072 /** 3073 * Registers a callback to be invoked when an output frame is rendered on the output surface. 3074 * <p> 3075 * This method can be called in any codec state, but will only have an effect in the 3076 * Executing state for codecs that render buffers to the output surface. 3077 * <p> 3078 * <strong>Note:</strong> This callback is for informational purposes only: to get precise 3079 * render timing samples, and can be significantly delayed and batched. Some frames may have 3080 * been rendered even if there was no callback generated. 
3081 * 3082 * @param listener the callback that will be run 3083 * @param handler the callback will be run on the handler's thread. If {@code null}, 3084 * the callback will be run on the default thread, which is the looper 3085 * from which the codec was created, or a new thread if there was none. 3086 */ setOnFrameRenderedListener( @ullable OnFrameRenderedListener listener, @Nullable Handler handler)3087 public void setOnFrameRenderedListener( 3088 @Nullable OnFrameRenderedListener listener, @Nullable Handler handler) { 3089 synchronized (mListenerLock) { 3090 mOnFrameRenderedListener = listener; 3091 if (listener != null) { 3092 EventHandler newHandler = getEventHandlerOn(handler, mOnFrameRenderedHandler); 3093 if (newHandler != mOnFrameRenderedHandler) { 3094 mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED); 3095 } 3096 mOnFrameRenderedHandler = newHandler; 3097 } else if (mOnFrameRenderedHandler != null) { 3098 mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED); 3099 } 3100 native_enableOnFrameRenderedListener(listener != null); 3101 } 3102 } 3103 native_enableOnFrameRenderedListener(boolean enable)3104 private native void native_enableOnFrameRenderedListener(boolean enable); 3105 getEventHandlerOn( @ullable Handler handler, @NonNull EventHandler lastHandler)3106 private EventHandler getEventHandlerOn( 3107 @Nullable Handler handler, @NonNull EventHandler lastHandler) { 3108 if (handler == null) { 3109 return mEventHandler; 3110 } else { 3111 Looper looper = handler.getLooper(); 3112 if (lastHandler.getLooper() == looper) { 3113 return lastHandler; 3114 } else { 3115 return new EventHandler(this, looper); 3116 } 3117 } 3118 } 3119 3120 /** 3121 * MediaCodec callback interface. Used to notify the user asynchronously 3122 * of various MediaCodec events. 3123 */ 3124 public static abstract class Callback { 3125 /** 3126 * Called when an input buffer becomes available. 3127 * 3128 * @param codec The MediaCodec object. 
3129 * @param index The index of the available input buffer. 3130 */ onInputBufferAvailable(@onNull MediaCodec codec, int index)3131 public abstract void onInputBufferAvailable(@NonNull MediaCodec codec, int index); 3132 3133 /** 3134 * Called when an output buffer becomes available. 3135 * 3136 * @param codec The MediaCodec object. 3137 * @param index The index of the available output buffer. 3138 * @param info Info regarding the available output buffer {@link MediaCodec.BufferInfo}. 3139 */ onOutputBufferAvailable( @onNull MediaCodec codec, int index, @NonNull BufferInfo info)3140 public abstract void onOutputBufferAvailable( 3141 @NonNull MediaCodec codec, int index, @NonNull BufferInfo info); 3142 3143 /** 3144 * Called when the MediaCodec encountered an error 3145 * 3146 * @param codec The MediaCodec object. 3147 * @param e The {@link MediaCodec.CodecException} object describing the error. 3148 */ onError(@onNull MediaCodec codec, @NonNull CodecException e)3149 public abstract void onError(@NonNull MediaCodec codec, @NonNull CodecException e); 3150 3151 /** 3152 * Called when the output format has changed 3153 * 3154 * @param codec The MediaCodec object. 3155 * @param format The new output format. 
3156 */ onOutputFormatChanged( @onNull MediaCodec codec, @NonNull MediaFormat format)3157 public abstract void onOutputFormatChanged( 3158 @NonNull MediaCodec codec, @NonNull MediaFormat format); 3159 } 3160 postEventFromNative( int what, int arg1, int arg2, @Nullable Object obj)3161 private void postEventFromNative( 3162 int what, int arg1, int arg2, @Nullable Object obj) { 3163 synchronized (mListenerLock) { 3164 EventHandler handler = mEventHandler; 3165 if (what == EVENT_CALLBACK) { 3166 handler = mCallbackHandler; 3167 } else if (what == EVENT_FRAME_RENDERED) { 3168 handler = mOnFrameRenderedHandler; 3169 } 3170 if (handler != null) { 3171 Message msg = handler.obtainMessage(what, arg1, arg2, obj); 3172 handler.sendMessage(msg); 3173 } 3174 } 3175 } 3176 setParameters(@onNull String[] keys, @NonNull Object[] values)3177 private native final void setParameters(@NonNull String[] keys, @NonNull Object[] values); 3178 3179 /** 3180 * Get the codec info. If the codec was created by createDecoderByType 3181 * or createEncoderByType, what component is chosen is not known beforehand, 3182 * and thus the caller does not have the MediaCodecInfo. 3183 * @throws IllegalStateException if in the Released state. 
3184 */ 3185 @NonNull getCodecInfo()3186 public MediaCodecInfo getCodecInfo() { 3187 return MediaCodecList.getInfoFor(getName()); 3188 } 3189 3190 @NonNull getBuffers(boolean input)3191 private native final ByteBuffer[] getBuffers(boolean input); 3192 3193 @Nullable getBuffer(boolean input, int index)3194 private native final ByteBuffer getBuffer(boolean input, int index); 3195 3196 @Nullable getImage(boolean input, int index)3197 private native final Image getImage(boolean input, int index); 3198 native_init()3199 private static native final void native_init(); 3200 native_setup( @onNull String name, boolean nameIsType, boolean encoder)3201 private native final void native_setup( 3202 @NonNull String name, boolean nameIsType, boolean encoder); 3203 native_finalize()3204 private native final void native_finalize(); 3205 3206 static { 3207 System.loadLibrary("media_jni"); native_init()3208 native_init(); 3209 } 3210 3211 private long mNativeContext; 3212 3213 /** @hide */ 3214 public static class MediaImage extends Image { 3215 private final boolean mIsReadOnly; 3216 private final int mWidth; 3217 private final int mHeight; 3218 private final int mFormat; 3219 private long mTimestamp; 3220 private final Plane[] mPlanes; 3221 private final ByteBuffer mBuffer; 3222 private final ByteBuffer mInfo; 3223 private final int mXOffset; 3224 private final int mYOffset; 3225 3226 private final static int TYPE_YUV = 1; 3227 3228 @Override getFormat()3229 public int getFormat() { 3230 throwISEIfImageIsInvalid(); 3231 return mFormat; 3232 } 3233 3234 @Override getHeight()3235 public int getHeight() { 3236 throwISEIfImageIsInvalid(); 3237 return mHeight; 3238 } 3239 3240 @Override getWidth()3241 public int getWidth() { 3242 throwISEIfImageIsInvalid(); 3243 return mWidth; 3244 } 3245 3246 @Override getTimestamp()3247 public long getTimestamp() { 3248 throwISEIfImageIsInvalid(); 3249 return mTimestamp; 3250 } 3251 3252 @Override 3253 @NonNull getPlanes()3254 public Plane[] 
getPlanes() { 3255 throwISEIfImageIsInvalid(); 3256 return Arrays.copyOf(mPlanes, mPlanes.length); 3257 } 3258 3259 @Override close()3260 public void close() { 3261 if (mIsImageValid) { 3262 java.nio.NioUtils.freeDirectBuffer(mBuffer); 3263 mIsImageValid = false; 3264 } 3265 } 3266 3267 /** 3268 * Set the crop rectangle associated with this frame. 3269 * <p> 3270 * The crop rectangle specifies the region of valid pixels in the image, 3271 * using coordinates in the largest-resolution plane. 3272 */ 3273 @Override setCropRect(@ullable Rect cropRect)3274 public void setCropRect(@Nullable Rect cropRect) { 3275 if (mIsReadOnly) { 3276 throw new ReadOnlyBufferException(); 3277 } 3278 super.setCropRect(cropRect); 3279 } 3280 3281 readInt(@onNull ByteBuffer buffer, boolean asLong)3282 private int readInt(@NonNull ByteBuffer buffer, boolean asLong) { 3283 if (asLong) { 3284 return (int)buffer.getLong(); 3285 } else { 3286 return buffer.getInt(); 3287 } 3288 } 3289 MediaImage( @onNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly, long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect)3290 public MediaImage( 3291 @NonNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly, 3292 long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect) { 3293 mFormat = ImageFormat.YUV_420_888; 3294 mTimestamp = timestamp; 3295 mIsImageValid = true; 3296 mIsReadOnly = buffer.isReadOnly(); 3297 mBuffer = buffer.duplicate(); 3298 3299 // save offsets and info 3300 mXOffset = xOffset; 3301 mYOffset = yOffset; 3302 mInfo = info; 3303 3304 // read media-info. the size of media info can be 80 or 156/160 depending on 3305 // whether it was created on a 32- or 64-bit process. 
See MediaImage 3306 if (info.remaining() == 80 || info.remaining() == 156 || info.remaining() == 160) { 3307 boolean sizeIsLong = info.remaining() != 80; 3308 int type = readInt(info, info.remaining() == 160); 3309 if (type != TYPE_YUV) { 3310 throw new UnsupportedOperationException("unsupported type: " + type); 3311 } 3312 int numPlanes = readInt(info, sizeIsLong); 3313 if (numPlanes != 3) { 3314 throw new RuntimeException("unexpected number of planes: " + numPlanes); 3315 } 3316 mWidth = readInt(info, sizeIsLong); 3317 mHeight = readInt(info, sizeIsLong); 3318 if (mWidth < 1 || mHeight < 1) { 3319 throw new UnsupportedOperationException( 3320 "unsupported size: " + mWidth + "x" + mHeight); 3321 } 3322 int bitDepth = readInt(info, sizeIsLong); 3323 if (bitDepth != 8) { 3324 throw new UnsupportedOperationException("unsupported bit depth: " + bitDepth); 3325 } 3326 mPlanes = new MediaPlane[numPlanes]; 3327 for (int ix = 0; ix < numPlanes; ix++) { 3328 int planeOffset = readInt(info, sizeIsLong); 3329 int colInc = readInt(info, sizeIsLong); 3330 int rowInc = readInt(info, sizeIsLong); 3331 int horiz = readInt(info, sizeIsLong); 3332 int vert = readInt(info, sizeIsLong); 3333 if (horiz != vert || horiz != (ix == 0 ? 
1 : 2)) { 3334 throw new UnsupportedOperationException("unexpected subsampling: " 3335 + horiz + "x" + vert + " on plane " + ix); 3336 } 3337 3338 buffer.clear(); 3339 buffer.position(mBuffer.position() + planeOffset 3340 + (xOffset / horiz) * colInc + (yOffset / vert) * rowInc); 3341 buffer.limit(buffer.position() + Utils.divUp(bitDepth, 8) 3342 + (mHeight / vert - 1) * rowInc + (mWidth / horiz - 1) * colInc); 3343 mPlanes[ix] = new MediaPlane(buffer.slice(), rowInc, colInc); 3344 } 3345 } else { 3346 throw new UnsupportedOperationException( 3347 "unsupported info length: " + info.remaining()); 3348 } 3349 3350 if (cropRect == null) { 3351 cropRect = new Rect(0, 0, mWidth, mHeight); 3352 } 3353 cropRect.offset(-xOffset, -yOffset); 3354 super.setCropRect(cropRect); 3355 } 3356 3357 private class MediaPlane extends Plane { MediaPlane(@onNull ByteBuffer buffer, int rowInc, int colInc)3358 public MediaPlane(@NonNull ByteBuffer buffer, int rowInc, int colInc) { 3359 mData = buffer; 3360 mRowInc = rowInc; 3361 mColInc = colInc; 3362 } 3363 3364 @Override getRowStride()3365 public int getRowStride() { 3366 throwISEIfImageIsInvalid(); 3367 return mRowInc; 3368 } 3369 3370 @Override getPixelStride()3371 public int getPixelStride() { 3372 throwISEIfImageIsInvalid(); 3373 return mColInc; 3374 } 3375 3376 @Override 3377 @NonNull getBuffer()3378 public ByteBuffer getBuffer() { 3379 throwISEIfImageIsInvalid(); 3380 return mData; 3381 } 3382 3383 private final int mRowInc; 3384 private final int mColInc; 3385 private final ByteBuffer mData; 3386 } 3387 } 3388 } 3389