1 /*
2  *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 package org.webrtc;
12 
13 import android.annotation.TargetApi;
14 import android.media.MediaCodec;
15 import android.media.MediaCodecInfo;
16 import android.media.MediaFormat;
17 import android.opengl.GLES20;
18 import android.os.Bundle;
19 import android.support.annotation.Nullable;
20 import android.view.Surface;
21 import java.io.IOException;
22 import java.nio.ByteBuffer;
23 import java.util.Map;
24 import java.util.concurrent.BlockingDeque;
25 import java.util.concurrent.LinkedBlockingDeque;
26 import java.util.concurrent.TimeUnit;
27 import org.webrtc.ThreadUtils.ThreadChecker;
28 
29 /**
30  * Android hardware video encoder.
31  *
32  * @note This class is only supported on Android Kitkat and above.
33  */
34 @TargetApi(19)
35 @SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
36 class HardwareVideoEncoder implements VideoEncoder {
  private static final String TAG = "HardwareVideoEncoder";

  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
  // in OMX_Video.h
  private static final int VIDEO_ControlRateConstant = 2;
  // Key associated with the bitrate control mode value (above). Not present as a MediaFormat
  // constant until API level 21.
  private static final String KEY_BITRATE_MODE = "bitrate-mode";

  // Values written to MediaFormat's "profile"/"level" keys for H.264 High profile / Level 3.
  private static final int VIDEO_AVC_PROFILE_HIGH = 8;
  private static final int VIDEO_AVC_LEVEL_3 = 0x100;

  // Upper bound applied to the framerate passed to setRateAllocation().
  private static final int MAX_VIDEO_FRAMERATE = 30;

  // See MAX_ENCODER_Q_SIZE in androidmediaencoder.cc.
  private static final int MAX_ENCODER_Q_SIZE = 2;

  // How long release() waits for the output thread to finish before reporting TIMEOUT.
  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
  // Timeout passed to dequeueOutputBuffer() on the output thread.
  private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
56 
  /**
   * Keeps track of the number of output buffers that have been passed down the pipeline and not yet
   * released. We need to wait for this to go down to zero before operations invalidating the output
   * buffers, i.e., stop() and getOutputBuffers().
   */
  private static class BusyCount {
    private final Object countLock = new Object();
    // Number of outstanding (not yet released) output buffers. Guarded by countLock.
    private int count;

    // Called on the output thread before an output buffer is handed downstream.
    public void increment() {
      synchronized (countLock) {
        count++;
      }
    }

    // This method may be called on an arbitrary thread.
    public void decrement() {
      synchronized (countLock) {
        count--;
        if (count == 0) {
          // Wake any thread blocked in waitForZero().
          countLock.notifyAll();
        }
      }
    }

    // The increment and waitForZero methods are called on the same thread (deliverEncodedImage,
    // running on the output thread). Hence, after waitForZero returns, the count will stay zero
    // until the same thread calls increment.
    public void waitForZero() {
      boolean wasInterrupted = false;
      synchronized (countLock) {
        while (count > 0) {
          try {
            countLock.wait();
          } catch (InterruptedException e) {
            // Keep waiting; remember the interrupt so it can be re-asserted afterwards.
            Logging.e(TAG, "Interrupted while waiting on busy count", e);
            wasInterrupted = true;
          }
        }
      }

      if (wasInterrupted) {
        // Restore the interrupt status that was swallowed while waiting.
        Thread.currentThread().interrupt();
      }
    }
  }
  // --- Initialized on construction.
  private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
  private final String codecName;
  private final VideoCodecMimeType codecType;
  // Color format for surface (texture) input mode, or null if surface mode is unavailable.
  private final Integer surfaceColorFormat;
  // Color format for byte-buffer input mode.
  private final Integer yuvColorFormat;
  // YUV layout (I420/NV12) derived from yuvColorFormat; used to fill codec input buffers.
  private final YuvFormat yuvFormat;
  // Codec parameters, e.g. the H.264 profile-level-id FMTP value.
  private final Map<String, String> params;
  private final int keyFrameIntervalSec; // Base interval for generating key frames.
  // Interval at which to force a key frame. Used to reduce color distortions caused by some
  // Qualcomm video encoders.
  private final long forcedKeyFrameNs;
  private final BitrateAdjuster bitrateAdjuster;
  // EGL context shared with the application.  Used to access texture inputs.
  private final EglBase14.Context sharedContext;

  // Drawer used to draw input textures onto the codec's input surface.
  private final GlRectDrawer textureDrawer = new GlRectDrawer();
  private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
  // A queue of EncodedImage.Builders that correspond to frames in the codec.  These builders are
  // pre-populated with all the information that can't be sent through MediaCodec.
  private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();

  private final ThreadChecker encodeThreadChecker = new ThreadChecker();
  private final ThreadChecker outputThreadChecker = new ThreadChecker();
  // Tracks output buffers currently referenced downstream; see BusyCount.
  private final BusyCount outputBuffersBusyCount = new BusyCount();

  // --- Set on initialize and immutable until release.
  private Callback callback;
  private boolean automaticResizeOn;

  // --- Valid and immutable while an encoding session is running.
  @Nullable private MediaCodecWrapper codec;
  @Nullable private ByteBuffer[] outputBuffers;
  // Thread that delivers encoded frames to the user callback.
  @Nullable private Thread outputThread;

  // EGL base wrapping the shared texture context.  Holds hooks to both the shared context and the
  // input surface.  Making this base current allows textures from the context to be drawn onto the
  // surface.
  @Nullable private EglBase14 textureEglBase;
  // Input surface for the codec.  The encoder will draw input textures onto this surface.
  @Nullable private Surface textureInputSurface;

  private int width;
  private int height;
  // True when frames are fed via the codec's input surface rather than byte buffers.
  private boolean useSurfaceMode;

  // --- Only accessed from the encoding thread.
  // Presentation timestamp of the last requested (or forced) key frame.
  private long lastKeyFrameNs;

  // --- Only accessed on the output thread.
  // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
  @Nullable private ByteBuffer configBuffer;
  private int adjustedBitrate;

  // Whether the encoder is running.  Volatile so that the output thread can watch this value and
  // exit when the encoder stops.
  private volatile boolean running;
  // Any exception thrown during shutdown.  The output thread releases the MediaCodec and uses this
  // value to send exceptions thrown during release back to the encoder thread.
  @Nullable private volatile Exception shutdownException;
166 
  /**
   * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
   * intervals, and bitrateAdjuster.
   *
   * @param mediaCodecWrapperFactory factory used to create the MediaCodec wrapper on initEncode
   * @param codecName the hardware codec implementation to use
   * @param codecType the type of the given video codec (eg. VP8, VP9, or H264)
   * @param surfaceColorFormat color format for surface mode or null if not available
   * @param yuvColorFormat color format for bytebuffer mode
   * @param params codec parameters, e.g. the H.264 profile-level-id FMTP value
   * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
   * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
   *     used to reduce distortion caused by some codec implementations
   * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
   *     desired bitrates
   * @param sharedContext EGL context shared with the application, or null; required for surface
   *     mode
   * @throws IllegalArgumentException if colorFormat is unsupported
   */
  public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
      VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
      Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
      BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
    this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
    this.codecName = codecName;
    this.codecType = codecType;
    this.surfaceColorFormat = surfaceColorFormat;
    this.yuvColorFormat = yuvColorFormat;
    // Resolved eagerly so an unsupported yuvColorFormat fails fast at construction.
    this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
    this.params = params;
    this.keyFrameIntervalSec = keyFrameIntervalSec;
    this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
    this.bitrateAdjuster = bitrateAdjuster;
    this.sharedContext = sharedContext;

    // Allow construction on a different thread.
    encodeThreadChecker.detachThread();
  }
201 
202   @Override
initEncode(Settings settings, Callback callback)203   public VideoCodecStatus initEncode(Settings settings, Callback callback) {
204     encodeThreadChecker.checkIsOnValidThread();
205 
206     this.callback = callback;
207     automaticResizeOn = settings.automaticResizeOn;
208     this.width = settings.width;
209     this.height = settings.height;
210     useSurfaceMode = canUseSurface();
211 
212     if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
213       bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
214     }
215     adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
216 
217     Logging.d(TAG,
218         "initEncode: " + width + " x " + height + ". @ " + settings.startBitrate
219             + "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode);
220     return initEncodeInternal();
221   }
222 
initEncodeInternal()223   private VideoCodecStatus initEncodeInternal() {
224     encodeThreadChecker.checkIsOnValidThread();
225 
226     lastKeyFrameNs = -1;
227 
228     try {
229       codec = mediaCodecWrapperFactory.createByCodecName(codecName);
230     } catch (IOException | IllegalArgumentException e) {
231       Logging.e(TAG, "Cannot create media encoder " + codecName);
232       return VideoCodecStatus.FALLBACK_SOFTWARE;
233     }
234 
235     final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
236     try {
237       MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
238       format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
239       format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
240       format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
241       format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getCodecConfigFramerate());
242       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
243       if (codecType == VideoCodecMimeType.H264) {
244         String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
245         if (profileLevelId == null) {
246           profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
247         }
248         switch (profileLevelId) {
249           case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
250             format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
251             format.setInteger("level", VIDEO_AVC_LEVEL_3);
252             break;
253           case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
254             break;
255           default:
256             Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
257         }
258       }
259       Logging.d(TAG, "Format: " + format);
260       codec.configure(
261           format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
262 
263       if (useSurfaceMode) {
264         textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE);
265         textureInputSurface = codec.createInputSurface();
266         textureEglBase.createSurface(textureInputSurface);
267         textureEglBase.makeCurrent();
268       }
269 
270       codec.start();
271       outputBuffers = codec.getOutputBuffers();
272     } catch (IllegalStateException e) {
273       Logging.e(TAG, "initEncodeInternal failed", e);
274       release();
275       return VideoCodecStatus.FALLBACK_SOFTWARE;
276     }
277 
278     running = true;
279     outputThreadChecker.detachThread();
280     outputThread = createOutputThread();
281     outputThread.start();
282 
283     return VideoCodecStatus.OK;
284   }
285 
286   @Override
release()287   public VideoCodecStatus release() {
288     encodeThreadChecker.checkIsOnValidThread();
289 
290     final VideoCodecStatus returnValue;
291     if (outputThread == null) {
292       returnValue = VideoCodecStatus.OK;
293     } else {
294       // The outputThread actually stops and releases the codec once running is false.
295       running = false;
296       if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
297         Logging.e(TAG, "Media encoder release timeout");
298         returnValue = VideoCodecStatus.TIMEOUT;
299       } else if (shutdownException != null) {
300         // Log the exception and turn it into an error.
301         Logging.e(TAG, "Media encoder release exception", shutdownException);
302         returnValue = VideoCodecStatus.ERROR;
303       } else {
304         returnValue = VideoCodecStatus.OK;
305       }
306     }
307 
308     textureDrawer.release();
309     videoFrameDrawer.release();
310     if (textureEglBase != null) {
311       textureEglBase.release();
312       textureEglBase = null;
313     }
314     if (textureInputSurface != null) {
315       textureInputSurface.release();
316       textureInputSurface = null;
317     }
318     outputBuilders.clear();
319 
320     codec = null;
321     outputBuffers = null;
322     outputThread = null;
323 
324     // Allow changing thread after release.
325     encodeThreadChecker.detachThread();
326 
327     return returnValue;
328   }
329 
  /**
   * Encodes one frame. Restarts the codec if the input resolution or buffer kind (texture vs. byte
   * buffer) changed since the last frame; may drop the frame when the encoder queue is full.
   */
  @Override
  public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
    encodeThreadChecker.checkIsOnValidThread();
    if (codec == null) {
      return VideoCodecStatus.UNINITIALIZED;
    }

    final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
    final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;

    // If input resolution changed, restart the codec with the new resolution.
    final int frameWidth = videoFrame.getBuffer().getWidth();
    final int frameHeight = videoFrame.getBuffer().getHeight();
    final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
    if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
      VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
      if (status != VideoCodecStatus.OK) {
        return status;
      }
    }

    if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
      // Too many frames in the encoder.  Drop this frame.
      Logging.e(TAG, "Dropped frame, encoder queue full");
      return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
    }

    boolean requestedKeyFrame = false;
    for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
      if (frameType == EncodedImage.FrameType.VideoFrameKey) {
        requestedKeyFrame = true;
      }
    }

    if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
      requestKeyFrame(videoFrame.getTimestampNs());
    }

    // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
    // subsampled at one byte per four pixels.
    int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
    // One builder per in-flight frame; the output thread pops the matching builder in
    // deliverEncodedImage().
    EncodedImage.Builder builder = EncodedImage.builder()
                                       .setCaptureTimeNs(videoFrame.getTimestampNs())
                                       .setCompleteFrame(true)
                                       .setEncodedWidth(videoFrame.getBuffer().getWidth())
                                       .setEncodedHeight(videoFrame.getBuffer().getHeight())
                                       .setRotation(videoFrame.getRotation());
    outputBuilders.offer(builder);

    final VideoCodecStatus returnValue;
    if (useSurfaceMode) {
      returnValue = encodeTextureBuffer(videoFrame);
    } else {
      returnValue = encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
    }

    // Check if the queue was successful.
    if (returnValue != VideoCodecStatus.OK) {
      // Keep the output builders in sync with buffers in the codec.
      outputBuilders.pollLast();
    }

    return returnValue;
  }
394 
encodeTextureBuffer(VideoFrame videoFrame)395   private VideoCodecStatus encodeTextureBuffer(VideoFrame videoFrame) {
396     encodeThreadChecker.checkIsOnValidThread();
397     try {
398       // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
399       // but it's a workaround for bug webrtc:5147.
400       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
401       // It is not necessary to release this frame because it doesn't own the buffer.
402       VideoFrame derotatedFrame =
403           new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
404       videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
405       textureEglBase.swapBuffers(videoFrame.getTimestampNs());
406     } catch (RuntimeException e) {
407       Logging.e(TAG, "encodeTexture failed", e);
408       return VideoCodecStatus.ERROR;
409     }
410     return VideoCodecStatus.OK;
411   }
412 
encodeByteBuffer( VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize)413   private VideoCodecStatus encodeByteBuffer(
414       VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
415     encodeThreadChecker.checkIsOnValidThread();
416     // Frame timestamp rounded to the nearest microsecond.
417     long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
418 
419     // No timeout.  Don't block for an input buffer, drop frames if the encoder falls behind.
420     int index;
421     try {
422       index = codec.dequeueInputBuffer(0 /* timeout */);
423     } catch (IllegalStateException e) {
424       Logging.e(TAG, "dequeueInputBuffer failed", e);
425       return VideoCodecStatus.ERROR;
426     }
427 
428     if (index == -1) {
429       // Encoder is falling behind.  No input buffers available.  Drop the frame.
430       Logging.d(TAG, "Dropped frame, no input buffers available");
431       return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
432     }
433 
434     ByteBuffer buffer;
435     try {
436       buffer = codec.getInputBuffers()[index];
437     } catch (IllegalStateException e) {
438       Logging.e(TAG, "getInputBuffers failed", e);
439       return VideoCodecStatus.ERROR;
440     }
441     fillInputBuffer(buffer, videoFrameBuffer);
442 
443     try {
444       codec.queueInputBuffer(
445           index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
446     } catch (IllegalStateException e) {
447       Logging.e(TAG, "queueInputBuffer failed", e);
448       // IllegalStateException thrown when the codec is in the wrong state.
449       return VideoCodecStatus.ERROR;
450     }
451     return VideoCodecStatus.OK;
452   }
453 
454   @Override
setRateAllocation(BitrateAllocation bitrateAllocation, int framerate)455   public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
456     encodeThreadChecker.checkIsOnValidThread();
457     if (framerate > MAX_VIDEO_FRAMERATE) {
458       framerate = MAX_VIDEO_FRAMERATE;
459     }
460     bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
461     return VideoCodecStatus.OK;
462   }
463 
464   @Override
getScalingSettings()465   public ScalingSettings getScalingSettings() {
466     encodeThreadChecker.checkIsOnValidThread();
467     if (automaticResizeOn) {
468       if (codecType == VideoCodecMimeType.VP8) {
469         final int kLowVp8QpThreshold = 29;
470         final int kHighVp8QpThreshold = 95;
471         return new ScalingSettings(kLowVp8QpThreshold, kHighVp8QpThreshold);
472       } else if (codecType == VideoCodecMimeType.H264) {
473         final int kLowH264QpThreshold = 24;
474         final int kHighH264QpThreshold = 37;
475         return new ScalingSettings(kLowH264QpThreshold, kHighH264QpThreshold);
476       }
477     }
478     return ScalingSettings.OFF;
479   }
480 
  /** Returns a generic implementation name for all hardware encoders. */
  @Override
  public String getImplementationName() {
    return "HWEncoder";
  }
485 
resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode)486   private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
487     encodeThreadChecker.checkIsOnValidThread();
488     VideoCodecStatus status = release();
489     if (status != VideoCodecStatus.OK) {
490       return status;
491     }
492     width = newWidth;
493     height = newHeight;
494     useSurfaceMode = newUseSurfaceMode;
495     return initEncodeInternal();
496   }
497 
shouldForceKeyFrame(long presentationTimestampNs)498   private boolean shouldForceKeyFrame(long presentationTimestampNs) {
499     encodeThreadChecker.checkIsOnValidThread();
500     return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
501   }
502 
requestKeyFrame(long presentationTimestampNs)503   private void requestKeyFrame(long presentationTimestampNs) {
504     encodeThreadChecker.checkIsOnValidThread();
505     // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
506     // indicate this in queueInputBuffer() below and guarantee _this_ frame
507     // be encoded as a key frame, but sadly that flag is ignored.  Instead,
508     // we request a key frame "soon".
509     try {
510       Bundle b = new Bundle();
511       b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
512       codec.setParameters(b);
513     } catch (IllegalStateException e) {
514       Logging.e(TAG, "requestKeyFrame failed", e);
515       return;
516     }
517     lastKeyFrameNs = presentationTimestampNs;
518   }
519 
createOutputThread()520   private Thread createOutputThread() {
521     return new Thread() {
522       @Override
523       public void run() {
524         while (running) {
525           deliverEncodedImage();
526         }
527         releaseCodecOnOutputThread();
528       }
529     };
530   }
531 
  // Visible for testing.
  //
  // Pulls one encoded buffer from the codec (waiting up to DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US) and
  // delivers it to the registered callback. Runs on the output thread. Config frames are cached in
  // configBuffer instead of being delivered; for H.264 key frames the cached config is prepended.
  protected void deliverEncodedImage() {
    outputThreadChecker.checkIsOnValidThread();
    try {
      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
      int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
      if (index < 0) {
        if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
          // The old output buffers are invalid; wait until none are referenced downstream before
          // fetching the new array.
          outputBuffersBusyCount.waitForZero();
          outputBuffers = codec.getOutputBuffers();
        }
        return;
      }

      ByteBuffer codecOutputBuffer = outputBuffers[index];
      codecOutputBuffer.position(info.offset);
      codecOutputBuffer.limit(info.offset + info.size);

      if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
        // Cache the config frame (used by H.264) rather than delivering it as a frame.
        configBuffer = ByteBuffer.allocateDirect(info.size);
        configBuffer.put(codecOutputBuffer);
      } else {
        bitrateAdjuster.reportEncodedFrame(info.size);
        if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
          updateBitrate();
        }

        final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
        if (isKeyFrame) {
          Logging.d(TAG, "Sync frame generated");
        }

        final ByteBuffer frameBuffer;
        if (isKeyFrame && codecType == VideoCodecMimeType.H264) {
          Logging.d(TAG,
              "Prepending config frame of size " + configBuffer.capacity()
                  + " to output buffer with offset " + info.offset + ", size " + info.size);
          // For H.264 key frame prepend SPS and PPS NALs at the start.
          frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
          configBuffer.rewind();
          frameBuffer.put(configBuffer);
          frameBuffer.put(codecOutputBuffer);
          frameBuffer.rewind();
        } else {
          // slice() shares the codec's output buffer, so it stays "busy" until the release
          // callback below runs.
          frameBuffer = codecOutputBuffer.slice();
        }

        final EncodedImage.FrameType frameType = isKeyFrame
            ? EncodedImage.FrameType.VideoFrameKey
            : EncodedImage.FrameType.VideoFrameDelta;

        // Incremented before delivery; decremented by the buffer's release callback below.
        outputBuffersBusyCount.increment();
        // Pop the builder queued by encode() for this frame.
        EncodedImage.Builder builder = outputBuilders.poll();
        EncodedImage encodedImage = builder
                                        .setBuffer(frameBuffer,
                                            () -> {
                                              // This callback should not throw any exceptions since
                                              // it may be called on an arbitrary thread.
                                              // Check bug webrtc:11230 for more details.
                                              try {
                                                codec.releaseOutputBuffer(index, false);
                                              } catch (Exception e) {
                                                Logging.e(TAG, "releaseOutputBuffer failed", e);
                                              }
                                              outputBuffersBusyCount.decrement();
                                            })
                                        .setFrameType(frameType)
                                        .createEncodedImage();
        // TODO(mellem):  Set codec-specific info.
        callback.onEncodedFrame(encodedImage, new CodecSpecificInfo());
        // Note that the callback may have retained the image.
        encodedImage.release();
      }
    } catch (IllegalStateException e) {
      Logging.e(TAG, "deliverOutput failed", e);
    }
  }
610 
  // Runs on the output thread after `running` flips to false; performs the actual MediaCodec
  // shutdown once no output buffers are referenced downstream.
  private void releaseCodecOnOutputThread() {
    outputThreadChecker.checkIsOnValidThread();
    Logging.d(TAG, "Releasing MediaCodec on output thread");
    // stop() invalidates the output buffers; wait until none are held downstream.
    outputBuffersBusyCount.waitForZero();
    try {
      codec.stop();
    } catch (Exception e) {
      // Best effort: still attempt release() below even if stop() failed.
      Logging.e(TAG, "Media encoder stop failed", e);
    }
    try {
      codec.release();
    } catch (Exception e) {
      Logging.e(TAG, "Media encoder release failed", e);
      // Propagate exceptions caught during release back to the main thread.
      shutdownException = e;
    }
    configBuffer = null;
    Logging.d(TAG, "Release on output thread done");
  }
630 
631   private VideoCodecStatus updateBitrate() {
632     outputThreadChecker.checkIsOnValidThread();
633     adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
634     try {
635       Bundle params = new Bundle();
636       params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
637       codec.setParameters(params);
638       return VideoCodecStatus.OK;
639     } catch (IllegalStateException e) {
640       Logging.e(TAG, "updateBitrate failed", e);
641       return VideoCodecStatus.ERROR;
642     }
643   }
644 
  // Surface (texture) input is possible only when both a shared EGL context and a surface color
  // format were supplied at construction.
  private boolean canUseSurface() {
    return sharedContext != null && surfaceColorFormat != null;
  }
648 
  // Visible for testing.
  // Writes `videoFrameBuffer` into the codec input `buffer` using the YUV layout (I420 or NV12)
  // selected from yuvColorFormat at construction.
  protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
    yuvFormat.fillBuffer(buffer, videoFrameBuffer);
  }
653 
  /**
   * Enumeration of supported YUV color formats used for MediaCodec's input.
   */
  private enum YuvFormat {
    I420 {
      @Override
      void fillBuffer(ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer) {
        // Convert to I420 if needed, then copy the three planes in planar order.
        VideoFrame.I420Buffer i420 = srcBuffer.toI420();
        YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
            i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight());
        i420.release();
      }
    },
    NV12 {
      @Override
      void fillBuffer(ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer) {
        // Convert to I420 first, then interleave U/V into the semi-planar NV12 layout.
        VideoFrame.I420Buffer i420 = srcBuffer.toI420();
        YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(),
            i420.getDataV(), i420.getStrideV(), dstBuffer, i420.getWidth(), i420.getHeight());
        i420.release();
      }
    };

    // Writes srcBuffer into dstBuffer using this constant's pixel layout.
    abstract void fillBuffer(ByteBuffer dstBuffer, VideoFrame.Buffer srcBuffer);

    // Maps a MediaCodecInfo color-format constant to the matching YuvFormat.
    static YuvFormat valueOf(int colorFormat) {
      switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
          return I420;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
        case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
          return NV12;
        default:
          throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
      }
    }
  }
692 }
693