/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.Nullable;
import java.util.concurrent.Callable;
import org.webrtc.EglBase.Context;
import org.webrtc.TextureBufferImpl.RefCountMonitor;
import org.webrtc.VideoFrame.TextureBuffer;

/**
 * Helper class for using a SurfaceTexture to create WebRTC VideoFrames. In order to create WebRTC
 * VideoFrames, render onto the SurfaceTexture. The frames will be delivered to the listener. Only
 * one texture frame can be in flight at once, so the frame must be released in order to receive a
 * new frame. Call stopListening() to stop receiving new frames. Call dispose to release all
 * resources once the texture frame is released.
 */
public class SurfaceTextureHelper {
  /**
   * Interface for monitoring texture buffers created from this SurfaceTexture. Since only one
   * texture buffer can exist at a time, this can be used to monitor for stuck frames.
   */
  public interface FrameRefMonitor {
    /** A new frame was created. New frames start with ref count of 1. */
    void onNewBuffer(TextureBuffer textureBuffer);
    /** Ref count of the frame was incremented by the calling thread. */
    void onRetainBuffer(TextureBuffer textureBuffer);
    /** Ref count of the frame was decremented by the calling thread. */
    void onReleaseBuffer(TextureBuffer textureBuffer);
    /** Frame was destroyed (ref count reached 0). */
    void onDestroyBuffer(TextureBuffer textureBuffer);
  }

  private static final String TAG = "SurfaceTextureHelper";

  /**
   * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
   * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
   * initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
   * timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
   * rtc::TimeNanos() there is no need for aligning timestamps again in
   * PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
   * closer to actual creation time.
   */
  public static SurfaceTextureHelper create(final String threadName,
      final EglBase.Context sharedContext, boolean alignTimestamps, final YuvConverter yuvConverter,
      FrameRefMonitor frameRefMonitor) {
    final HandlerThread thread = new HandlerThread(threadName);
    thread.start();
    final Handler handler = new Handler(thread.getLooper());

    // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
    // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
    // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
    // is constructed on the |handler| thread.
    return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
      @Nullable
      @Override
      public SurfaceTextureHelper call() {
        try {
          return new SurfaceTextureHelper(
              sharedContext, handler, alignTimestamps, yuvConverter, frameRefMonitor);
        } catch (RuntimeException e) {
          // Construction can fail if EGL setup fails; log and report failure as null.
          Logging.e(TAG, threadName + " create failure", e);
          return null;
        }
      }
    });
  }

  /**
   * Same as above with alignTimestamps set to false and yuvConverter set to new YuvConverter.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(
      final String threadName, final EglBase.Context sharedContext) {
    return create(threadName, sharedContext, /* alignTimestamps= */ false, new YuvConverter(),
        /*frameRefMonitor=*/null);
  }

  /**
   * Same as above with yuvConverter set to new YuvConverter.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(
      final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
    return create(
        threadName, sharedContext, alignTimestamps, new YuvConverter(), /*frameRefMonitor=*/null);
  }

  /**
   * Create a SurfaceTextureHelper without frame ref monitor.
   *
   * @see #create(String, EglBase.Context, boolean, YuvConverter, FrameRefMonitor)
   */
  public static SurfaceTextureHelper create(final String threadName,
      final EglBase.Context sharedContext, boolean alignTimestamps, YuvConverter yuvConverter) {
    return create(
        threadName, sharedContext, alignTimestamps, yuvConverter, /*frameRefMonitor=*/null);
  }

  // Forwards ref-count events of texture buffers to the optional |frameRefMonitor|, and returns
  // the texture to this helper when a buffer is destroyed so a new frame can be delivered.
  private final RefCountMonitor textureRefCountMonitor = new RefCountMonitor() {
    @Override
    public void onRetain(TextureBufferImpl textureBuffer) {
      if (frameRefMonitor != null) {
        frameRefMonitor.onRetainBuffer(textureBuffer);
      }
    }

    @Override
    public void onRelease(TextureBufferImpl textureBuffer) {
      if (frameRefMonitor != null) {
        frameRefMonitor.onReleaseBuffer(textureBuffer);
      }
    }

    @Override
    public void onDestroy(TextureBufferImpl textureBuffer) {
      returnTextureFrame();
      if (frameRefMonitor != null) {
        frameRefMonitor.onDestroyBuffer(textureBuffer);
      }
    }
  };

  private final Handler handler;
  private final EglBase eglBase;
  private final SurfaceTexture surfaceTexture;
  private final int oesTextureId;
  private final YuvConverter yuvConverter;
  @Nullable private final TimestampAligner timestampAligner;
  private final FrameRefMonitor frameRefMonitor;

  // These variables are only accessed from the |handler| thread.
  @Nullable private VideoSink listener;
  // The possible states of this class.
  private boolean hasPendingTexture;
  private volatile boolean isTextureInUse;
  private boolean isQuitting;
  private int frameRotation;
  private int textureWidth;
  private int textureHeight;
  // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
  // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
  @Nullable private VideoSink pendingListener;
  final Runnable setListenerRunnable = new Runnable() {
    @Override
    public void run() {
      Logging.d(TAG, "Setting listener to " + pendingListener);
      listener = pendingListener;
      pendingListener = null;
      // May have a pending frame from the previous capture session - drop it.
      if (hasPendingTexture) {
        // Calling updateTexImage() is necessary in order to receive new frames.
        updateTexImage();
        hasPendingTexture = false;
      }
    }
  };

  private SurfaceTextureHelper(Context sharedContext, Handler handler, boolean alignTimestamps,
      YuvConverter yuvConverter, FrameRefMonitor frameRefMonitor) {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
    }
    this.handler = handler;
    this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
    this.yuvConverter = yuvConverter;
    this.frameRefMonitor = frameRefMonitor;

    eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
    try {
      // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();
    } catch (RuntimeException e) {
      // Clean up before rethrowing the exception.
      eglBase.release();
      handler.getLooper().quit();
      throw e;
    }

    oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    surfaceTexture = new SurfaceTexture(oesTextureId);
    setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
      hasPendingTexture = true;
      tryDeliverTextureFrame();
    }, handler);
  }

  @TargetApi(21)
  private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
      SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      surfaceTexture.setOnFrameAvailableListener(listener, handler);
    } else {
      // The documentation states that the listener will be called on an arbitrary thread, but in
      // practice, it is always the thread on which the SurfaceTexture was constructed. There are
      // assertions in place in case this ever changes. For API >= 21, we use the new API to
      // explicitly specify the handler.
      surfaceTexture.setOnFrameAvailableListener(listener);
    }
  }

  /**
   * Start to stream textures to the given |listener|. If you need to change listener, you need to
   * call stopListening() first.
   */
  public void startListening(final VideoSink listener) {
    if (this.listener != null || this.pendingListener != null) {
      throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
    }
    this.pendingListener = listener;
    handler.post(setListenerRunnable);
  }

  /**
   * Stop listening. The listener set in startListening() is guaranteed to not receive any more
   * onFrame() callbacks after this function returns.
   */
  public void stopListening() {
    Logging.d(TAG, "stopListening()");
    handler.removeCallbacks(setListenerRunnable);
    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      listener = null;
      pendingListener = null;
    });
  }

  /**
   * Use this function to set the texture size. Note, do not call setDefaultBufferSize() yourself
   * since this class needs to be aware of the texture size.
   */
  public void setTextureSize(int textureWidth, int textureHeight) {
    if (textureWidth <= 0) {
      throw new IllegalArgumentException("Texture width must be positive, but was " + textureWidth);
    }
    if (textureHeight <= 0) {
      throw new IllegalArgumentException(
          "Texture height must be positive, but was " + textureHeight);
    }
    surfaceTexture.setDefaultBufferSize(textureWidth, textureHeight);
    handler.post(() -> {
      this.textureWidth = textureWidth;
      this.textureHeight = textureHeight;
      tryDeliverTextureFrame();
    });
  }

  /**
   * Forces a frame to be produced. If no new frame is available, the last frame is sent to the
   * listener again.
   */
  public void forceFrame() {
    handler.post(() -> {
      hasPendingTexture = true;
      tryDeliverTextureFrame();
    });
  }

  /** Set the rotation of the delivered frames. */
  public void setFrameRotation(int rotation) {
    handler.post(() -> this.frameRotation = rotation);
  }

  /**
   * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
   * producer such as a camera or decoder.
   */
  public SurfaceTexture getSurfaceTexture() {
    return surfaceTexture;
  }

  /** Retrieve the handler that calls onFrame(). This handler is valid until dispose() is called. */
  public Handler getHandler() {
    return handler;
  }

  /**
   * This function is called when the texture frame is released. Only one texture frame can be in
   * flight at once, so this function must be called before a new frame is delivered.
   */
  private void returnTextureFrame() {
    handler.post(() -> {
      isTextureInUse = false;
      if (isQuitting) {
        release();
      } else {
        tryDeliverTextureFrame();
      }
    });
  }

  public boolean isTextureInUse() {
    return isTextureInUse;
  }

  /**
   * Call disconnect() to stop receiving frames. OpenGL resources are released and the handler is
   * stopped when the texture frame has been released. You are guaranteed to not receive any more
   * onFrame() after this function returns.
   */
  public void dispose() {
    Logging.d(TAG, "dispose()");
    ThreadUtils.invokeAtFrontUninterruptibly(handler, () -> {
      isQuitting = true;
      if (!isTextureInUse) {
        release();
      }
    });
  }

  /**
   * Posts to the correct thread to convert |textureBuffer| to I420.
   *
   * @deprecated Use toI420() instead.
   */
  @Deprecated
  public VideoFrame.I420Buffer textureToYuv(final TextureBuffer textureBuffer) {
    return textureBuffer.toI420();
  }

  private void updateTexImage() {
    // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
    // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
    // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
    synchronized (EglBase.lock) {
      surfaceTexture.updateTexImage();
    }
  }

  private void tryDeliverTextureFrame() {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("Wrong thread.");
    }
    if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
      return;
    }
    if (textureWidth == 0 || textureHeight == 0) {
      // Information about the resolution needs to be provided by a call to setTextureSize() before
      // frames are produced.
      Logging.w(TAG, "Texture size has not been set.");
      return;
    }
    isTextureInUse = true;
    hasPendingTexture = false;

    updateTexImage();

    final float[] transformMatrix = new float[16];
    surfaceTexture.getTransformMatrix(transformMatrix);
    long timestampNs = surfaceTexture.getTimestamp();
    if (timestampAligner != null) {
      timestampNs = timestampAligner.translateTimestamp(timestampNs);
    }
    final VideoFrame.TextureBuffer buffer =
        new TextureBufferImpl(textureWidth, textureHeight, TextureBuffer.Type.OES, oesTextureId,
            RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix), handler,
            yuvConverter, textureRefCountMonitor);
    if (frameRefMonitor != null) {
      frameRefMonitor.onNewBuffer(buffer);
    }
    final VideoFrame frame = new VideoFrame(buffer, frameRotation, timestampNs);
    listener.onFrame(frame);
    frame.release();
  }

  private void release() {
    if (handler.getLooper().getThread() != Thread.currentThread()) {
      throw new IllegalStateException("Wrong thread.");
    }
    if (isTextureInUse || !isQuitting) {
      throw new IllegalStateException("Unexpected release.");
    }
    yuvConverter.release();
    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
    surfaceTexture.release();
    eglBase.release();
    handler.getLooper().quit();
    if (timestampAligner != null) {
      timestampAligner.dispose();
    }
  }
}