/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.cts.verifier.camera.its;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.ConditionVariable;
import android.os.Handler;
import android.util.Size;
import android.view.Surface;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;

/**
 * Class to record a preview-like stream. It sets up a SurfaceTexture that the camera can write
 * to, and copies the camera frames over to a MediaRecorder or MediaCodec surface.
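 *
 * <p>Illustrative usage sketch (hypothetical variable names; the capture-session wiring that
 * feeds {@link #getCameraSurface()} is assumed and not shown):
 * <pre>{@code
 * // hlg10Enabled = false selects the MediaRecorder path; true selects MediaCodec (HLG10).
 * PreviewRecorder recorder = new PreviewRecorder(cameraId, previewSize, maxFps,
 *         sensorOrientation, outputFile, handler, false, context);
 * // Add recorder.getCameraSurface() as an output target of the camera capture session, then:
 * recorder.startRecording();
 * // ... camera streams preview frames into the surface ...
 * recorder.stopRecording();
 * List<Long> timestamps = recorder.getFrameTimeStamps();
 * recorder.close();
 * }</pre>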
 */
class PreviewRecorder implements AutoCloseable {
    private static final String TAG = PreviewRecorder.class.getSimpleName();

    // Frame capture timeout duration in milliseconds.
    private static final int FRAME_CAPTURE_TIMEOUT_MS = 2000; // 2 seconds

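    // Values written to the fragment shader's paintIt uniform: GREEN_PAINT replaces the camera
    // frame with solid green, NO_PAINT copies the camera frame unchanged.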
    private static final int GREEN_PAINT = 1;
    private static final int NO_PAINT = 0;

    // Simple Vertex Shader that rotates the texture before passing it to Fragment shader.
    private static final String VERTEX_SHADER = String.join(
            "\n",
            "",
            "attribute vec4 vPosition;",
            "uniform mat4 texMatrix;", // provided by SurfaceTexture
            "uniform mat2 texRotMatrix;", // optional rotation matrix, from Sensor Orientation
            "varying vec2 vTextureCoord;",
            "void main() {",
            "    gl_Position = vPosition;",
            "    vec2 texCoords = texRotMatrix * vPosition.xy;", // rotate the coordinates before
                                                                 // applying transform from
                                                                 // SurfaceTexture
            "    texCoords = (texCoords + vec2(1.0, 1.0)) / 2.0;", // Texture coordinates
                                                                   // have range [0, 1]
            "    vTextureCoord = (texMatrix * vec4(texCoords, 0.0, 1.0)).xy;",
            "}",
            ""
    );

    // Simple Fragment Shader that samples the passed texture at a given coordinate.
    private static final String FRAGMENT_SHADER = String.join(
            "\n",
            "",
            "#extension GL_OES_EGL_image_external : require",
            "precision mediump float;",
            "varying vec2 vTextureCoord;",
            "uniform samplerExternalOES sTexture;", // implicitly populated by SurfaceTexture
            "uniform int paintIt;",
            "void main() {",
            "    if (paintIt == 1) {",
            "        gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0);",     // green frame
            "    } else {",
            "        gl_FragColor = texture2D(sTexture, vTextureCoord);",   // copy frame
            "    }",
            "}",
            ""
    );

    // List of (x, y) vertices of a rectangle that fills the entire viewport, in triangle
    // strip order.
    private static final float[] FULLSCREEN_VERTICES = {
            -1, -1, // bottom left
            1, -1, // bottom right
            -1,  1, // top left
            1,  1, // top right
    };

    private boolean mRecordingStarted = false; // tracks if the MediaRecorder/MediaCodec instance
                                               // was already used to record a video.

    // Lock to protect reads/writes to the various Surfaces below.
    private final Object mRecordLock = new Object();
    // Tracks if the mMediaRecorder/mMediaCodec is currently recording. Protected by mRecordLock.
    private volatile boolean mIsRecording = false;
    private boolean mIsPaintGreen = false;

    private final Size mPreviewSize;
    private final int mMaxFps;
    private final Handler mHandler;

    private Surface mRecordSurface; // MediaRecorder/MediaCodec source. EGL writes to this surface

    private MediaRecorder mMediaRecorder;

    private MediaCodec mMediaCodec;
    private MediaMuxer mMediaMuxer;
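    // Lock object that stopRecordingLocked() waits on after signaling end of stream; expected to
    // be notified by ItsUtils.MediaCodecListener once encoding has finished.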
    private Object mMediaCodecCondition;

    private SurfaceTexture mCameraTexture; // Handles writing frames from camera as texture to
                                           // the GLSL program.
    private Surface mCameraSurface; // Surface corresponding to mCameraTexture that the
                                    // Camera HAL writes to

    private int mGLShaderProgram = 0;
    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLSurface mEGLRecorderSurface; // EGL Surface corresponding to mRecordSurface

    private int mVPositionLoc;
    private int mTexMatrixLoc;
    private int mTexRotMatrixLoc;
    private int mPaintItLoc;

    private final float[] mTexRotMatrix; // length = 4
    private final float[] mTransformMatrix = new float[16];

    private final List<Long> mFrameTimeStamps = new ArrayList<>();

    /**
     * Initializes MediaRecorder/MediaCodec and EGL context. The recorded video will be stored
     * in {@code outputFile}.
     */
    PreviewRecorder(int cameraId, Size previewSize, int maxFps, int sensorOrientation,
            String outputFile, Handler handler, boolean hlg10Enabled, Context context)
            throws ItsException {
        // Ensure that we can record the given size
        int maxSupportedResolution = ItsUtils.RESOLUTION_TO_CAMCORDER_PROFILE
                                        .stream()
                                        .map(p -> p.first)
                                        .max(Integer::compareTo)
                                        .orElse(0);
        int currentResolution = previewSize.getHeight() * previewSize.getWidth();
        if (currentResolution > maxSupportedResolution) {
            throw new ItsException("Requested preview size is greater than maximum "
                    + "supported preview size.");
        }

        mHandler = handler;
        mPreviewSize = previewSize;
        mMaxFps = maxFps;
        // rotate the texture as needed by the sensor orientation
        mTexRotMatrix = getRotationMatrix(sensorOrientation);

        ConditionVariable cv = new ConditionVariable();
        cv.close();

        // Initialize fields on the passed handler's thread to bind the EGL context to that thread.
        mHandler.post(() -> {
            try {
                initPreviewRecorder(cameraId, outputFile, hlg10Enabled, context);
            } catch (ItsException e) {
                Logt.e(TAG, "Failed to init preview recorder", e);
                throw new ItsRuntimeException("Failed to init preview recorder", e);
            } finally {
                cv.open();
            }
        });
        // Wait for up to 1s for the handler to finish initializing
        if (!cv.block(1000)) {
            throw new ItsException("Preview recorder did not initialize in 1000ms");
        }
    }

    private void initPreviewRecorder(int cameraId, String outputFile,
            boolean hlg10Enabled, Context context) throws ItsException {

        // order of initialization is important
        if (hlg10Enabled) {
            Logt.i(TAG, "HLG10 Enabled, using MediaCodec");
            setupMediaCodec(cameraId, outputFile, context);
        } else {
            Logt.i(TAG, "HLG10 Disabled, using MediaRecorder");
            setupMediaRecorder(cameraId, outputFile, context);
        }

        initEGL(); // requires recording surfaces to be set up
        compileShaders(); // requires EGL context to be set up
        setupCameraTexture(); // requires EGL context to be set up

        mCameraTexture.setOnFrameAvailableListener(surfaceTexture -> {
            // Synchronized on mRecordLock to ensure that all surfaces are valid while encoding
            // frames. All surfaces should be valid for as long as mIsRecording is true.
            synchronized (mRecordLock) {
                if (surfaceTexture.isReleased()) {
                    return; // surface texture already cleaned up, do nothing.
                }

                // Bind EGL context to the current thread (just in case the
                // executing thread changes)
                EGL14.eglMakeCurrent(mEGLDisplay, mEGLRecorderSurface,
                        mEGLRecorderSurface, mEGLContext);
                surfaceTexture.updateTexImage(); // update texture to the latest frame

                // Only update the frame if the recorder is currently recording.
                if (!mIsRecording) {
                    return;
                }
                try {
                    copyFrameToRecordSurface();
                    // Capture results are not collected for padded green frames
                    if (mIsPaintGreen) {
                        Logt.v(TAG, "Recorded frame# " + mFrameTimeStamps.size()
                                + " timestamp = " + surfaceTexture.getTimestamp()
                                + " with color. mIsPaintGreen = " + mIsPaintGreen);
                    } else {
                        mFrameTimeStamps.add(surfaceTexture.getTimestamp());
                        Logt.v(TAG, "Recorded frame# " + mFrameTimeStamps.size()
                                + " timestamp = " + surfaceTexture.getTimestamp());
                    }
                } catch (ItsException e) {
                    Logt.e(TAG, "Failed to copy texture to recorder.", e);
                    throw new ItsRuntimeException("Failed to copy texture to recorder.", e);
                }
            }
        }, mHandler);
    }

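    /**
     * Configures a MediaRecorder (used when HLG10 is disabled) that encodes frames drawn onto
     * {@code mRecordSurface} into {@code outputFile}.
     */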
    private void setupMediaRecorder(int cameraId, String outputFile, Context context)
            throws ItsException {
        mRecordSurface = MediaCodec.createPersistentInputSurface();

        mMediaRecorder = new MediaRecorder(context);
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT);

        mMediaRecorder.setVideoSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
        mMediaRecorder.setVideoEncodingBitRate(
                ItsUtils.calculateBitrate(cameraId, mPreviewSize, mMaxFps));
        mMediaRecorder.setInputSurface(mRecordSurface);
        mMediaRecorder.setVideoFrameRate(mMaxFps);
        mMediaRecorder.setOutputFile(outputFile);

        try {
            mMediaRecorder.prepare();
        } catch (IOException e) {
            throw new ItsException("Error preparing MediaRecorder", e);
        }
    }

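    /**
     * Configures a MediaCodec encoder and MediaMuxer (used when HLG10 is enabled) that encode
     * frames drawn onto {@code mRecordSurface} into {@code outputFilePath}.
     */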
    private void setupMediaCodec(int cameraId, String outputFilePath, Context context)
            throws ItsException {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        int videoBitRate = ItsUtils.calculateBitrate(cameraId, mPreviewSize, mMaxFps);
        MediaFormat format = ItsUtils.initializeHLG10Format(mPreviewSize, videoBitRate, mMaxFps);
        String codecName = list.findEncoderForFormat(format);
        assert (codecName != null);

        try {
            mMediaMuxer = new MediaMuxer(outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            throw new ItsException("Error preparing the MediaMuxer.", e);
        }

        try {
            mMediaCodec = MediaCodec.createByCodecName(codecName);
        } catch (IOException e) {
            throw new ItsException("Error preparing the MediaCodec.", e);
        }

        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodecCondition = new Object();
        mMediaCodec.setCallback(
                new ItsUtils.MediaCodecListener(mMediaMuxer, mMediaCodecCondition), mHandler);

        mRecordSurface = mMediaCodec.createInputSurface();
        assert (mRecordSurface != null);
    }

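    /**
     * Sets up the EGL display, an OpenGL ES 2.0 context, and a recordable window surface backed
     * by {@code mRecordSurface}, then binds the context to the current (handler) thread.
     */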
    private void initEGL() throws ItsException {
        // set up EGL Display
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new ItsException("Unable to get EGL display");
        }

        int[] version = {0, 0};
        if (!EGL14.eglInitialize(mEGLDisplay, version, /* majorOffset= */0,
                version, /* minorOffset= */1)) {
            mEGLDisplay = null;
            throw new ItsException("Unable to initialize EGL14");
        }

        int[] configAttribList = {
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_DEPTH_SIZE, 0,
                EGL14.EGL_STENCIL_SIZE, 0,
                EGLExt.EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_NONE
        };

        // set up EGL Config
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = {1};
        EGL14.eglChooseConfig(mEGLDisplay, configAttribList, 0, configs,
                0, configs.length, numConfigs, 0);
        if (configs[0] == null) {
            throw new ItsException("Unable to initialize EGL config");
        }

        EGLConfig eglConfig = configs[0];

        int[] contextAttribList = { EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE };

        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, eglConfig, EGL14.EGL_NO_CONTEXT,
                contextAttribList, 0);
        if (mEGLContext == EGL14.EGL_NO_CONTEXT) {
            throw new ItsException("Failed to create EGL context");
        }

        int[] clientVersion = {0};
        EGL14.eglQueryContext(mEGLDisplay, mEGLContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
                clientVersion, /* offset= */0);
        Logt.i(TAG, "EGLContext created, client version " + clientVersion[0]);

        // Create EGL Surface to write to the recording Surface.
        int[] surfaceAttribs = {EGL14.EGL_NONE};
        mEGLRecorderSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, eglConfig, mRecordSurface,
                surfaceAttribs, /* offset= */0);
        if (mEGLRecorderSurface == EGL14.EGL_NO_SURFACE) {
            throw new ItsException("Failed to create EGL recorder surface");
        }

        // Bind EGL context to the current (handler) thread.
        EGL14.eglMakeCurrent(mEGLDisplay, mEGLRecorderSurface, mEGLRecorderSurface, mEGLContext);
    }

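    /**
     * Creates the SurfaceTexture (backed by an external OES texture in the EGL context) and the
     * Surface that the camera writes preview frames into.
     */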
    private void setupCameraTexture() throws ItsException {
        mCameraTexture = new SurfaceTexture(createTexture());
        mCameraTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        mCameraSurface = new Surface(mCameraTexture);
    }

    /**
     * Compiles the vertex and fragment shaders into a shader program, and sets up the location
     * fields that will be written to later.
     */
    private void compileShaders() throws ItsException {
        int vertexShader = createShader(GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = createShader(GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

        mGLShaderProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(mGLShaderProgram, vertexShader);
        GLES20.glAttachShader(mGLShaderProgram, fragmentShader);
        GLES20.glLinkProgram(mGLShaderProgram);

        int[] linkStatus = {0};
        GLES20.glGetProgramiv(mGLShaderProgram, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] == 0) {
            String msg = "Could not link program: " + GLES20.glGetProgramInfoLog(mGLShaderProgram);
            GLES20.glDeleteProgram(mGLShaderProgram);
            throw new ItsException(msg);
        }

        mVPositionLoc = GLES20.glGetAttribLocation(mGLShaderProgram, "vPosition");
        mTexMatrixLoc = GLES20.glGetUniformLocation(mGLShaderProgram, "texMatrix");
        mTexRotMatrixLoc = GLES20.glGetUniformLocation(mGLShaderProgram, "texRotMatrix");
        mPaintItLoc = GLES20.glGetUniformLocation(mGLShaderProgram, "paintIt");

        GLES20.glUseProgram(mGLShaderProgram);
        assertNoGLError("glUseProgram");
    }

    /**
     * Creates a new OpenGL ES texture that can be populated by {@link SurfaceTexture} and returns
     * the corresponding ID. Throws {@link ItsException} if there is an error creating the texture.
     */
    private int createTexture() throws ItsException {
        IntBuffer buffer = IntBuffer.allocate(1);
        GLES20.glGenTextures(1, buffer);
        int texId = buffer.get(0);

        // This flags the texture to be implicitly populated by SurfaceTexture
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texId);

        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);

        boolean isTexture = GLES20.glIsTexture(texId);
        if (!isTexture) {
            throw new ItsException("Failed to create texture id. Returned texture id: " + texId);
        }

        return texId;
    }

    /**
     * Compiles the given {@code source} as a shader of the provided {@code type}. Throws an
     * {@link ItsException} if there are errors while compiling the shader.
     */
    private int createShader(int type, String source) throws ItsException {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);

        int[] compiled = new int[]{0};
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == GLES20.GL_FALSE) {
            String msg = "Could not compile shader " + type + ": "
                    + GLES20.glGetShaderInfoLog(shader);
            GLES20.glDeleteShader(shader);
            throw new ItsException(msg);
        }

        return shader;
    }

    /**
     * Throws an {@link ItsException} if the previous GL call resulted in an error. No-op otherwise.
     */
    private void assertNoGLError(String op) throws ItsException {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            String msg = op + ": glError 0x" + Integer.toHexString(error);
            throw new ItsException(msg);
        }
    }

    /**
     * Copies a frame encoded as a texture by {@code mCameraTexture} to
     * {@code mRecordSurface} by running our simple shader program for one frame that draws
     * to {@code mEGLRecorderSurface}.
     */
    private void copyFrameToRecordSurface() throws ItsException {
        // Clear color buffer
        GLES20.glClearColor(0f, 0f, 0f, 1f);
        assertNoGLError("glClearColor");
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        assertNoGLError("glClear");

        // read texture transformation matrix from SurfaceTexture and write it to GLSL program.
        mCameraTexture.getTransformMatrix(mTransformMatrix);
        GLES20.glUniformMatrix4fv(mTexMatrixLoc, /* count= */1, /* transpose= */false,
                mTransformMatrix, /* offset= */0);
        assertNoGLError("glUniformMatrix4fv");

        // write texture rotation matrix to GLSL program
        GLES20.glUniformMatrix2fv(mTexRotMatrixLoc, /* count= */1, /* transpose= */false,
                mTexRotMatrix, /* offset= */0);
        assertNoGLError("glUniformMatrix2fv");

        GLES20.glUniform1i(mPaintItLoc, mIsPaintGreen ? GREEN_PAINT : NO_PAINT);
        assertNoGLError("glUniform1i");

        // write vertices of the full-screen rectangle to the GLSL program
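        // glVertexAttribPointer expects a direct buffer in native byte order when passing
        // client-side vertex data, hence the allocateDirect/nativeOrder setup below.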
        ByteBuffer nativeBuffer = ByteBuffer.allocateDirect(
                  FULLSCREEN_VERTICES.length * Float.BYTES);
        nativeBuffer.order(ByteOrder.nativeOrder());
        FloatBuffer vertexBuffer = nativeBuffer.asFloatBuffer();
        vertexBuffer.put(FULLSCREEN_VERTICES);
        nativeBuffer.position(0);
        vertexBuffer.position(0);

        GLES20.glEnableVertexAttribArray(mVPositionLoc);
        assertNoGLError("glEnableVertexAttribArray");
        GLES20.glVertexAttribPointer(mVPositionLoc, /* size= */ 2, GLES20.GL_FLOAT,
                /* normalized= */ false, /* stride= */ 8, vertexBuffer);
        assertNoGLError("glVertexAttribPointer");

        // viewport size should match the frame dimensions to prevent stretching/cropping
        GLES20.glViewport(0, 0, mPreviewSize.getWidth(), mPreviewSize.getHeight());
        assertNoGLError("glViewport");

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /* first= */0, /* count= */4);
        assertNoGLError("glDrawArrays");

        if (!EGL14.eglSwapBuffers(mEGLDisplay, mEGLRecorderSurface)) {
            throw new ItsException("EglSwapBuffers failed to copy buffer to recording surface");
        }
    }

    /**
     * Returns a column-major 2D rotation matrix that can be fed directly to GLSL.
     * This matrix rotates around the origin.
     */
    private static float[] getRotationMatrix(int orientationDegrees) {
        double rads = orientationDegrees * Math.PI / 180;
        // Rotate clockwise because sensor orientation assumes clockwise rotation.
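        // For example, 90 degrees yields {0, -1, 1, 0}, i.e. columns (0, -1) and (1, 0),
        // which map (x, y) to (y, -x): a 90 degree clockwise rotation.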
        return new float[] {
                (float) Math.cos(rads), (float) -Math.sin(rads),
                (float) Math.sin(rads), (float) Math.cos(rads)
        };
    }

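    /** Returns the Surface that the camera should write preview frames into. */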
    Surface getCameraSurface() {
        return mCameraSurface;
    }

    /**
     * Copies a frame encoded as a texture by {@code mCameraTexture} to a Bitmap by running our
     * simple shader program for one frame, then converts the frame to a JPEG and writes the
     * JPEG bytes to {@code outputStream}.
     *
     * This method should not be called while recording.
     *
     * @param outputStream The stream to which the captured JPEG image bytes are written
     */
    void getFrame(OutputStream outputStream) throws ItsException {
        synchronized (mRecordLock) {
            if (mIsRecording) {
                throw new ItsException("Attempting to get frame while recording is active is an "
                        + "invalid combination.");
            }

            ConditionVariable cv = new ConditionVariable();
            cv.close();
            // GL copy texture to JPEG should happen on the thread the EGL context was bound to
            mHandler.post(() -> {
                try {
                    copyFrameToRecordSurface();

                    ByteBuffer frameBuffer = ByteBuffer.allocateDirect(
                            mPreviewSize.getWidth() * mPreviewSize.getHeight() * 4);
                    frameBuffer.order(ByteOrder.nativeOrder());

                    GLES20.glReadPixels(
                            0,
                            0,
                            mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(),
                            GLES20.GL_RGBA,
                            GLES20.GL_UNSIGNED_BYTE,
                            frameBuffer);
                    Bitmap frame = Bitmap.createBitmap(
                            mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(),
                            Bitmap.Config.ARGB_8888);
                    frame.copyPixelsFromBuffer(frameBuffer);
                    frame.compress(Bitmap.CompressFormat.JPEG, 100, outputStream);
                } catch (ItsException e) {
                    Logt.e(TAG, "Could not get frame from texture", e);
                    throw new ItsRuntimeException("Failed to get frame from texture", e);
                } finally {
                    cv.open();
                }
            });

            // Wait for up to two seconds for jpeg frame capture.
            if (!cv.block(FRAME_CAPTURE_TIMEOUT_MS)) {
                throw new ItsException("Frame capture timed out");
            }
        }
    }

    /**
     * Starts recording frames from mCameraSurface. This method should
     * only be called once. Throws {@link ItsException} on subsequent calls.
     */
    void startRecording() throws ItsException {
        if (mRecordingStarted) {
            throw new ItsException("Attempting to record on a stale PreviewRecorder. "
                    + "Create a new instance instead.");
        }
        mRecordingStarted = true;
        Logt.i(TAG, "Starting Preview Recording.");
        synchronized (mRecordLock) {
            mIsRecording = true;
            if (mMediaRecorder != null) {
                mMediaRecorder.start();
            } else {
                mMediaCodec.start();
            }
        }
    }

    /**
     * Overrides camera frames with green frames if the {@code recordGreenFrames} parameter is
     * true. Green frames are recorded as padding to work around a MediaRecorder issue of missing
     * frames at the end of a recording.
     */
    void overrideCameraFrames(boolean recordGreenFrames) throws ItsException {
        Logt.i(TAG, "Overriding camera frames with green frames: " + recordGreenFrames);
        synchronized (mRecordLock) {
            mIsPaintGreen = recordGreenFrames;
        }
    }

    /**
     * Stops recording frames.
     */
    void stopRecording() throws ItsException {
        Logt.i(TAG, "Stopping Preview Recording.");
        synchronized (mRecordLock) {
            stopRecordingLocked();
        }
    }

    private void stopRecordingLocked() throws ItsException {
        mIsRecording = false;
        if (mMediaRecorder != null) {
            mMediaRecorder.stop();
        } else {
            mMediaCodec.signalEndOfInputStream();

            synchronized (mMediaCodecCondition) {
                try {
                    mMediaCodecCondition.wait(ItsUtils.SESSION_CLOSE_TIMEOUT_MS);
                } catch (InterruptedException e) {
                    throw new ItsException("Unexpected InterruptedException: ", e);
                }
            }

            mMediaMuxer.stop();
            mMediaCodec.stop();
        }
    }

    @Override
    public void close() throws ItsException {
        // synchronized to prevent reads and writes to surfaces while they are being released.
        synchronized (mRecordLock) {
            if (mIsRecording) {
                Logt.e(TAG, "Preview recording was not stopped before closing.");
                stopRecordingLocked();
            }
            mCameraSurface.release();
            mCameraTexture.release();
            if (mMediaRecorder != null) {
                mMediaRecorder.release();
            }
            if (mMediaCodec != null) {
                mMediaCodec.release();
            }
            if (mMediaMuxer != null) {
                mMediaMuxer.release();
            }
            mRecordSurface.release();

            ConditionVariable cv = new ConditionVariable();
            cv.close();
            // GL cleanup should happen on the thread the EGL context was bound to
            mHandler.post(() -> {
                try {
                    cleanupEgl();
                } finally {
                    cv.open();
                }
            });

            // Wait for up to a second for egl to clean up.
            // Since this is clean up, do nothing if the handler takes longer than 1s.
            cv.block(/*timeoutMs=*/ 1000);
        }
    }

    private void cleanupEgl() {
        if (mGLShaderProgram == 0) {
            // egl program was never set up, no cleanup needed
            return;
        }

        Logt.i(TAG, "Cleaning up EGL Context");
        GLES20.glDeleteProgram(mGLShaderProgram);
        // Release the egl surfaces and context from the handler
        EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE,
                EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        EGL14.eglDestroySurface(mEGLDisplay, mEGLRecorderSurface);
        EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);

        EGL14.eglTerminate(mEGLDisplay);
    }

    /**
     * Returns the camera frames' timestamps. Should only be called after recording completes.
     */
    public List<Long> getFrameTimeStamps() throws IllegalStateException {
        synchronized (mRecordLock) {
            if (mIsRecording) {
                throw new IllegalStateException("Can't return timestamps during recording.");
            }
            return mFrameTimeStamps;
        }
    }
}