1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package com.android.cts.verifier.camera.its;
18 
19 import static android.hardware.camera2.cts.CameraTestUtils.MaxStreamSizes;
20 
21 import static org.mockito.Matchers.eq;
22 import static org.mockito.Mockito.mock;
23 import static org.mockito.Mockito.timeout;
24 import static org.mockito.Mockito.verify;
25 
26 import android.annotation.NonNull;
27 import android.app.Activity;
28 import android.app.Notification;
29 import android.app.NotificationChannel;
30 import android.app.NotificationManager;
31 import android.app.Service;
32 import android.content.Context;
33 import android.content.Intent;
34 import android.content.pm.PackageManager;
35 import android.content.pm.ServiceInfo;
36 import android.graphics.Bitmap;
37 import android.graphics.BitmapFactory;
38 import android.graphics.ColorSpace;
39 import android.graphics.ImageFormat;
40 import android.graphics.Rect;
41 import android.graphics.SurfaceTexture;
42 import android.hardware.HardwareBuffer;
43 import android.hardware.Sensor;
44 import android.hardware.SensorEvent;
45 import android.hardware.SensorEventListener;
46 import android.hardware.SensorManager;
47 import android.hardware.SensorPrivacyManager;
48 import android.hardware.camera2.CameraAccessException;
49 import android.hardware.camera2.CameraCaptureSession;
50 import android.hardware.camera2.CameraCharacteristics;
51 import android.hardware.camera2.CameraDevice;
52 import android.hardware.camera2.CameraExtensionCharacteristics;
53 import android.hardware.camera2.CameraExtensionSession;
54 import android.hardware.camera2.CameraManager;
55 import android.hardware.camera2.CameraMetadata;
56 import android.hardware.camera2.CaptureFailure;
57 import android.hardware.camera2.CaptureRequest;
58 import android.hardware.camera2.CaptureResult;
59 import android.hardware.camera2.DngCreator;
60 import android.hardware.camera2.TotalCaptureResult;
61 import android.hardware.camera2.cts.CameraTestUtils;
62 import android.hardware.camera2.cts.PerformanceTest;
63 import android.hardware.camera2.cts.helpers.StaticMetadata;
64 import android.hardware.camera2.params.ColorSpaceProfiles;
65 import android.hardware.camera2.params.DynamicRangeProfiles;
66 import android.hardware.camera2.params.ExtensionSessionConfiguration;
67 import android.hardware.camera2.params.InputConfiguration;
68 import android.hardware.camera2.params.MeteringRectangle;
69 import android.hardware.camera2.params.OutputConfiguration;
70 import android.hardware.camera2.params.SessionConfiguration;
71 import android.hardware.camera2.params.StreamConfigurationMap;
72 import android.media.AudioAttributes;
73 import android.media.CamcorderProfile;
74 import android.media.Image;
75 import android.media.ImageReader;
76 import android.media.ImageWriter;
77 import android.media.MediaCodec;
78 import android.media.MediaCodecList;
79 import android.media.MediaFormat;
80 import android.media.MediaMuxer;
81 import android.media.MediaRecorder;
82 import android.os.Build;
83 import android.os.Bundle;
84 import android.os.ConditionVariable;
85 import android.os.Handler;
86 import android.os.HandlerThread;
87 import android.os.IBinder;
88 import android.os.Looper;
89 import android.os.SystemClock;
90 import android.os.SystemProperties;
91 import android.os.Vibrator;
92 import android.provider.MediaStore;
93 import android.util.Log;
94 import android.util.Pair;
95 import android.util.Range;
96 import android.util.Rational;
97 import android.util.Size;
98 import android.util.SparseArray;
99 import android.view.Surface;
100 import android.view.SurfaceHolder;
101 import android.view.WindowManager;
102 import android.view.WindowMetrics;
103 
104 import androidx.test.InstrumentationRegistry;
105 
106 import com.android.compatibility.common.util.ReportLog.Metric;
107 import com.android.cts.verifier.R;
108 import com.android.cts.verifier.camera.performance.CameraTestInstrumentation;
109 import com.android.cts.verifier.camera.performance.CameraTestInstrumentation.MetricListener;
110 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
111 import com.android.ex.camera2.blocking.BlockingExtensionSessionCallback;
112 import com.android.ex.camera2.blocking.BlockingSessionCallback;
113 import com.android.ex.camera2.blocking.BlockingStateCallback;
114 
115 import com.google.common.primitives.Ints;
116 import com.google.common.primitives.Longs;
117 
118 import org.json.JSONArray;
119 import org.json.JSONException;
120 import org.json.JSONObject;
121 import org.junit.runner.JUnitCore;
122 import org.junit.runner.Request;
123 import org.junit.runner.Result;
124 
125 import java.io.BufferedReader;
126 import java.io.ByteArrayOutputStream;
127 import java.io.File;
128 import java.io.IOException;
129 import java.io.InputStreamReader;
130 import java.io.OutputStream;
131 import java.net.ServerSocket;
132 import java.net.Socket;
133 import java.nio.ByteBuffer;
134 import java.nio.ByteOrder;
135 import java.nio.FloatBuffer;
136 import java.nio.charset.Charset;
137 import java.text.SimpleDateFormat;
138 import java.util.ArrayList;
139 import java.util.Arrays;
140 import java.util.Collections;
141 import java.util.Comparator;
142 import java.util.Date;
143 import java.util.HashMap;
144 import java.util.HashSet;
145 import java.util.LinkedList;
146 import java.util.List;
147 import java.util.Locale;
148 import java.util.Map;
149 import java.util.Objects;
150 import java.util.Set;
151 import java.util.concurrent.BlockingQueue;
152 import java.util.concurrent.ConcurrentHashMap;
153 import java.util.concurrent.CountDownLatch;
154 import java.util.concurrent.Executor;
155 import java.util.concurrent.LinkedBlockingDeque;
156 import java.util.concurrent.LinkedBlockingQueue;
157 import java.util.concurrent.Semaphore;
158 import java.util.concurrent.TimeUnit;
159 import java.util.concurrent.atomic.AtomicInteger;
160 import java.util.stream.Collectors;
161 import java.util.stream.IntStream;
162 
163 public class ItsService extends Service implements SensorEventListener {
164     public static final String TAG = ItsService.class.getSimpleName();
165 
    // Version number to keep host/server communication in sync
    // This string must be in sync with python side device.py
    // Updated when interface between script and ItsService is changed
    private final String ITS_SERVICE_VERSION = "1.0";

    private final int SERVICE_NOTIFICATION_ID = 37; // random int that is unique within app
    private NotificationChannel mChannel;

    // Timeouts, in seconds.
    private static final int TIMEOUT_CALLBACK = 20;
    private static final int TIMEOUT_3A = 10;

    // Time given for background requests to warm up pipeline
    private static final long PIPELINE_WARMUP_TIME_MS = 2000;

    // Time given PreviewRecorder to record green buffer frames
    private static final long PADDED_FRAMES_MS = 600;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_IDLE_MS_EXTENSIONS = 20000;
    private static final long TIMEOUT_SESSION_CLOSE = 3000;
    private static final long TIMEOUT_SESSION_READY = 3000;
    private static final long TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS = 10;

    // Timeout to wait for a capture result after the capture buffer has arrived, in ms.
    private static final long TIMEOUT_CAP_RES = 2000;

    // Cap on reader buffers held at once; presumably bounds ImageReader maxImages — confirm
    // with reader creation below in the file.
    private static final int MAX_CONCURRENT_READER_BUFFERS = 10;

    // Supports at most RAW+YUV+JPEG, one surface each, plus optional background stream
    private static final int MAX_NUM_OUTPUT_SURFACES = 4;

    // Performance class R version number
    private static final int PERFORMANCE_CLASS_R = Build.VERSION_CODES.R;

    // Performance class VIC version number
    private static final int PERFORMANCE_CLASS_VIC = Build.VERSION_CODES.VANILLA_ICE_CREAM;

    // TCP port for the host command socket (presumably bound by SocketRunnable — confirm
    // below in the file).
    public static final int SERVERPORT = 6000;

    // Comparison tolerance. NOTE(review): name misspells "epsilon"; kept as-is because
    // unseen parts of this file may reference it.
    private static final float EPISILON = 0.05f;

    // JSON keys used in host<->service command messages; like ITS_SERVICE_VERSION above,
    // these must stay in sync with the host-side Python scripts.
    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String LOCK_AE_KEY = "aeLock";
    public static final String LOCK_AWB_KEY = "awbLock";
    public static final String TRIGGER_KEY = "triggers";
    public static final String PHYSICAL_ID_KEY = "physicalId";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";
    public static final String VIB_PATTERN_KEY = "pattern";
    public static final String EVCOMP_KEY = "evComp";
    public static final String AUTO_FLASH_KEY = "autoFlash";
    public static final String ZOOM_RATIO_KEY = "zoomRatio";
    public static final String AUDIO_RESTRICTION_MODE_KEY = "mode";
    public static final String SETTINGS_KEY = "settings";
    // Poll timeout for camera availability events, in ms.
    public static final int AVAILABILITY_TIMEOUT_MS = 10;
227     private static final HashMap<Integer, String> CAMCORDER_PROFILE_QUALITIES_MAP;
228     static {
229         CAMCORDER_PROFILE_QUALITIES_MAP = new HashMap<>();
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_480P, "480P")230         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_480P, "480P");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_1080P, "1080P")231         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_1080P, "1080P");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2160P, "2160P")232         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2160P, "2160P");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2K, "2k")233         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_2K, "2k");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_4KDCI, "4KDC")234         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_4KDCI, "4KDC");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_720P, "720P")235         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_720P, "720P");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_8KUHD, "8KUHD")236         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_8KUHD, "8KUHD");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_CIF, "CIF")237         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_CIF, "CIF");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_HIGH, "HIGH")238         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_HIGH, "HIGH");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_LOW, "LOW")239         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_LOW, "LOW");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QCIF, "QCIF")240         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QCIF, "QCIF");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QHD, "QHD")241         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QHD, "QHD");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QVGA, "QVGA")242         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_QVGA, "QVGA");
CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_VGA, "VGA")243         CAMCORDER_PROFILE_QUALITIES_MAP.put(CamcorderProfile.QUALITY_VGA, "VGA");
244     }
245 
246     private static final String JPEG_R_FMT = "jpeg_r";
247     private static HashMap<Integer, String> sFormatMap = new HashMap<>();
248     static {
sFormatMap.put(ImageFormat.PRIVATE, "priv")249         sFormatMap.put(ImageFormat.PRIVATE, "priv");
sFormatMap.put(ImageFormat.JPEG, "jpeg")250         sFormatMap.put(ImageFormat.JPEG, "jpeg");
sFormatMap.put(ImageFormat.JPEG_R, JPEG_R_FMT)251         sFormatMap.put(ImageFormat.JPEG_R, JPEG_R_FMT);
sFormatMap.put(ImageFormat.YUV_420_888, "yuv")252         sFormatMap.put(ImageFormat.YUV_420_888, "yuv");
253     }
254 
    // Camera, session, and image-reader state shared across capture commands.
    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private Handler mCameraHandler = null;
    private ItsService.BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateCallback mCameraListener = null;
    private CameraDevice mCamera = null;
    private CameraCaptureSession mSession = null;
    private CameraExtensionSession mExtensionSession = null;
    private ImageReader[] mOutputImageReaders = null;
    private ImageReader mThreeAOutputImageReader = null;
    // Per-output-surface physical camera id and stream use case mappings.
    private SparseArray<String> mPhysicalStreamMap = new SparseArray<>();
    private SparseArray<Long> mStreamUseCaseMap = new SparseArray<>();
    private ImageReader mInputImageReader = null;
    private ImageReader mExtensionPreviewImageReader = null;
    private CameraCharacteristics mCameraCharacteristics = null;
    private CameraExtensionCharacteristics mCameraExtensionCharacteristics = null;
    private HashMap<String, CameraCharacteristics> mPhysicalCameraChars = new HashMap<>();
    private ItsUtils.ItsCameraIdList mItsCameraIdList = null;
    // Declared here so that IntraPreviewAction can access configured CameraCaptureSession
    private IntraPreviewAction mPreviewAction;

    // To reuse mSession, track output configurations, image reader args, and session listener.
    private List<OutputConfiguration> mCaptureOutputConfigs = new ArrayList<>();
    private ImageReaderArgs mImageReaderArgs = ImageReaderArgs.EMPTY;
    private BlockingSessionCallback mSessionListener = null;

    private Vibrator mVibrator = null;

    // One save thread/handler per output surface, plus a dedicated capture-result thread.
    private HandlerThread[] mSaveThreads = new HandlerThread[MAX_NUM_OUTPUT_SURFACES];
    private Handler[] mSaveHandlers = new Handler[MAX_NUM_OUTPUT_SURFACES];
    private HandlerThread mResultThread = null;
    private Handler mResultHandler = null;

    // Set in onDestroy() (and cleared in onCreate()); worker loops check it to exit,
    // e.g. onStartCommand()'s socket wait loop.
    private volatile boolean mThreadExitFlag = false;

    // Host command socket state. mMemoryQuota is sized in openCameraDevice() from the
    // largest YUV output; presumably it bounds mSocketQueueQuota — confirm below in file.
    private volatile ServerSocket mSocket = null;
    private volatile SocketRunnable mSocketRunnableObj = null;
    private Semaphore mSocketQueueQuota = null;
    private int mMemoryQuota = -1;
    private LinkedList<Integer> mInflightImageSizes = new LinkedList<>();
    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = new LinkedBlockingDeque<>();
    private final Object mSocketWriteEnqueueLock = new Object();
    private final Object mSocketWriteDrainLock = new Object();

    // Work queue consumed by SerializerRunnable (started in onCreate()).
    private volatile BlockingQueue<Object[]> mSerializerQueue = new LinkedBlockingDeque<>();

    // Per-format counters and capture-configuration flags for the current capture.
    private final AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
    private AtomicInteger mCountRawOrDng = new AtomicInteger();
    private AtomicInteger mCountRaw10 = new AtomicInteger();
    private AtomicInteger mCountRaw12 = new AtomicInteger();
    private AtomicInteger mCountJpg = new AtomicInteger();
    private AtomicInteger mCountYuv = new AtomicInteger();
    private AtomicInteger mCountCapRes = new AtomicInteger();
    private AtomicInteger mCountRaw10QuadBayer = new AtomicInteger();
    private AtomicInteger mCountRaw10Stats = new AtomicInteger();
    private AtomicInteger mCountRaw10QuadBayerStats = new AtomicInteger();
    private AtomicInteger mCountRaw = new AtomicInteger();
    private AtomicInteger mCountRawQuadBayer = new AtomicInteger();
    private AtomicInteger mCountRawStats = new AtomicInteger();
    private AtomicInteger mCountRawQuadBayerStats = new AtomicInteger();
    private boolean mCaptureRawIsDng;
    private boolean mCaptureRawIsStats;
    private boolean mCaptureRawIsQuadBayer;
    private boolean mCaptureRawIsQuadBayerStats;
    private int mCaptureStatsGridWidth;
    private int mCaptureStatsGridHeight;
    private CaptureResult[] mCaptureResults = null;
    private MediaRecorder mMediaRecorder;
    private Surface mRecordSurface;
    private CaptureRequest.Builder mCaptureRequestBuilder;

    // 3A interlock condition variable (starts opened).
    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);

    // Guards the 3A convergence/lock flags below.
    final Object m3AStateLock = new Object();
    private volatile boolean mConvergedAE = false;
    private volatile boolean mPrecaptureTriggered = false;
    private volatile boolean mConvergeAETriggered = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;
    private volatile boolean mLockedAE = false;
    private volatile boolean mLockedAWB = false;
    private volatile boolean mNeedsLockedAE = false;
    private volatile boolean mNeedsLockedAWB = false;
    private volatile boolean mDoAE = true;
    private volatile boolean mDoAF = true;
    private volatile boolean mSend3AResults = true;
    // Availability events recorded by the callback `ac` below.
    private final LinkedBlockingQueue<String> unavailableEventQueue = new LinkedBlockingQueue<>();
    private final LinkedBlockingQueue<Pair<String, String>> unavailablePhysicalCamEventQueue =
                new LinkedBlockingQueue<>();
    private Set<String> mUnavailablePhysicalCameras;
345 
    /** Plain holder mirroring the fields of a framework SensorEvent snapshot. */
    static class MySensorEvent {
        public Sensor sensor;
        public int accuracy;
        public long timestamp;
        public float[] values;
    }
352 
    // Records camera unavailability events: unavailable logical camera ids are queued on
    // unavailableEventQueue, and (logical, physical) id pairs are added to / removed from
    // unavailablePhysicalCamEventQueue as physical cameras change availability.
    CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
        @Override
        public void onCameraAvailable(String cameraId) {
            // No-op beyond super: only unavailability is tracked for logical cameras.
            super.onCameraAvailable(cameraId);
        }

        @Override
        public void onCameraUnavailable(String cameraId) {
            super.onCameraUnavailable(cameraId);
            unavailableEventQueue.offer(cameraId);
        }

        @Override
        public void onPhysicalCameraAvailable(String cameraId, String physicalCameraId) {
            super.onPhysicalCameraAvailable(cameraId, physicalCameraId);
            // Pair became available again; drop any stale unavailability record.
            unavailablePhysicalCamEventQueue.remove(new Pair<>(cameraId, physicalCameraId));
        }

        @Override
        public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
            super.onPhysicalCameraUnavailable(cameraId, physicalCameraId);
            unavailablePhysicalCamEventQueue.offer(new Pair<>(cameraId, physicalCameraId));
        }
    };
377 
    /**
     * Result of one video recording run: output file path plus the recording parameters
     * and the per-frame capture results collected during recording.
     */
    static class VideoRecordingObject {
        private static final int INVALID_FRAME_RATE = -1;

        public String recordedOutputPath;
        public String quality;
        public Size videoSize;
        public int videoFrameRate; // -1 implies video framerate was not set by the test
        public int fileFormat;
        public double zoomRatio;
        public Map<String, String> metadata = new HashMap<>();
        public List<RecordingResult> perFrameCaptureResults;

        public VideoRecordingObject(String recordedOutputPath,
                String quality, Size videoSize, int videoFrameRate,
                int fileFormat, double zoomRatio,
                List<RecordingResult> perFrameCaptureResults) {
            this.recordedOutputPath = recordedOutputPath;
            this.quality = quality;
            this.videoSize = videoSize;
            this.videoFrameRate = videoFrameRate;
            this.fileFormat = fileFormat;
            this.zoomRatio = zoomRatio;
            this.perFrameCaptureResults = perFrameCaptureResults;
        }

        // Convenience constructor for recordings where the test did not set a frame rate.
        VideoRecordingObject(String recordedOutputPath, String quality, Size videoSize,
                int fileFormat, double zoomRatio,
                List<RecordingResult> perFrameCaptureResults) {
            this(recordedOutputPath, quality, videoSize,
                INVALID_FRAME_RATE, fileFormat, zoomRatio,
                perFrameCaptureResults);
        }

        /** Returns true if the test explicitly set a video frame rate. */
        public boolean isFrameRateValid() {
            return videoFrameRate != INVALID_FRAME_RATE;
        }
    }
415 
    // For capturing motion sensor traces.
    private SensorManager mSensorManager = null;
    private Sensor mAccelSensor = null;
    private Sensor mMagSensor = null;
    private Sensor mGyroSensor = null;
    private Sensor mRotationVector = null;
    // Recorded sensor events; mEventLock/mEventsEnabled presumably coordinate access from
    // onSensorChanged — confirm with the listener implementation below in the file.
    private volatile LinkedList<MySensorEvent> mEvents = null;
    private final Object mEventLock = new Object();
    private volatile boolean mEventsEnabled = false;
    private HandlerThread mSensorThread = null;
    private Handler mSensorHandler = null;

    private SensorPrivacyManager mSensorPrivacyManager;

    // Camera test instrumentation
    private CameraTestInstrumentation mCameraInstrumentation;
    // Camera PerformanceTest metric
    private final ArrayList<Metric> mResults = new ArrayList<>();

    // Presumably indices into the Object[] payloads on mSerializerQueue — confirm with
    // SerializerRunnable below in the file.
    private static final int SERIALIZER_SURFACES_ID = 2;
    private static final int SERIALIZER_PHYSICAL_METADATA_ID = 3;
437 
    /** Callback invoked when a captured image becomes available. */
    public interface CaptureCallback {
        /**
         * @param capture          the newly available image
         * @param physicalCameraId physical camera the image came from — exact convention
         *                         for logical-only captures set by callers; confirm below
         */
        void onCaptureAvailable(Image capture, String physicalCameraId);
    }
441 
    /** Base listener type for regular capture-session results. */
    public abstract static class CaptureResultListener extends
            CameraCaptureSession.CaptureCallback {}
    /** Base listener type for camera-extension session results. */
    public abstract static class ExtensionCaptureResultListener extends
            CameraExtensionSession.ExtensionCaptureCallback {}
446 
    /** This is a started (not bound) service; binding is not supported. */
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
451 
452     @Override
onCreate()453     public void onCreate() {
454         try {
455             mThreadExitFlag = false;
456 
457             // Get handle to camera manager.
458             mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
459             if (mCameraManager == null) {
460                 throw new ItsException("Failed to connect to camera manager");
461             }
462             mBlockingCameraManager = new ItsService.BlockingCameraManager(mCameraManager);
463             mCameraListener = new BlockingStateCallback();
464 
465             // Register for motion events.
466             mEvents = new LinkedList<>();
467             mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
468             mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
469             mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
470             mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
471             mRotationVector = mSensorManager.getDefaultSensor(Sensor.TYPE_ROTATION_VECTOR);
472             mSensorThread = new HandlerThread("SensorThread");
473             mSensorThread.start();
474             mSensorHandler = new Handler(mSensorThread.getLooper());
475             mSensorManager.registerListener(this, mAccelSensor,
476                     /*100Hz*/ 10000, mSensorHandler);
477             mSensorManager.registerListener(this, mMagSensor,
478                     SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler);
479             mSensorManager.registerListener(this, mGyroSensor,
480                     SensorManager.SENSOR_DELAY_FASTEST, mSensorHandler);
481             mSensorManager.registerListener(this, mRotationVector,
482                     SensorManager.SENSOR_DELAY_FASTEST, mSensorHandler);
483 
484             // Get a handle to the system vibrator.
485             mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE);
486 
487             // Create threads to receive images and save them.
488             for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
489                 mSaveThreads[i] = new HandlerThread("SaveThread" + i);
490                 mSaveThreads[i].start();
491                 mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper());
492             }
493 
494             // Create a thread to handle object serialization.
495             (new Thread(new SerializerRunnable())).start();
496 
497             // Create a thread to receive capture results and process them.
498             mResultThread = new HandlerThread("ResultThread");
499             mResultThread.start();
500             mResultHandler = new Handler(mResultThread.getLooper());
501 
502             // Create a thread for the camera device.
503             mCameraThread = new HandlerThread("ItsCameraThread");
504             mCameraThread.start();
505             mCameraHandler = new Handler(mCameraThread.getLooper());
506 
507             // Create a thread to process commands, listening on a TCP socket.
508             mSocketRunnableObj = new SocketRunnable();
509             (new Thread(mSocketRunnableObj)).start();
510         } catch (ItsException e) {
511             Logt.e(TAG, "Service failed to start: ", e);
512         }
513 
514         NotificationManager notificationManager =
515                 (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
516         mChannel = new NotificationChannel(
517                 "ItsServiceChannel", "ItsService", NotificationManager.IMPORTANCE_LOW);
518         // Configure the notification channel.
519         mChannel.setDescription("ItsServiceChannel");
520         mChannel.enableVibration(false);
521         notificationManager.createNotificationChannel(mChannel);
522 
523         mSensorPrivacyManager = getSystemService(SensorPrivacyManager.class);
524     }
525 
526     @Override
onStartCommand(Intent intent, int flags, int startId)527     public int onStartCommand(Intent intent, int flags, int startId) {
528         try {
529             // Just log a message indicating that the service is running and is able to accept
530             // socket connections.
531             while (!mThreadExitFlag && mSocket==null) {
532                 Thread.sleep(1);
533             }
534 
535             if (intent != null && intent.hasExtra(ItsTestActivity.JCA_CAPTURE_PATH_TAG)) {
536                 try {
537                     mSocketRunnableObj.sendResponse(ItsTestActivity.JCA_CAPTURE_STATUS_TAG,
538                             Integer.toString(intent.getIntExtra(
539                                     ItsTestActivity.JCA_CAPTURE_STATUS_TAG,
540                                     Activity.RESULT_CANCELED)
541                             )
542                     );
543                     mSocketRunnableObj.sendResponse(
544                             ItsTestActivity.JCA_CAPTURE_PATH_TAG,
545                             intent.getStringExtra(ItsTestActivity.JCA_CAPTURE_PATH_TAG));
546                 } catch (ItsException e) {
547                     Logt.e(TAG, "Error sending JCA capture path and status", e);
548                 }
549                 return START_STICKY;
550             }
551 
552             if (!mThreadExitFlag){
553                 Logt.i(TAG, "ItsService ready");
554             } else {
555                 Logt.e(TAG, "Starting ItsService in bad state");
556             }
557 
558             Notification notification = new Notification.Builder(this, mChannel.getId())
559                     .setContentTitle("CameraITS Service")
560                     .setContentText("CameraITS Service is running")
561                     .setSmallIcon(R.drawable.icon)
562                     .setOngoing(true).build();
563             startForeground(SERVICE_NOTIFICATION_ID, notification,
564                     ServiceInfo.FOREGROUND_SERVICE_TYPE_CAMERA);
565         } catch (java.lang.InterruptedException e) {
566             Logt.e(TAG, "Error starting ItsService (interrupted)", e);
567         }
568         return START_STICKY;
569     }
570 
571     @Override
onDestroy()572     public void onDestroy() {
573         mThreadExitFlag = true;
574         for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
575             if (mSaveThreads[i] != null) {
576                 mSaveThreads[i].quit();
577                 mSaveThreads[i] = null;
578             }
579         }
580         if (mSensorThread != null) {
581             mSensorThread.quitSafely();
582             mSensorThread = null;
583         }
584         if (mResultThread != null) {
585             mResultThread.quitSafely();
586             mResultThread = null;
587         }
588         if (mCameraThread != null) {
589             mCameraThread.quitSafely();
590             mCameraThread = null;
591         }
592     }
593 
594     private static class BlockingCameraManager
595             extends com.android.ex.camera2.blocking.BlockingCameraManager {
596 
BlockingCameraManager(CameraManager manager)597         BlockingCameraManager(CameraManager manager) {
598             super(manager);
599         }
600 
openCamera(String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)601         public CameraDevice openCamera(String cameraId, boolean overrideToPortrait,
602                 CameraDevice.StateCallback listener, Handler handler)
603                 throws CameraAccessException, BlockingOpenException {
604             if (handler == null) {
605                 throw new IllegalArgumentException("handler must not be null");
606             } else if (handler.getLooper() == Looper.myLooper()) {
607                 throw new IllegalArgumentException(
608                         "handler's looper must not be the current looper");
609             }
610 
611             return (new OpenListener(mManager, cameraId, overrideToPortrait, listener, handler))
612                     .blockUntilOpen();
613         }
614 
615         protected class OpenListener
616                 extends com.android.ex.camera2.blocking.BlockingCameraManager.OpenListener {
OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)617             OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait,
618                     CameraDevice.StateCallback listener, Handler handler)
619                     throws CameraAccessException {
620                 super(cameraId, listener);
621                 manager.openCamera(cameraId, overrideToPortrait, handler, this);
622             }
623         }
624     }
625 
openCameraDevice(String cameraId, JSONObject cmdObj)626     public void openCameraDevice(String cameraId, JSONObject cmdObj) throws ItsException {
627         Logt.i(TAG, String.format("Opening camera %s", cameraId));
628 
629         // Get initial physical unavailable callbacks without opening camera
630         mCameraManager.registerAvailabilityCallback(ac, mCameraHandler);
631 
632         try {
633             if (mMemoryQuota == -1) {
634                 // Initialize memory quota on this device
635                 if (mItsCameraIdList == null) {
636                     mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
637                 }
638                 if (mItsCameraIdList.mCameraIds.size() == 0) {
639                     throw new ItsException("No camera devices");
640                 }
641                 for (String camId : mItsCameraIdList.mCameraIds) {
642                     CameraCharacteristics chars =  mCameraManager.getCameraCharacteristics(camId);
643                     Size maxYuvSize = ItsUtils.getMaxOutputSize(
644                             chars, ImageFormat.YUV_420_888);
645                     // 4 bytes per pixel for RGBA8888 Bitmap and at least 3 Bitmaps per CDD
646                     int quota = maxYuvSize.getWidth() * maxYuvSize.getHeight() * 4 * 3;
647                     if (quota > mMemoryQuota) {
648                         mMemoryQuota = quota;
649                     }
650                 }
651             }
652         } catch (CameraAccessException e) {
653             throw new ItsException("Failed to get device ID list", e);
654         }
655 
656         try {
657             mUnavailablePhysicalCameras = getUnavailablePhysicalCameras(
658                     unavailablePhysicalCamEventQueue, cameraId);
659             Log.i(TAG, "Unavailable cameras:" + List.of(mUnavailablePhysicalCameras.toString()));
660             if (cmdObj.has("overrideToPortrait")) {
661                 mCamera = mBlockingCameraManager.openCamera(cameraId,
662                         cmdObj.getBoolean("overrideToPortrait"), mCameraListener, mCameraHandler);
663             } else {
664                 mCamera = mBlockingCameraManager.openCamera(cameraId, mCameraListener,
665                         mCameraHandler);
666             }
667             mCameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId);
668             mCameraExtensionCharacteristics = mCameraManager.getCameraExtensionCharacteristics(
669                     cameraId);
670             // The camera should be in available->unavailable state.
671             unavailableEventQueue.clear();
672             boolean isLogicalCamera = hasCapability(
673                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA);
674             if (isLogicalCamera) {
675                 Set<String> physicalCameraIds = mCameraCharacteristics.getPhysicalCameraIds();
676                 for (String id : physicalCameraIds) {
677                     if (mUnavailablePhysicalCameras.contains(id)) {
678                         Log.i(TAG, "Physical camera id not available: " + id);
679                         continue;
680                     }
681                     mPhysicalCameraChars.put(id, mCameraManager.getCameraCharacteristics(id));
682                 }
683             }
684             mSocketQueueQuota = new Semaphore(mMemoryQuota, true);
685         } catch (CameraAccessException e) {
686             throw new ItsException("Failed to open camera", e);
687         } catch (BlockingOpenException e) {
688             throw new ItsException("Failed to open camera (after blocking)", e);
689         } catch (org.json.JSONException e) {
690             throw new ItsException("Failed to read open camera command", e);
691         } catch (Exception e) {
692             throw new ItsException("Failed to get unavailable physical cameras", e);
693         }
694         mSocketRunnableObj.sendResponse("cameraOpened", "");
695     }
696 
closeCameraDevice()697     public void closeCameraDevice() throws ItsException {
698         try {
699             if (mSession != null) {
700                 Logt.i(TAG, "Closing session upon closing camera device.");
701                 mSession.close();
702                 mSession = null;
703             }
704             if (mCamera != null) {
705                 Logt.i(TAG, "Closing camera");
706                 mCamera.close();
707                 mCamera = null;
708                 mCameraManager.unregisterAvailabilityCallback(ac);
709                 unavailablePhysicalCamEventQueue.clear();
710             }
711             // Reset OutputConfigurations and ImageReader args
712             mCaptureOutputConfigs = new ArrayList<>();
713             closeImageReaders();
714         } catch (Exception e) {
715             throw new ItsException("Failed to close device");
716         }
717         mSocketRunnableObj.sendResponse("cameraClosed", "");
718     }
719 
720     class SerializerRunnable implements Runnable {
721         // Use a separate thread to perform JSON serialization (since this can be slow due to
722         // the reflection).
723         @Override
run()724         public void run() {
725             Logt.i(TAG, "Serializer thread starting");
726             while (! mThreadExitFlag) {
727                 try {
728                     Object[] objs = mSerializerQueue.take();
729                     JSONObject jsonObj = new JSONObject();
730                     String tag = null;
731                     for (int i = 0; i < objs.length; i++) {
732                         Object obj = objs[i];
733                         if (obj instanceof String) {
734                             if (tag != null) {
735                                 throw new ItsException("Multiple tags for socket response");
736                             }
737                             tag = (String)obj;
738                         } else if (obj instanceof CameraCharacteristics) {
739                             jsonObj.put("cameraProperties", ItsSerializer.serialize(
740                                     (CameraCharacteristics)obj));
741                         } else if (obj instanceof CaptureRequest) {
742                             jsonObj.put("captureRequest", ItsSerializer.serialize(
743                                     (CaptureRequest)obj));
744                         } else if (obj instanceof CaptureResult) {
745                             jsonObj.put("captureResult", ItsSerializer.serialize(
746                                     (CaptureResult)obj));
747                         } else if (obj instanceof JSONArray) {
748                             if (tag == "captureResults") {
749                                 if (i == SERIALIZER_SURFACES_ID) {
750                                     jsonObj.put("outputs", (JSONArray)obj);
751                                 } else if (i == SERIALIZER_PHYSICAL_METADATA_ID) {
752                                     jsonObj.put("physicalResults", (JSONArray)obj);
753                                 } else {
754                                     throw new ItsException(
755                                             "Unsupported JSONArray for captureResults");
756                                 }
757                             } else {
758                                 jsonObj.put("outputs", (JSONArray)obj);
759                             }
760                         } else {
761                             throw new ItsException("Invalid object received for serialization");
762                         }
763                     }
764                     if (tag == null) {
765                         throw new ItsException("No tag provided for socket response");
766                     }
767                     mSocketRunnableObj.sendResponse(tag, null, jsonObj, null);
768                     Logt.i(TAG, String.format("Serialized %s", tag));
769                 } catch (JSONException | ItsException e) {
770                     Logt.e(TAG, "Error serializing object", e);
771                     break;
772                 } catch (java.lang.InterruptedException e) {
773                     Logt.e(TAG, "Error serializing object (interrupted)", e);
774                     break;
775                 }
776             }
777             Logt.i(TAG, "Serializer thread terminated");
778         }
779     }
780 
    class SocketWriteRunnable implements Runnable {

        // Use a separate thread to service a queue of objects to be written to the socket,
        // writing each sequentially in order. This is needed since different handler functions
        // (called on different threads) will need to send data back to the host script.

        // Socket currently being written to; may be swapped/nulled by the reader thread.
        public Socket mOpenSocket = null;
        // Lazily (re)created writer thread; see checkAndStartThread().
        private Thread mThread = null;

        /** Creates the writer; a null socket means "not connected yet". */
        public SocketWriteRunnable(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        /** Points the writer at a new (or null) socket after a (dis)connect. */
        public void setOpenSocket(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        /**
         * Drains mSocketWriteQueue, writing each buffer to the socket in FIFO order.
         * When a written buffer's size matches the head of mInflightImageSizes, that
         * image is considered delivered and its bytes are released back to the
         * mSocketQueueQuota semaphore. Terminates on I/O error or interruption.
         */
        @Override
        public void run() {
            Logt.i(TAG, "Socket writer thread starting");
            while (true) {
                try {
                    ByteBuffer b = mSocketWriteQueue.take();
                    // Held also by the reader thread when it clears/queues on
                    // (dis)connect, so writes never interleave with a socket swap.
                    synchronized(mSocketWriteDrainLock) {
                        if (mOpenSocket == null) {
                            Logt.e(TAG, "No open socket connection!");
                            continue;
                        }
                        if (b.hasArray()) {
                            mOpenSocket.getOutputStream().write(b.array(), 0, b.capacity());
                        } else {
                            // Direct buffer: copy out to a heap array before writing.
                            byte[] barray = new byte[b.capacity()];
                            b.get(barray);
                            mOpenSocket.getOutputStream().write(barray);
                        }
                        mOpenSocket.getOutputStream().flush();
                        Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity()));
                        Integer imgBufSize = mInflightImageSizes.peek();
                        if (imgBufSize != null && imgBufSize == b.capacity()) {
                            mInflightImageSizes.removeFirst();
                            if (mSocketQueueQuota != null) {
                                mSocketQueueQuota.release(imgBufSize);
                            }
                        }
                    }
                } catch (IOException e) {
                    Logt.e(TAG, "Error writing to socket", e);
                    mOpenSocket = null;
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error writing to socket (interrupted)", e);
                    mOpenSocket = null;
                    break;
                }
            }
            Logt.i(TAG, "Socket writer thread terminated");
        }

        /**
         * (Re)starts the writer thread if it has never run or has terminated.
         * Synchronized so concurrent connects cannot spawn two writer threads.
         */
        public synchronized void checkAndStartThread() {
            if (mThread == null || mThread.getState() == Thread.State.TERMINATED) {
                mThread = new Thread(this);
            }
            if (mThread.getState() == Thread.State.NEW) {
                mThread.start();
            }
        }

    }
849 
850     class SocketRunnable implements Runnable {
851 
852         // Format of sent messages (over the socket):
853         // * Serialized JSON object on a single line (newline-terminated)
854         // * For byte buffers, the binary data then follows
855         //
856         // Format of received messages (from the socket):
857         // * Serialized JSON object on a single line (newline-terminated)
858 
859         private Socket mOpenSocket = null;
860 
        /**
         * Accept loop for the host connection: binds the server socket, then for each
         * client connection resets the write queue/in-flight bookkeeping, hands the
         * socket to the writer thread, and processes newline-terminated JSON commands
         * until the host disconnects or an error occurs.
         */
        @Override
        public void run() {
            Logt.i(TAG, "Socket thread starting");
            try {
                mSocket = new ServerSocket(SERVERPORT);
            } catch (IOException e) {
                Logt.e(TAG, "Failed to create socket", e);
            }

            // Create a new thread to handle writes to this socket.
            SocketWriteRunnable socketWriteRunnable = new SocketWriteRunnable(null);

            while (!mThreadExitFlag) {
                // Receive the socket-open request from the host.
                try {
                    Logt.i(TAG, "Waiting for client to connect to socket");
                    if (mSocket == null) {
                        Logt.e(TAG, "mSocket is null.");
                        break;
                    }
                    mOpenSocket = mSocket.accept();
                    if (mOpenSocket == null) {
                        Logt.e(TAG, "Socket connection error");
                        break;
                    }
                    // Drop any responses queued for a previous connection before the
                    // writer thread is pointed at the new socket.
                    mSocketWriteQueue.clear();
                    mInflightImageSizes.clear();
                    socketWriteRunnable.setOpenSocket(mOpenSocket);
                    socketWriteRunnable.checkAndStartThread();
                    Logt.i(TAG, "Socket connected");
                } catch (IOException e) {
                    Logt.e(TAG, "Socket open error: ", e);
                    break;
                }

                // Process commands over the open socket.
                while (!mThreadExitFlag) {
                    try {
                        BufferedReader input = new BufferedReader(
                                new InputStreamReader(mOpenSocket.getInputStream()));
                        // NOTE(review): `input` can never be null here (constructors
                        // don't return null); this check is dead code.
                        if (input == null) {
                            Logt.e(TAG, "Failed to get socket input stream");
                            break;
                        }
                        String line = input.readLine();
                        if (line == null) {
                            Logt.i(TAG, "Socket readline returned null (host disconnected)");
                            break;
                        }
                        processSocketCommand(line);
                    } catch (IOException e) {
                        Logt.e(TAG, "Socket read error: ", e);
                        break;
                    } catch (ItsException e) {
                        Logt.e(TAG, "Script error: ", e);
                        break;
                    }
                }

                // Close socket and go back to waiting for a new connection.
                try {
                    // Lock out the writer thread while tearing the socket down.
                    synchronized(mSocketWriteDrainLock) {
                        mSocketWriteQueue.clear();
                        mInflightImageSizes.clear();
                        mOpenSocket.close();
                        mOpenSocket = null;
                        socketWriteRunnable.setOpenSocket(null);
                        Logt.i(TAG, "Socket disconnected");
                    }
                } catch (java.io.IOException e) {
                    Logt.e(TAG, "Exception closing socket");
                }
            }

            // It's an overall error state if the code gets here; no recovery.
            // Try to do some cleanup, but the service probably needs to be restarted.
            Logt.i(TAG, "Socket server loop exited");
            mThreadExitFlag = true;
            try {
                synchronized(mSocketWriteDrainLock) {
                    if (mOpenSocket != null) {
                        mOpenSocket.close();
                        mOpenSocket = null;
                        socketWriteRunnable.setOpenSocket(null);
                    }
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
            try {
                if (mSocket != null) {
                    mSocket.close();
                    mSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
        }
959 
processSocketCommand(String cmd)960         public void processSocketCommand(String cmd)
961                 throws ItsException {
962             // Default locale must be set to "en-us"
963             Locale locale = Locale.getDefault();
964             if (!Locale.US.equals(locale)) {
965                 Logt.e(TAG, "Default language is not set to " + Locale.US + "!");
966                 stopSelf();
967             }
968 
969             // Each command is a serialized JSON object.
970             try {
971                 JSONObject cmdObj = new JSONObject(cmd);
972                 Logt.i(TAG, "Start processing command: " + cmdObj.getString("cmdName"));
973                 if ("open".equals(cmdObj.getString("cmdName"))) {
974                     String cameraId = cmdObj.getString("cameraId");
975                     openCameraDevice(cameraId, cmdObj);
976                 } else if ("close".equals(cmdObj.getString("cmdName"))) {
977                     closeCameraDevice();
978                 } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
979                     doGetProps();
980                 } else if ("getCameraPropertiesById".equals(cmdObj.getString("cmdName"))) {
981                     doGetPropsById(cmdObj);
982                 } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
983                     doStartSensorEvents();
984                 } else if ("checkSensorExistence".equals(cmdObj.getString("cmdName"))) {
985                     doCheckSensorExistence();
986                 } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
987                     doGetSensorEvents();
988                 } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
989                     do3A(cmdObj);
990                 } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
991                     doCapture(cmdObj);
992                 } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) {
993                     doVibrate(cmdObj);
994                 } else if ("setAudioRestriction".equals(cmdObj.getString("cmdName"))) {
995                     doSetAudioRestriction(cmdObj);
996                 } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) {
997                     doGetCameraIds();
998                 } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) {
999                     doReprocessCapture(cmdObj);
1000                 } else if ("getItsVersion".equals(cmdObj.getString("cmdName"))) {
1001                     mSocketRunnableObj.sendResponse("ItsVersion", ITS_SERVICE_VERSION);
1002                 } else if ("isStreamCombinationSupported".equals(cmdObj.getString("cmdName"))) {
1003                     doCheckStreamCombination(cmdObj);
1004                 } else if ("getCameraSessionProperties".equals(cmdObj.getString("cmdName"))) {
1005                     doGetSessionProps(cmdObj);
1006                 } else if ("isCameraPrivacyModeSupported".equals(cmdObj.getString("cmdName"))) {
1007                     doCheckCameraPrivacyModeSupport();
1008                 } else if ("isPrimaryCamera".equals(cmdObj.getString("cmdName"))) {
1009                     String cameraId = cmdObj.getString("cameraId");
1010                     doCheckPrimaryCamera(cameraId);
1011                 } else if ("isPerformanceClass".equals(cmdObj.getString("cmdName"))) {
1012                     doCheckPerformanceClass();
1013                 } else if ("isVicPerformanceClass".equals(cmdObj.getString("cmdName"))) {
1014                     doCheckVicPerformanceClass();
1015                 } else if ("measureCameraLaunchMs".equals(cmdObj.getString("cmdName"))) {
1016                     String cameraId = cmdObj.getString("cameraId");
1017                     doMeasureCameraLaunchMs(cameraId);
1018                 } else if ("measureCamera1080pJpegCaptureMs".equals(cmdObj.getString("cmdName"))) {
1019                     String cameraId = cmdObj.getString("cameraId");
1020                     doMeasureCamera1080pJpegCaptureMs(cameraId);
1021                 } else if ("getSupportedVideoQualities".equals(cmdObj.getString("cmdName"))) {
1022                     String cameraId = cmdObj.getString("cameraId");
1023                     doGetSupportedVideoQualities(cameraId);
1024                 } else if ("doGetSupportedVideoSizesCapped".equals(cmdObj.getString("cmdName"))) {
1025                     String cameraId = cmdObj.getString("cameraId");
1026                     doGetSupportedVideoSizesCapped(cameraId);
1027                 } else if ("getSupportedPreviewSizes".equals(cmdObj.getString("cmdName"))) {
1028                     doGetSupportedPreviewSizes();
1029                 } else if ("getQueryableStreamCombinations".equals(cmdObj.getString("cmdName"))) {
1030                     doGetQueryableStreamCombinations();
1031                 } else if ("getSupportedExtensions".equals(cmdObj.getString("cmdName"))) {
1032                     String cameraId = cmdObj.getString("cameraId");
1033                     doGetSupportedExtensions(cameraId);
1034                 } else if ("getSupportedExtensionSizes".equals(cmdObj.getString("cmdName"))) {
1035                     String cameraId = cmdObj.getString("cameraId");
1036                     int extension = cmdObj.getInt("extension");
1037                     int format = cmdObj.getInt("format");
1038                     doGetSupportedExtensionSizes(cameraId, extension, format);
1039                 } else if ("getSupportedExtensionPreviewSizes"
1040                         .equals(cmdObj.getString("cmdName"))) {
1041                     String cameraId = cmdObj.getString("cameraId");
1042                     int extension = cmdObj.getInt("extension");
1043                     doGetSupportedExtensionPreviewSizes(cameraId, extension);
1044                 } else if ("doBasicRecording".equals(cmdObj.getString("cmdName"))) {
1045                     String cameraId = cmdObj.getString("cameraId");
1046                     int profileId = cmdObj.getInt("profileId");
1047                     String quality = cmdObj.getString("quality");
1048                     int recordingDuration = cmdObj.getInt("recordingDuration");
1049                     int videoStabilizationMode = cmdObj.getInt("videoStabilizationMode");
1050                     boolean hlg10Enabled = cmdObj.getBoolean("hlg10Enabled");
1051                     double zoomRatio = cmdObj.optDouble("zoomRatio");
1052                     int aeTargetFpsMin = cmdObj.optInt("aeTargetFpsMin");
1053                     int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");
1054                     doBasicRecording(cameraId, profileId, quality, recordingDuration,
1055                             videoStabilizationMode, hlg10Enabled, zoomRatio,
1056                             aeTargetFpsMin, aeTargetFpsMax);
1057                 } else if ("doStaticPreviewRecording".equals(cmdObj.getString("cmdName"))) {
1058                     doStaticPreviewRecording(cmdObj);
1059                 } else if ("doDynamicZoomPreviewRecording".equals(
1060                         cmdObj.getString("cmdName"))) {
1061                     doDynamicZoomPreviewRecording(cmdObj);
1062                 } else if ("doDynamicMeteringRegionPreviewRecording".equals(
1063                         cmdObj.getString("cmdName"))) {
1064                     doDynamicMeteringRegionPreviewRecording(cmdObj);
1065                 } else if ("isHLG10SupportedForProfile".equals(cmdObj.getString("cmdName"))) {
1066                     String cameraId = cmdObj.getString("cameraId");
1067                     int profileId = cmdObj.getInt("profileId");
1068                     doCheckHLG10SupportForProfile(cameraId, profileId);
1069                 } else if ("isHLG10SupportedForSizeAndFps".equals(cmdObj.getString("cmdName"))) {
1070                     String cameraId = cmdObj.getString("cameraId");
1071                     String videoSize = cmdObj.getString("videoSize");
1072                     int maxFps = cmdObj.getInt("maxFps");
1073                     doCheckHLG10SupportForSizeAndFps(cameraId, videoSize, maxFps);
1074                 } else if ("isP3Supported".equals(cmdObj.getString("cmdName"))) {
1075                     String cameraId = cmdObj.getString("cameraId");
1076                     doCheckP3Support(cameraId);
1077                 } else if ("isLandscapeToPortraitEnabled".equals(cmdObj.getString("cmdName"))) {
1078                     doCheckLandscapeToPortraitEnabled();
1079                 } else if ("doCaptureWithFlash".equals(cmdObj.getString("cmdName"))) {
1080                     doCaptureWithFlash(cmdObj);
1081                 } else if ("doGetUnavailablePhysicalCameras".equals(cmdObj.getString("cmdName"))) {
1082                     doGetUnavailablePhysicalCameras();
1083                 } else if ("doCaptureWithExtensions".equals(cmdObj.getString("cmdName"))) {
1084                     int extension = cmdObj.getInt("extension");
1085                     doCaptureWithExtensions(cmdObj, extension);
1086                 } else if ("getDisplaySize".equals(cmdObj.getString("cmdName"))) {
1087                     doGetDisplaySize();
1088                 } else if ("getMaxCamcorderProfileSize".equals(cmdObj.getString("cmdName"))) {
1089                     String cameraId = cmdObj.getString("cameraId");
1090                     doGetMaxCamcorderProfileSize(cameraId);
1091                 } else if ("getAvailablePhysicalCameraProperties".equals(cmdObj.getString("cmdName"))) {
1092                     doGetAvailablePhysicalCameraProperties();
1093                 } else if ("isLowLightBoostAvailable".equals(cmdObj.getString("cmdName"))) {
1094                     String cameraId = cmdObj.getString("cameraId");
1095                     int extension = cmdObj.getInt("extension");
1096                     doCheckLowLightBoostAvailable(cameraId, extension);
1097                 } else if ("doCapturePreviewFrame".equals(cmdObj.getString("cmdName"))) {
1098                     doCapturePreviewFrame(cmdObj);
1099                 } else if ("doGetDefaultCameraPkgName".equals(cmdObj.getString("cmdName"))) {
1100                     doGetDefaultCameraPkgName();
1101                 } else if ("doGainMapCheck".equals(cmdObj.getString("cmdName"))) {
1102                     doGainMapCheck(cmdObj);
1103                 } else {
1104                     throw new ItsException("Unknown command: " + cmd);
1105                 }
1106                 Logt.i(TAG, "Finish processing command" + cmdObj.getString("cmdName"));
1107             } catch (org.json.JSONException e) {
1108                 Logt.e(TAG, "Invalid command: ", e);
1109             }
1110         }
1111 
sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)1112         public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
1113                 throws ItsException {
1114             try {
1115                 JSONObject jsonObj = new JSONObject();
1116                 jsonObj.put("tag", tag);
1117                 if (str != null) {
1118                     jsonObj.put("strValue", str);
1119                 }
1120                 if (obj != null) {
1121                     jsonObj.put("objValue", obj);
1122                 }
1123                 if (bbuf != null) {
1124                     jsonObj.put("bufValueSize", bbuf.capacity());
1125                 }
1126                 ByteBuffer bstr = ByteBuffer.wrap(
1127                         (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
1128                 synchronized(mSocketWriteEnqueueLock) {
1129                     if (bstr != null) {
1130                         mSocketWriteQueue.put(bstr);
1131                     }
1132                     if (bbuf != null) {
1133                         mInflightImageSizes.add(bbuf.capacity());
1134                         mSocketWriteQueue.put(bbuf);
1135                     }
1136                 }
1137             } catch (org.json.JSONException e) {
1138                 throw new ItsException("JSON error: ", e);
1139             } catch (java.lang.InterruptedException e) {
1140                 throw new ItsException("Socket error: ", e);
1141             }
1142         }
1143 
        /** Sends a tag plus string payload (no JSON object, no binary buffer). */
        public void sendResponse(String tag, String str)
                throws ItsException {
            sendResponse(tag, str, null, null);
        }
1148 
        /** Sends a tag plus JSON-object payload (no string, no binary buffer). */
        public void sendResponse(String tag, JSONObject obj)
                throws ItsException {
            sendResponse(tag, null, obj, null);
        }
1153 
        /** Sends a tag plus binary capture buffer (no string or JSON payload). */
        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
                throws ItsException {
            sendResponse(tag, null, null, bbuf);
        }
1158 
sendResponse(LinkedList<MySensorEvent> events)1159         public void sendResponse(LinkedList<MySensorEvent> events)
1160                 throws ItsException {
1161             Logt.i(TAG, "Sending " + events.size() + " sensor events");
1162             try {
1163                 JSONArray accels = new JSONArray();
1164                 JSONArray mags = new JSONArray();
1165                 JSONArray gyros = new JSONArray();
1166                 JSONArray rvs = new JSONArray();
1167                 for (MySensorEvent event : events) {
1168                     JSONObject obj = new JSONObject();
1169                     if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
1170                         float[] mRotationMatrix = new float[16];
1171                         float[] orientationVals = new float[3];
1172                         SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
1173                         SensorManager.getOrientation(mRotationMatrix, orientationVals);
1174                         orientationVals[0] = (float) Math.toDegrees(orientationVals[0]);
1175                         orientationVals[1] = (float) Math.toDegrees(orientationVals[1]);
1176                         orientationVals[2] = (float) Math.toDegrees(orientationVals[2]);
1177                         obj.put("time", event.timestamp);
1178                         obj.put("x", orientationVals[0]);
1179                         obj.put("y", orientationVals[1]);
1180                         obj.put("z", orientationVals[2]);
1181                     } else {
1182                         obj.put("time", event.timestamp);
1183                         obj.put("x", event.values[0]);
1184                         obj.put("y", event.values[1]);
1185                         obj.put("z", event.values[2]);
1186                     }
1187                     if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
1188                         accels.put(obj);
1189                     } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
1190                         mags.put(obj);
1191                     } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
1192                         gyros.put(obj);
1193                     } else if (event.sensor.getType() == Sensor.TYPE_ROTATION_VECTOR) {
1194                         rvs.put(obj);
1195                     }
1196                 }
1197                 JSONObject obj = new JSONObject();
1198                 obj.put("accel", accels);
1199                 obj.put("mag", mags);
1200                 obj.put("gyro", gyros);
1201                 obj.put("rv", rvs);
1202                 sendResponse("sensorEvents", null, obj, null);
1203             } catch (org.json.JSONException e) {
1204                 throw new ItsException("JSON error: ", e);
1205             }
1206             Logt.i(TAG, "Sent sensor events");
1207         }
1208 
sendResponse(CameraCharacteristics props)1209         public void sendResponse(CameraCharacteristics props)
1210                 throws ItsException {
1211             try {
1212                 Object objs[] = new Object[2];
1213                 objs[0] = "cameraProperties";
1214                 objs[1] = props;
1215                 mSerializerQueue.put(objs);
1216             } catch (InterruptedException e) {
1217                 throw new ItsException("Interrupted: ", e);
1218             }
1219         }
1220 
sendResponse(String tag, HashMap<String, CameraCharacteristics> props)1221         public void sendResponse(String tag, HashMap<String, CameraCharacteristics> props)
1222                 throws ItsException {
1223             try {
1224                 JSONArray jsonSurfaces = new JSONArray();
1225                 for (String s : props.keySet()) {
1226                     JSONObject jsonSurface = new JSONObject();
1227                     jsonSurface.put(s, ItsSerializer.serialize(props.get(s)));
1228                     jsonSurfaces.put(jsonSurface);
1229                 }
1230                 Object objs[] = new Object[2];
1231                 objs[0] = "availablePhysicalCameraProperties";
1232                 objs[1] = jsonSurfaces;
1233                 mSerializerQueue.put(objs);
1234             } catch (Exception e) {
1235                 throw new ItsException("Interrupted: ", e);
1236             }
1237         }
1238 
sendVideoRecordingObject(VideoRecordingObject obj)1239         public void sendVideoRecordingObject(VideoRecordingObject obj)
1240                 throws ItsException {
1241             try {
1242                 JSONObject videoJson = new JSONObject();
1243                 videoJson.put("recordedOutputPath", obj.recordedOutputPath);
1244                 videoJson.put("quality", obj.quality);
1245                 if (obj.isFrameRateValid()) {
1246                     videoJson.put("videoFrameRate", obj.videoFrameRate);
1247                 }
1248                 videoJson.put("videoSize", obj.videoSize);
1249                 JSONObject metadata = new JSONObject();
1250                 for (Map.Entry<String, String> entry : obj.metadata.entrySet()) {
1251                     metadata.put(entry.getKey(), entry.getValue());
1252                 }
1253                 videoJson.put("metadata", metadata);
1254                 JSONArray captureMetadata = new JSONArray();
1255                 for (RecordingResult r : obj.perFrameCaptureResults) {
1256                     captureMetadata.put(ItsSerializer.serialize(r));
1257                 }
1258                 videoJson.put("captureMetadata", captureMetadata);
1259                 sendResponse("recordingResponse", null, videoJson, null);
1260             } catch (org.json.JSONException e) {
1261                 throw new ItsException("JSON error: ", e);
1262             }
1263         }
1264 
sendResponseCaptureResult( CaptureRequest request, TotalCaptureResult result, ImageReader[] readers)1265         public void sendResponseCaptureResult(
1266                 CaptureRequest request, TotalCaptureResult result, ImageReader[] readers)
1267                 throws ItsException {
1268             try {
1269                 JSONArray jsonSurfaces = new JSONArray();
1270                 for (int i = 0; i < readers.length; i++) {
1271                     JSONObject jsonSurface = new JSONObject();
1272                     jsonSurface.put("width", readers[i].getWidth());
1273                     jsonSurface.put("height", readers[i].getHeight());
1274 
1275                     CameraCharacteristics cameraCharacteristics = mCameraCharacteristics;
1276                     String physicalCameraId = mPhysicalStreamMap.get(i);
1277                     if (physicalCameraId != null && !physicalCameraId.isEmpty()) {
1278                         cameraCharacteristics = mPhysicalCameraChars.get(physicalCameraId);
1279                     }
1280 
1281                     int format = readers[i].getImageFormat();
1282                     if (format == ImageFormat.RAW_SENSOR) {
1283                         if (mCaptureRawIsStats) {
1284                             Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
1285                                 cameraCharacteristics, false);
1286                             int aaw = activeArrayCropRegion.width();
1287                             int aah = activeArrayCropRegion.height();
1288                             jsonSurface.put("format", "rawStats");
1289                             jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
1290                             jsonSurface.put("height", aah / mCaptureStatsGridHeight);
1291                         } else if (mCaptureRawIsQuadBayerStats) {
1292                             Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
1293                                 cameraCharacteristics, true);
1294                             int aaw = activeArrayCropRegion.width();
1295                             int aah = activeArrayCropRegion.height();
1296                             jsonSurface.put("format", "rawQuadBayerStats");
1297                             jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
1298                             jsonSurface.put("height", aah / mCaptureStatsGridHeight);
1299                         } else if (mCaptureRawIsQuadBayer) {
1300                             jsonSurface.put("format", "rawQuadBayer");
1301                         } else if (mCaptureRawIsDng) {
1302                             jsonSurface.put("format", "dng");
1303                         } else {
1304                             jsonSurface.put("format", "raw");
1305                         }
1306                     } else if (format == ImageFormat.RAW10) {
1307                         if (mCaptureRawIsStats) {
1308                             Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
1309                                 cameraCharacteristics, false);
1310                             int aaw = activeArrayCropRegion.width();
1311                             int aah = activeArrayCropRegion.height();
1312                             jsonSurface.put("format", "raw10Stats");
1313                             jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
1314                             jsonSurface.put("height", aah / mCaptureStatsGridHeight);
1315                         } else if (mCaptureRawIsQuadBayerStats) {
1316                             Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
1317                                 cameraCharacteristics, true);
1318                             int aaw = activeArrayCropRegion.width();
1319                             int aah = activeArrayCropRegion.height();
1320                             jsonSurface.put("format", "raw10QuadBayerStats");
1321                             jsonSurface.put("width", aaw / mCaptureStatsGridWidth);
1322                             jsonSurface.put("height", aah / mCaptureStatsGridHeight);
1323                         } else if (mCaptureRawIsQuadBayer) {
1324                             jsonSurface.put("format", "raw10QuadBayer");
1325                         } else {
1326                             jsonSurface.put("format", "raw10");
1327                         }
1328                     } else if (format == ImageFormat.RAW12) {
1329                         jsonSurface.put("format", "raw12");
1330                     } else if (format == ImageFormat.JPEG) {
1331                         jsonSurface.put("format", "jpeg");
1332                     } else if (format == ImageFormat.JPEG_R) {
1333                         jsonSurface.put("format", JPEG_R_FMT);
1334                     } else if (format == ImageFormat.PRIVATE) {
1335                         jsonSurface.put("format", "priv");
1336                     } else if (format == ImageFormat.YUV_420_888) {
1337                         jsonSurface.put("format", "yuv");
1338                     } else if (format == ImageFormat.Y8) {
1339                         jsonSurface.put("format", "y8");
1340                     } else {
1341                         throw new ItsException("Invalid format");
1342                     }
1343                     jsonSurfaces.put(jsonSurface);
1344                 }
1345 
1346                 Map<String, CaptureResult> physicalMetadata =
1347                     result.getPhysicalCameraResults();
1348                 JSONArray jsonPhysicalMetadata = new JSONArray();
1349                 for (Map.Entry<String, CaptureResult> pair : physicalMetadata.entrySet()) {
1350                     JSONObject jsonOneMetadata = new JSONObject();
1351                     jsonOneMetadata.put(pair.getKey(), ItsSerializer.serialize(pair.getValue()));
1352                     jsonPhysicalMetadata.put(jsonOneMetadata);
1353                 }
1354                 Object objs[] = new Object[4];
1355                 objs[0] = "captureResults";
1356                 objs[1] = result;
1357                 objs[SERIALIZER_SURFACES_ID] = jsonSurfaces;
1358                 objs[SERIALIZER_PHYSICAL_METADATA_ID] = jsonPhysicalMetadata;
1359                 mSerializerQueue.put(objs);
1360             } catch (org.json.JSONException e) {
1361                 throw new ItsException("JSON error: ", e);
1362             } catch (InterruptedException e) {
1363                 throw new ItsException("Interrupted: ", e);
1364             }
1365         }
1366     }
1367 
1368     public ImageReader.OnImageAvailableListener
createAvailableListener(final CaptureCallback listener)1369             createAvailableListener(final CaptureCallback listener) {
1370         return new ImageReader.OnImageAvailableListener() {
1371             @Override
1372             public void onImageAvailable(ImageReader reader) {
1373                 Image i = null;
1374                 try {
1375                     i = reader.acquireNextImage();
1376                     Logt.i(TAG, "Image timestamp: " + i.getTimestamp());
1377                     String physicalCameraId = new String();
1378                     for (int idx = 0; idx < mOutputImageReaders.length; idx++) {
1379                         if (mOutputImageReaders[idx] == reader) {
1380                             physicalCameraId = mPhysicalStreamMap.get(idx);
1381                         }
1382                     }
1383                     listener.onCaptureAvailable(i, physicalCameraId);
1384                 } finally {
1385                     if (i != null) {
1386                         i.close();
1387                     }
1388                 }
1389             }
1390         };
1391     }
1392 
1393     public ImageReader.OnImageAvailableListener
1394             createExtensionAvailableListener(final CaptureCallback listener) {
1395         return new ImageReader.OnImageAvailableListener() {
1396             @Override
1397             public void onImageAvailable(ImageReader reader) {
1398                 Image i = null;
1399                 try {
1400                     i = reader.acquireNextImage();
1401                     String physicalCameraId = new String();
1402                     for (int idx = 0; idx < mOutputImageReaders.length; idx++) {
1403                         if (mOutputImageReaders[idx] == reader) {
1404                             physicalCameraId = mPhysicalStreamMap.get(idx);
1405                             break;
1406                         }
1407                     }
1408                     listener.onCaptureAvailable(i, physicalCameraId);
1409                     synchronized(mCountCallbacksRemaining) {
1410                         mCountCallbacksRemaining.decrementAndGet();
1411                         mCountCallbacksRemaining.notify();
1412                     }
1413                 } finally {
1414                     if (i != null) {
1415                         i.close();
1416                     }
1417                 }
1418             }
1419         };
1420     }
1421 
1422     private ImageReader.OnImageAvailableListener
1423             createAvailableListenerDropper() {
1424         return new ImageReader.OnImageAvailableListener() {
1425             @Override
1426             public void onImageAvailable(ImageReader reader) {
1427                 Image i = reader.acquireNextImage();
1428                 if (i != null) {
1429                     i.close();
1430                 }
1431             }
1432         };
1433     }
1434 
1435     private void doStartSensorEvents() throws ItsException {
1436         synchronized(mEventLock) {
1437             mEvents.clear();
1438             mEventsEnabled = true;
1439         }
1440         mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
1441     }
1442 
1443     private void doCheckSensorExistence() throws ItsException {
1444         try {
1445             JSONObject obj = new JSONObject();
1446             obj.put("accel", mAccelSensor != null);
1447             obj.put("mag", mMagSensor != null);
1448             obj.put("gyro", mGyroSensor != null);
1449             obj.put("rv", mRotationVector != null);
1450             obj.put("vibrator", mVibrator.hasVibrator());
1451             mSocketRunnableObj.sendResponse("sensorExistence", null, obj, null);
1452         } catch (org.json.JSONException e) {
1453             throw new ItsException("JSON error: ", e);
1454         }
1455     }
1456 
1457     private void doGetSensorEvents() throws ItsException {
1458         synchronized(mEventLock) {
1459             mSocketRunnableObj.sendResponse(mEvents);
1460             mEvents.clear();
1461             mEventsEnabled = false;
1462         }
1463     }
1464 
1465     private void doGetProps() throws ItsException {
1466         mSocketRunnableObj.sendResponse(mCameraCharacteristics);
1467     }
1468 
1469     private void doGetPropsById(JSONObject params) throws ItsException {
1470         String[] devices;
1471         try {
1472             // Intentionally not using ItsUtils.getItsCompatibleCameraIds here so it's possible to
1473             // write some simple script to query camera characteristics even for devices exempted
1474             // from ITS today.
1475             devices = mCameraManager.getCameraIdList();
1476             if (devices == null || devices.length == 0) {
1477                 throw new ItsException("No camera devices");
1478             }
1479         } catch (CameraAccessException e) {
1480             throw new ItsException("Failed to get device ID list", e);
1481         }
1482 
1483         try {
1484             String cameraId = params.getString("cameraId");
1485             CameraCharacteristics characteristics = null;
1486             if (params.has("overrideToPortrait")) {
1487                 characteristics = mCameraManager.getCameraCharacteristics(cameraId,
1488                         params.getBoolean("overrideToPortrait"));
1489             } else {
1490                 characteristics = mCameraManager.getCameraCharacteristics(cameraId);
1491             }
1492             mSocketRunnableObj.sendResponse(characteristics);
1493         } catch (org.json.JSONException e) {
1494             throw new ItsException("JSON error: ", e);
1495         } catch (IllegalArgumentException e) {
1496             throw new ItsException("Illegal argument error:", e);
1497         } catch (CameraAccessException e) {
1498             throw new ItsException("Access error: ", e);
1499         }
1500     }
1501 
1502     private void doGetAvailablePhysicalCameraProperties() throws ItsException {
1503         mSocketRunnableObj.sendResponse("availablePhysicalCameraProperties", mPhysicalCameraChars);
1504     }
1505 
1506 
1507     private void doCheckLowLightBoostAvailable(String cameraId, int extension) throws ItsException {
1508         try {
1509             mSocketRunnableObj.sendResponse("isLowLightBoostAvailable",
1510                     isLowLightBoostAvailable(cameraId, extension) ? "true" : "false");
1511         } catch (CameraAccessException e) {
1512             throw new ItsException("Failed to check low light boost supported", e);
1513         }
1514     }
1515 
1516     /**
1517      * Checks if low light boost AE mode is supported.
1518      *
1519      * This method queries for available AE modes and checks if low light boost is an available AE
1520      * mode. The method queries Camera2 if {@code extension} is -1. Otherwise, the method will
1521      * query the Camera Extension as defined by {@code extension}. If the extension is not
1522      * supported then false is returned.
1523      **/
1524     private boolean isLowLightBoostAvailable(String cameraId, int extension)
1525             throws CameraAccessException {
1526         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.VANILLA_ICE_CREAM) {
1527             return false;
1528         }
1529         boolean isLowLightBoostSupported = false;
1530         int[] aeModes = null;
1531         if (extension == -1) {
1532             // Get available AE modes for Camera2
1533             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
1534             aeModes = c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
1535         } else {
1536             // Get available AE modes for Camera Extension mode if the extension is supported
1537             CameraExtensionCharacteristics c =
1538                     mCameraManager.getCameraExtensionCharacteristics(cameraId);
1539             List<Integer> supportedExtensions = c.getSupportedExtensions();
1540             if (supportedExtensions.contains(extension)) {
1541                 aeModes = c.get(extension, CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES);
1542             }
1543         }
1544         return isLowLightBoostSupported = aeModes == null ? false : Ints.asList(aeModes)
1545             .contains(CameraMetadata.CONTROL_AE_MODE_ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY);
1546     }
1547 
1548     private Set<String> getUnavailablePhysicalCameras(
1549             LinkedBlockingQueue<Pair<String, String>> queue, String cameraId) throws Exception {
1550         Set<String> unavailablePhysicalCameras = new HashSet<String>();
1551         while (true) {
1552             Pair<String, String> unavailableIdCombo = queue.poll(
1553                     AVAILABILITY_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
1554             if (unavailableIdCombo == null) {
1555                 // No more entries in the queue. Break out of the loop and return.
1556                 break;
1557             }
1558             if (cameraId.equals(unavailableIdCombo.first)) {
1559                 unavailablePhysicalCameras.add(unavailableIdCombo.second);
1560             }
1561         }
1562         return unavailablePhysicalCameras;
1563     }
1564 
1565     private void doGetCameraIds() throws ItsException {
1566         if (mItsCameraIdList == null) {
1567             mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
1568         }
1569         if (mItsCameraIdList.mCameraIdCombos.size() == 0) {
1570             throw new ItsException("No camera devices");
1571         }
1572 
1573         try {
1574             JSONObject obj = new JSONObject();
1575             JSONArray array = new JSONArray();
1576             for (String id : mItsCameraIdList.mCameraIdCombos) {
1577                 array.put(id);
1578             }
1579             obj.put("cameraIdArray", array);
1580             obj.put("primaryRearCameraId", mItsCameraIdList.mPrimaryRearCameraId);
1581             obj.put("primaryFrontCameraId", mItsCameraIdList.mPrimaryFrontCameraId);
1582             mSocketRunnableObj.sendResponse("cameraIds", obj);
1583         } catch (org.json.JSONException e) {
1584             throw new ItsException("JSON error: ", e);
1585         }
1586     }
1587 
1588     private static class HandlerExecutor implements Executor {
1589         private final Handler mHandler;
1590 
1591         public HandlerExecutor(Handler handler) {
1592             mHandler = handler;
1593         }
1594 
1595         @Override
1596         public void execute(Runnable runCmd) {
1597             mHandler.post(runCmd);
1598         }
1599     }
1600 
1601     private SessionConfiguration getSessionConfiguration(JSONObject params)
1602             throws ItsException {
1603         JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
1604         boolean has10bitOutput = prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
1605                 /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
1606                 /*backgroundRequest*/false, /*reuseSession*/ false);
1607         int numSurfaces = mOutputImageReaders.length;
1608         List<OutputConfiguration> outputConfigs = new ArrayList<>(numSurfaces);
1609         for (int i = 0; i < numSurfaces; i++) {
1610             OutputConfiguration config = new OutputConfiguration(
1611                     mOutputImageReaders[i].getSurface());
1612             if (mPhysicalStreamMap.get(i) != null) {
1613                 config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
1614             }
1615             if (mStreamUseCaseMap.get(i) != null) {
1616                 config.setStreamUseCase(mStreamUseCaseMap.get(i));
1617             }
1618             boolean hlg10Compatible =
1619                     isHlg10Compatible(mOutputImageReaders[i].getImageFormat());
1620             if (has10bitOutput && hlg10Compatible) {
1621                 config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
1622             }
1623             outputConfigs.add(config);
1624         }
1625 
1626         SessionConfiguration sessionConfig = new SessionConfiguration(
1627                 SessionConfiguration.SESSION_REGULAR, outputConfigs);
1628 
1629         CaptureRequest.Builder templateReq = null;
1630         if (params.has(SETTINGS_KEY)) {
1631             try {
1632                 CaptureRequest.Builder defaultReq = mCamera.createCaptureRequest(
1633                         CameraDevice.TEMPLATE_STILL_CAPTURE);
1634                 JSONObject settingsObj = params.getJSONObject(SETTINGS_KEY);
1635                 templateReq = ItsSerializer.deserialize(defaultReq, settingsObj);
1636             } catch (CameraAccessException e) {
1637                 throw new ItsException("Failed to create capture request", e);
1638             } catch (org.json.JSONException e) {
1639                 throw new ItsException("JSON error: ", e);
1640             }
1641         }
1642 
1643         if (templateReq != null) {
1644             sessionConfig.setSessionParameters(templateReq.build());
1645         }
1646         return sessionConfig;
1647     }
1648 
1649     private void doCheckStreamCombination(JSONObject params) throws ItsException {
1650         try {
1651             String returnString;
1652             SessionConfiguration sessionConfig = getSessionConfiguration(params);
1653 
1654             if (sessionConfig.getSessionParameters() == null) {
1655                 returnString = mCamera.isSessionConfigurationSupported(sessionConfig)
1656                         ? "supportedCombination" : "unsupportedCombination";
1657             } else if (!mCameraManager.isCameraDeviceSetupSupported(mCamera.getId())) {
1658                 Log.i(TAG,
1659                         "Attempting to query session support with parameters, but "
1660                                 + "CameraDeviceSetup is not supported.");
1661                 returnString = "unsupportedOperation";
1662             } else {
1663                 CameraDevice.CameraDeviceSetup cameraDeviceSetup =
1664                         mCameraManager.getCameraDeviceSetup(mCamera.getId());
1665                 boolean supported = cameraDeviceSetup.isSessionConfigurationSupported(
1666                         sessionConfig);
1667                 returnString = supported ? "supportedCombination" : "unsupportedCombination";
1668             }
1669 
1670             mSocketRunnableObj.sendResponse("streamCombinationSupport", returnString);
1671 
1672         } catch (UnsupportedOperationException e) {
1673             mSocketRunnableObj.sendResponse("streamCombinationSupport", "unsupportedOperation");
1674         } catch (IllegalArgumentException | CameraAccessException e) {
1675             throw new ItsException("Error checking stream combination", e);
1676         }
1677     }
1678 
1679     private void doGetSessionProps(JSONObject params) throws ItsException {
1680         try {
1681             if (!mCameraManager.isCameraDeviceSetupSupported(mCamera.getId())) {
1682                 throw new ItsException("Attempting to query session characteristics, but "
1683                         + "CameraDeviceSetup is not supported.");
1684             }
1685 
1686             SessionConfiguration sessionConfig = getSessionConfiguration(params);
1687 
1688             CameraDevice.CameraDeviceSetup cameraDeviceSetup =
1689                     mCameraManager.getCameraDeviceSetup(mCamera.getId());
1690             CameraCharacteristics sessionProps = cameraDeviceSetup.getSessionCharacteristics(
1691                     sessionConfig);
1692 
1693             mSocketRunnableObj.sendResponse(sessionProps);
1694         } catch (android.hardware.camera2.CameraAccessException e) {
1695             throw new ItsException("Access error: ", e);
1696         }
1697     }
1698 
1699     private void doCheckCameraPrivacyModeSupport() throws ItsException {
1700         boolean hasPrivacySupport = mSensorPrivacyManager
1701                 .supportsSensorToggle(SensorPrivacyManager.Sensors.CAMERA);
1702         mSocketRunnableObj.sendResponse("cameraPrivacyModeSupport",
1703                 hasPrivacySupport ? "true" : "false");
1704     }
1705 
1706     private void doGetUnavailablePhysicalCameras() throws ItsException {
1707         try {
1708             JSONArray cameras = new JSONArray();
1709             JSONObject jsonObj = new JSONObject();
1710             for (String p : mUnavailablePhysicalCameras) {
1711                 cameras.put(p);
1712             }
1713             jsonObj.put("unavailablePhysicalCamerasArray", cameras);
1714             Log.i(TAG, "unavailablePhysicalCameras : " +
1715                     List.of(mUnavailablePhysicalCameras.toString()));
1716             mSocketRunnableObj.sendResponse("unavailablePhysicalCameras", null, jsonObj, null);
1717         } catch (org.json.JSONException e) {
1718             throw new ItsException("JSON error: ", e);
1719         }
1720     }
1721 
1722     private void doGetDisplaySize() throws ItsException {
1723         Size displaySize = getDisplaySize();
1724         mSocketRunnableObj.sendResponse("displaySize", displaySize.toString());
1725     }
1726 
1727     private Size getDisplaySize() throws ItsException {
1728         WindowManager windowManager = getSystemService(WindowManager.class);
1729         if (windowManager == null) {
1730             throw new ItsException("No window manager.");
1731         }
1732         WindowMetrics metrics = windowManager.getCurrentWindowMetrics();
1733         if (metrics == null) {
1734             throw new ItsException("No current window metrics in window manager.");
1735         }
1736         Rect windowBounds = metrics.getBounds();
1737 
1738         int width = windowBounds.width();
1739         int height = windowBounds.height();
1740         if (height > width) {
1741             height = width;
1742             width = windowBounds.height();
1743         }
1744 
1745         Size displaySize = new Size(width, height);
1746         return displaySize;
1747     }
1748 
1749     private void doGetMaxCamcorderProfileSize(String cameraId) throws ItsException {
1750         validateCameraId(cameraId);
1751 
1752         int cameraDeviceId = Integer.parseInt(cameraId);
1753         int maxArea = -1;
1754         Size maxProfileSize = new Size(0, 0);
1755         for (int profileId : CAMCORDER_PROFILE_QUALITIES_MAP.keySet()) {
1756             if (CamcorderProfile.hasProfile(cameraDeviceId, profileId)) {
1757                 CamcorderProfile profile = CamcorderProfile.get(cameraDeviceId, profileId);
1758                 if (profile == null) {
1759                     throw new ItsException("Invalid camcorder profile for id " + profileId);
1760                 }
1761 
1762                 int area = profile.videoFrameWidth * profile.videoFrameHeight;
1763                 if (area > maxArea) {
1764                     maxProfileSize = new Size(profile.videoFrameWidth, profile.videoFrameHeight);
1765                     maxArea = area;
1766                 }
1767             }
1768         }
1769         mSocketRunnableObj.sendResponse("maxCamcorderProfileSize", maxProfileSize.toString());
1770     }
1771 
1772     private void doCheckPrimaryCamera(String cameraId) throws ItsException {
1773         validateCameraId(cameraId);
1774 
1775         boolean isPrimaryCamera = false;
1776         try {
1777             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
1778             Integer cameraFacing = c.get(CameraCharacteristics.LENS_FACING);
1779             for (String id : mItsCameraIdList.mCameraIds) {
1780                 c = mCameraManager.getCameraCharacteristics(id);
1781                 Integer facing = c.get(CameraCharacteristics.LENS_FACING);
1782                 if (cameraFacing.equals(facing)) {
1783                     if (cameraId.equals(id)) {
1784                         isPrimaryCamera = true;
1785                     } else {
1786                         isPrimaryCamera = false;
1787                     }
1788                     break;
1789                 }
1790             }
1791         } catch (CameraAccessException e) {
1792             throw new ItsException("Failed to get camera characteristics", e);
1793         }
1794 
1795         mSocketRunnableObj.sendResponse("primaryCamera",
1796                 isPrimaryCamera ? "true" : "false");
1797     }
1798 
1799     private void doCheckHLG10SupportForProfile(String cameraId, int profileId) throws ItsException {
1800         validateCameraId(cameraId);
1801 
1802         int cameraDeviceId = Integer.parseInt(cameraId);
1803         CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId);
1804         assert (camcorderProfile != null);
1805 
1806         Size videoSize = new Size(camcorderProfile.videoFrameWidth,
1807                 camcorderProfile.videoFrameHeight);
1808         doCheckHLG10SupportInternal(cameraId, videoSize, camcorderProfile.videoBitRate,
1809                 camcorderProfile.videoFrameRate);
1810     }
1811 
1812     private void doCheckHLG10SupportForSizeAndFps(String cameraId, String videoSizeStr, int maxFps)
1813             throws ItsException {
1814         validateCameraId(cameraId);
1815 
1816         Size videoSize = Size.parseSize(videoSizeStr);
1817         int cameraIdInt = Integer.parseInt(cameraId);
1818         int videoBitRate = ItsUtils.calculateBitrate(cameraIdInt, videoSize, maxFps);
1819         doCheckHLG10SupportInternal(cameraId, videoSize, videoBitRate, maxFps);
1820     }
1821 
1822     private void doCheckHLG10SupportInternal(String cameraId, Size videoSize,
1823             int videoBitRate, int maxFps) throws ItsException {
1824         boolean cameraHLG10OutputSupported = false;
1825         try {
1826             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
1827             int[] caps = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1828             cameraHLG10OutputSupported = IntStream.of(caps).anyMatch(x -> x ==
1829                     CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
1830         } catch (CameraAccessException e) {
1831             throw new ItsException("Failed to get camera characteristics", e);
1832         }
1833 
1834         MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
1835         MediaFormat format = ItsUtils.initializeHLG10Format(videoSize, videoBitRate,
1836                 maxFps);
1837         boolean codecSupported = (list.findEncoderForFormat(format) != null);
1838         Log.v(TAG, "codecSupported: " + codecSupported + ", cameraHLG10OutputSupported: "
1839                 + cameraHLG10OutputSupported);
1840 
1841         mSocketRunnableObj.sendResponse("hlg10Response",
1842                 codecSupported && cameraHLG10OutputSupported ? "true" : "false");
1843     }
1844 
1845     private void doCheckP3Support(String cameraId) throws ItsException {
1846         boolean cameraP3OutputSupported = false;
1847         try {
1848             CameraCharacteristics c = mCameraManager.getCameraCharacteristics(cameraId);
1849             int[] caps = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1850             boolean colorSpaceProfilesSupported = IntStream.of(caps).anyMatch(x -> x
1851                     == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES);
1852             if (colorSpaceProfilesSupported) {
1853                 ColorSpaceProfiles colorSpaceProfiles = c.get(
1854                         CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES);
1855                 Set<ColorSpace.Named> colorSpaces =
1856                         colorSpaceProfiles.getSupportedColorSpaces(ImageFormat.UNKNOWN);
1857                 if (colorSpaces.contains(ColorSpace.Named.DISPLAY_P3)) {
1858                     cameraP3OutputSupported = true;
1859                 }
1860             }
1861         } catch (CameraAccessException e) {
1862             throw new ItsException("Failed to get camera characteristics", e);
1863         }
1864 
1865         mSocketRunnableObj.sendResponse("p3Response", cameraP3OutputSupported ? "true" : "false");
1866     }
1867 
1868     private void doCheckLandscapeToPortraitEnabled() throws ItsException {
1869         boolean enabled = SystemProperties.getBoolean(CameraManager.LANDSCAPE_TO_PORTRAIT_PROP,
1870                 false);
1871         mSocketRunnableObj.sendResponse("landscapeToPortraitEnabledResponse",
1872                 enabled ? "true" : "false");
1873     }
1874 
1875     private void doCheckPerformanceClass() throws ItsException {
1876         boolean  isPerfClass = (Build.VERSION.MEDIA_PERFORMANCE_CLASS >= PERFORMANCE_CLASS_R);
1877 
1878         mSocketRunnableObj.sendResponse("performanceClass",
1879                 isPerfClass ? "true" : "false");
1880     }
1881 
1882     private void doCheckVicPerformanceClass() throws ItsException {
1883         boolean  isPerfClass = (Build.VERSION.MEDIA_PERFORMANCE_CLASS >= PERFORMANCE_CLASS_VIC);
1884 
1885         mSocketRunnableObj.sendResponse("vicPerformanceClass",
1886                 isPerfClass ? "true" : "false");
1887     }
1888 
1889     private double invokeCameraPerformanceTest(Class testClass, String testName,
1890             String cameraId, String metricName) throws ItsException {
1891         mResults.clear();
1892         mCameraInstrumentation = new CameraTestInstrumentation();
1893         MetricListener metricListener = new MetricListener() {
1894             @Override
1895             public void onResultMetric(Metric metric) {
1896                 mResults.add(metric);
1897             }
1898         };
1899         mCameraInstrumentation.initialize(this, metricListener);
1900 
1901         Bundle bundle = new Bundle();
1902         bundle.putString("camera-id", cameraId);
1903         bundle.putString("perf-measure", "on");
1904         bundle.putString("perf-class-test", "on");
1905         bundle.putByte("has-activity", (byte) 1);
1906         InstrumentationRegistry.registerInstance(mCameraInstrumentation, bundle);
1907 
1908         JUnitCore testRunner = new JUnitCore();
1909         Log.v(TAG, String.format("Execute Test: %s#%s", testClass.getSimpleName(), testName));
1910         Request request = Request.method(testClass, testName);
1911         Result runResult = testRunner.run(request);
1912         if (!runResult.wasSuccessful()) {
1913             throw new ItsException("Camera PerformanceTest " + testClass.getSimpleName() +
1914                     "#" + testName + " failed");
1915         }
1916 
1917         for (Metric m : mResults) {
1918             if (m.getMessage().equals(metricName) && m.getValues().length == 1) {
1919                 return m.getValues()[0];
1920             }
1921         }
1922 
1923         throw new ItsException("Failed to look up " + metricName +
1924                 " in Camera PerformanceTest result!");
1925     }
1926 
1927     private void doMeasureCameraLaunchMs(String cameraId) throws ItsException {
1928         double launchMs = invokeCameraPerformanceTest(PerformanceTest.class,
1929                 "testCameraLaunch", cameraId, "camera_launch_average_time_for_all_cameras");
1930         mSocketRunnableObj.sendResponse("cameraLaunchMs", Double.toString(launchMs));
1931     }
1932 
1933     private void doMeasureCamera1080pJpegCaptureMs(String cameraId) throws ItsException {
1934         double jpegCaptureMs = invokeCameraPerformanceTest(PerformanceTest.class,
1935                 "testSingleCapture", cameraId,
1936                 "camera_capture_average_latency_for_all_cameras_jpeg");
1937         mSocketRunnableObj.sendResponse("camera1080pJpegCaptureMs", Double.toString(jpegCaptureMs));
1938     }
1939 
1940     private static long getReaderUsage(int format, boolean has10bitOutput, int inputFormat) {
1941         // Private image format camera readers will default to ZSL usage unless
1942         // explicitly configured to use a common consumer such as display.
1943         // We don't support the ZSL use case for the 10-bit use case, or if the input format
1944         // is not PRIVATE.
1945         boolean notForZslReprocess = (inputFormat != format);
1946         return (format == ImageFormat.PRIVATE && (has10bitOutput || notForZslReprocess))
1947                 ? HardwareBuffer.USAGE_COMPOSER_OVERLAY : HardwareBuffer.USAGE_CPU_READ_OFTEN;
1948     }
1949 
1950     private List<OutputConfiguration> getCaptureOutputConfigurations(
1951             JSONArray jsonOutputSpecs, boolean is10bitOutputPresent)
1952             throws org.json.JSONException {
1953         int numSurfaces = mOutputImageReaders.length;
1954         List<OutputConfiguration> outputConfigs =
1955                 new ArrayList<OutputConfiguration>(numSurfaces);
1956         for (int i = 0; i < numSurfaces; i++) {
1957             OutputConfiguration config = new OutputConfiguration(
1958                     mOutputImageReaders[i].getSurface());
1959             if (mPhysicalStreamMap.get(i) != null &&
1960                     !mPhysicalStreamMap.get(i).isEmpty()) {
1961                 config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
1962             }
1963             if (mStreamUseCaseMap.get(i) != null) {
1964                 config.setStreamUseCase(mStreamUseCaseMap.get(i));
1965             }
1966             if (jsonOutputSpecs != null) {
1967                 if (i < jsonOutputSpecs.length()) {
1968                     JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
1969                     int colorSpaceInt = surfaceObj.optInt(
1970                         "colorSpace", ColorSpaceProfiles.UNSPECIFIED);
1971                     if (colorSpaceInt != ColorSpaceProfiles.UNSPECIFIED) {
1972                         config.setColorSpace(ColorSpace.Named.values()[colorSpaceInt]);
1973                     }
1974                 }
1975             }
1976             boolean hlg10Compatible =
1977                     isHlg10Compatible(mOutputImageReaders[i].getImageFormat());
1978             if (is10bitOutputPresent && hlg10Compatible) {
1979                 // HLG10 is mandatory for all 10-bit output capable devices
1980                 config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
1981             }
1982             outputConfigs.add(config);
1983         }
1984         return outputConfigs;
1985     }
1986 
1987     private void prepareImageReaders(ImageReaderArgs args, boolean reuseSession) {
1988         if (reuseSession && args.equals(mImageReaderArgs)) {
1989             Logt.i(TAG, "Reusing image readers.");
1990             return;
1991         }
1992         Logt.i(TAG, String.format(Locale.getDefault(),
1993                 "Current imageReaderArgs: %s, mImageReaderArgs: %s", args, mImageReaderArgs));
1994         Size[] outputSizes = args.getOutputSizes();
1995         int[] outputFormats = args.getOutputFormats();
1996         Size inputSize = args.getInputSize();
1997         int inputFormat = args.getInputFormat();
1998         int maxInputBuffers = args.getMaxInputBuffers();
1999         boolean has10bitOutput = args.getHas10bitOutput();
2000         closeImageReaders();
2001         mOutputImageReaders = new ImageReader[outputSizes.length];
2002         for (int i = 0; i < outputSizes.length; i++) {
2003             // Check if the output image reader can be shared with the input image reader.
2004             if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) {
2005                 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
2006                         outputSizes[i].getHeight(), outputFormats[i],
2007                         MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers,
2008                         getReaderUsage(outputFormats[i], has10bitOutput, inputFormat));
2009                 mInputImageReader = mOutputImageReaders[i];
2010             } else {
2011                 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
2012                         outputSizes[i].getHeight(), outputFormats[i],
2013                         MAX_CONCURRENT_READER_BUFFERS, getReaderUsage(outputFormats[i],
2014                             has10bitOutput, inputFormat));
2015             }
2016         }
2017 
2018         if (inputSize != null && mInputImageReader == null) {
2019             mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(),
2020                     inputFormat, maxInputBuffers,
2021                     getReaderUsage(inputFormat, has10bitOutput, inputFormat));
2022         }
2023         mImageReaderArgs = ImageReaderArgs.valueOf(outputSizes, outputFormats, inputSize,
2024                 inputFormat, maxInputBuffers, has10bitOutput);
2025     }
2026 
2027     private void closeImageReaders() {
2028         Logt.i(TAG, "Closing image readers");
2029         if (mOutputImageReaders != null) {
2030             for (int i = 0; i < mOutputImageReaders.length; i++) {
2031                 if (mOutputImageReaders[i] != null) {
2032                     mOutputImageReaders[i].close();
2033                     mOutputImageReaders[i] = null;
2034                 }
2035             }
2036         }
2037         mOutputImageReaders = null;
2038         if (mInputImageReader != null) {
2039             mInputImageReader.close();
2040             mInputImageReader = null;
2041         }
2042         if (mThreeAOutputImageReader != null) {
2043             mThreeAOutputImageReader.close();
2044             mThreeAOutputImageReader = null;
2045         }
2046         mImageReaderArgs = ImageReaderArgs.EMPTY;
2047     }
2048 
    /**
     * Runs a 3A (auto-exposure, auto-focus, auto-white-balance) routine to convergence and
     * streams progress back over the socket.
     *
     * <p>Builds — or, with {@code reuseSession}, reuses — a capture session containing a YUV
     * reader used for 3A plus any requested capture outputs, then repeatedly issues preview
     * requests (triggering AE precapture and AF as needed) until every requested "A" converges
     * and, when LOCK_AE_KEY/LOCK_AWB_KEY are set, locks. Sends "3aConverged" on success, a
     * fake "afResult" immediately for fixed-focus lenses, and always "3aDone" on exit. Fails
     * with an {@link ItsException} if convergence takes longer than TIMEOUT_3A seconds.
     *
     * @param params JSON parameters: optional PHYSICAL_ID_KEY (run on a physical sub-camera),
     *               REGION_KEY (normalized AE/AF/AWB metering regions), TRIGGER_KEY
     *               (enable/disable AE and AF), EVCOMP_KEY, AUTO_FLASH_KEY, ZOOM_RATIO_KEY,
     *               LOCK_AE_KEY, LOCK_AWB_KEY, "reuseSession", "firstSurfaceFor3A", and
     *               output specs (consumed when reusing the session)
     * @throws ItsException on camera access errors, JSON errors, or 3A convergence timeout
     */
    private void do3A(JSONObject params) throws ItsException {
        ThreeAResultListener threeAListener = new ThreeAResultListener();
        boolean reuseSession = params.optBoolean("reuseSession", false);
        boolean firstSurfaceFor3A = params.optBoolean("firstSurfaceFor3A", false);
        List<OutputConfiguration> captureOutputConfigurations = new ArrayList<>();
        try {
            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // Configure streams on physical sub-camera if PHYSICAL_ID_KEY is specified.
            String physicalId = null;
            CameraCharacteristics c = mCameraCharacteristics;
            if (params.has(PHYSICAL_ID_KEY)) {
                physicalId = params.getString(PHYSICAL_ID_KEY);
                c = mPhysicalCameraChars.get(physicalId);
            }

            // Prepare capture image readers here, and skip when doing the actual capture
            if (reuseSession) {
                Logt.i(TAG, "Preparing capture image readers in 3A");
                JSONArray jsonCaptureOutputSpecs = ItsUtils.getOutputSpecs(params);
                List<CaptureRequest.Builder> backgroundRequests =
                        ItsSerializer.deserializeRequestList(mCamera, params, "repeatRequests");
                boolean backgroundRequest = backgroundRequests.size() > 0;
                boolean has10bitOutput = prepareImageReadersWithOutputSpecs(jsonCaptureOutputSpecs,
                        /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                        backgroundRequest, reuseSession);
                captureOutputConfigurations =
                        getCaptureOutputConfigurations(jsonCaptureOutputSpecs, has10bitOutput);
            }

            // Configure output format and size for 3A session, and prepare ImageReader.
            if (mThreeAOutputImageReader == null) {
                if (firstSurfaceFor3A && reuseSession) {
                    mThreeAOutputImageReader = mOutputImageReaders[0];
                } else {
                    Logt.i(TAG, "Setting up 3A image reader");
                    int outputFormat = ImageFormat.YUV_420_888;
                    Size size = ItsUtils.getYuvOutputSizes(c)[0];
                    mThreeAOutputImageReader = ImageReader.newInstance(
                            size.getWidth(), size.getHeight(), outputFormat,
                            MAX_CONCURRENT_READER_BUFFERS,
                            getReaderUsage(outputFormat, /*has10bitOutput=*/false,
                                    /*inputFormat*/-1));
                }
            }

            // Add all necessary output configurations for the capture session
            List<OutputConfiguration> sessionOutputConfigs = new ArrayList<>();
            for (OutputConfiguration config : captureOutputConfigurations) {
                sessionOutputConfigs.add(config);
            }
            if (!firstSurfaceFor3A) {
                OutputConfiguration threeAConfig =
                        new OutputConfiguration(mThreeAOutputImageReader.getSurface());
                if (physicalId != null) {
                    threeAConfig.setPhysicalCameraId(physicalId);
                }
                sessionOutputConfigs.add(threeAConfig);
            }

            if (mSession != null && reuseSession &&
                    mCaptureOutputConfigs.equals(captureOutputConfigurations)) {
                Logt.i(TAG, "Reusing camera capture session in 3A.");
            } else {
                Logt.i(TAG, "Need to create new capture session in 3A");
                if (mSession != null) {
                    mSession.close();
                }
                mSessionListener = new BlockingSessionCallback();
                mCamera.createCaptureSessionByOutputConfigurations(
                        sessionOutputConfigs, mSessionListener, mCameraHandler);
                mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
                mSessionListener.getStateWaiter().waitForState(
                                BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
                Logt.i(TAG, "New capture session created.");
            }
            // Cache the capture output configs so a later call can detect session reuse.
            mCaptureOutputConfigs = new ArrayList<OutputConfiguration>(captureOutputConfigurations);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper();
            mThreeAOutputImageReader.setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
            // Use logical camera's active array size for 3A regions.
            Rect activeArray = mCameraCharacteristics.get(
                    CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
            int aaWidth = activeArray.right - activeArray.left;
            int aaHeight = activeArray.bottom - activeArray.top;
            MeteringRectangle[] regionAE = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,aaWidth,aaHeight,1)};
            MeteringRectangle[] regionAF = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,aaWidth,aaHeight,1)};
            MeteringRectangle[] regionAWB = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,aaWidth,aaHeight,1)};
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    regionAE = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, aaWidth, aaHeight);
                }
                if (regions.has(REGION_AF_KEY)) {
                    regionAF = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, aaWidth, aaHeight);
                }
                if (regions.has(REGION_AWB_KEY)) {
                    regionAWB = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, aaWidth, aaHeight);
                }
            }

            // An EV compensation can be specified as part of AE convergence.
            int evComp = params.optInt(EVCOMP_KEY, 0);
            if (evComp != 0) {
                Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp));
            }

            // Auto flash can be specified as part of AE convergence.
            boolean autoFlash = params.optBoolean(AUTO_FLASH_KEY, false);
            if (autoFlash == true) {
                Logt.i(TAG, String.format("Running with auto flash mode."));
            }

            double zoomRatio = params.optDouble(ZOOM_RATIO_KEY);
            if (!Double.isNaN(zoomRatio)) {
                Logt.i(TAG, String.format("Running 3A with zoom ratio: %f", zoomRatio));
            }

            // By default, AE and AF both get triggered, but the user can optionally override this.
            // Also, AF won't get triggered if the lens is fixed-focus.
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    mDoAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    mDoAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }

            boolean isFixedFocusLens = isFixedFocusLens(c);
            if (mDoAF && isFixedFocusLens) {
                // Send a fake result back for the code that is waiting for this message to see
                // that AF has converged.
                Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
                mSocketRunnableObj.sendResponse("afResult", "0.0");
                mDoAF = false;
            }

            // Open the interlock so the first 3A request can be issued immediately.
            mInterlock3A.open();
            synchronized(m3AStateLock) {
                // If AE or AWB lock is specified, then the 3A will converge first and then lock these
                // values, waiting until the HAL has reported that the lock was successful.
                mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
                mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);
                mConvergedAE = false;
                mConvergedAWB = false;
                mConvergedAF = false;
                mLockedAE = false;
                mLockedAWB = false;
            }
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d",
                    mDoAE?1:0, mDoAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0));

            // Keep issuing capture requests until 3A has converged.
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
                            "AE converge state: " + mConvergedAE + ", \n" +
                            "AF convergence state: " + mConvergedAF + ", \n" +
                            "AWB convergence state: " + mConvergedAWB + ".");
                }
                mInterlock3A.close();

                synchronized(m3AStateLock) {
                    // If not converged yet, issue another capture request.
                    if ((mDoAE && (!triggeredAE || !mConvergedAE))
                            || !mConvergedAWB
                            || (mDoAF && (!triggeredAF || !mConvergedAF))
                            || (mDoAE && mNeedsLockedAE && !mLockedAE)
                            || (mNeedsLockedAWB && !mLockedAWB)) {

                        // Baseline capture request for 3A.
                        CaptureRequest.Builder req = mCamera.createCaptureRequest(
                                CameraDevice.TEMPLATE_PREVIEW);
                        req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                        req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                        req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                                CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                        req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                        req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                        req.set(CaptureRequest.CONTROL_AF_MODE,
                                CaptureRequest.CONTROL_AF_MODE_AUTO);
                        req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                        req.set(CaptureRequest.CONTROL_AWB_MODE,
                                CaptureRequest.CONTROL_AWB_MODE_AUTO);
                        req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                        req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
                        // ITS only turns OIS on when it's explicitly requested
                        req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                                CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);

                        if (evComp != 0) {
                            req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
                        }

                        if (autoFlash == false) {
                            req.set(CaptureRequest.CONTROL_AE_MODE,
                                    CaptureRequest.CONTROL_AE_MODE_ON);
                        } else {
                            req.set(CaptureRequest.CONTROL_AE_MODE,
                                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                        }

                        if (!Double.isNaN(zoomRatio)) {
                            req.set(CaptureRequest.CONTROL_ZOOM_RATIO, (float) zoomRatio);
                        }

                        if (mConvergedAE && mNeedsLockedAE) {
                            req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                        }
                        if (mConvergedAWB && mNeedsLockedAWB) {
                            req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                        }

                        boolean triggering = false;
                        // Trigger AE first.
                        if (mDoAE && !triggeredAE) {
                            Logt.i(TAG, "Triggering AE");
                            req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                            if (mDoAF) {
                                req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                        CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
                            }
                            triggeredAE = true;
                            triggering = true;
                        }

                        // After AE has converged, trigger AF.
                        if (mDoAF && !triggeredAF && (!mDoAE || (triggeredAE && mConvergedAE))) {
                            Logt.i(TAG, "Triggering AF");
                            req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                    CaptureRequest.CONTROL_AF_TRIGGER_START);
                            triggeredAF = true;
                            triggering = true;
                        }

                        req.addTarget(mThreeAOutputImageReader.getSurface());

                        if (triggering) {
                            // Send single request for AE/AF trigger
                            mSession.capture(req.build(),
                                    threeAListener, mResultHandler);
                        } else {
                            // Use repeating request for non-trigger requests
                            mSession.setRepeatingRequest(req.build(),
                                    threeAListener, mResultHandler);
                        }
                    } else {
                        mSocketRunnableObj.sendResponse("3aConverged", "");
                        Logt.i(TAG, "3A converged");
                        break;
                    }
                }
            }
            // Drain the repeating request and wait for the session to quiesce before returning.
            mSession.stopRepeating();
            mSessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
            Logt.i(TAG, "Session is ready again after doing 3A.");
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } finally {
            mSocketRunnableObj.sendResponse("3aDone", "");
            // stop listener from updating 3A states
            threeAListener.stop();
            if (mSession != null && !reuseSession) {
                closeImageReaders();
                mSession.close();
                mSession = null;
            }
        }
    }
2348 
2349     private void doVibrate(JSONObject params) throws ItsException {
2350         try {
2351             if (mVibrator == null) {
2352                 throw new ItsException("Unable to start vibrator");
2353             }
2354             JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY);
2355             int len = patternArray.length();
2356             long pattern[] = new long[len];
2357             for (int i = 0; i < len; i++) {
2358                 pattern[i] = patternArray.getLong(i);
2359             }
2360             Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len));
2361 
2362             // Mark the vibrator as alarm to test the audio restriction API
2363             // TODO: consider making this configurable
2364             AudioAttributes audioAttributes = new AudioAttributes.Builder()
2365                     .setUsage(AudioAttributes.USAGE_ALARM).build();
2366             mVibrator.vibrate(pattern, -1, audioAttributes);
2367             mSocketRunnableObj.sendResponse("vibrationStarted", "");
2368         } catch (org.json.JSONException e) {
2369             throw new ItsException("JSON error: ", e);
2370         }
2371     }
2372 
2373     private void doSetAudioRestriction(JSONObject params) throws ItsException {
2374         try {
2375             if (mCamera == null) {
2376                 throw new ItsException("Camera is closed");
2377             }
2378             int mode = params.getInt(AUDIO_RESTRICTION_MODE_KEY);
2379             mCamera.setCameraAudioRestriction(mode);
2380             Logt.i(TAG, String.format("Set audio restriction mode to %d", mode));
2381 
2382             mSocketRunnableObj.sendResponse("audioRestrictionSet", "");
2383         } catch (org.json.JSONException e) {
2384             throw new ItsException("JSON error: ", e);
2385         } catch (android.hardware.camera2.CameraAccessException e) {
2386             throw new ItsException("Access error: ", e);
2387         }
2388     }
2389 
2390     /**
2391      * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output
2392      * image readers for the parsed output surface sizes, output formats, and the given input
2393      * size and format.
2394      */
2395     private boolean prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs,
2396             Size inputSize, int inputFormat, int maxInputBuffers,
2397             boolean backgroundRequest, boolean reuseSession)
2398             throws ItsException {
2399         Size outputSizes[];
2400         int outputFormats[];
2401         int numSurfaces = 0;
2402         mPhysicalStreamMap.clear();
2403         mStreamUseCaseMap.clear();
2404 
2405         boolean is10bitOutputPresent = false;
2406         if (jsonOutputSpecs != null) {
2407             try {
2408                 numSurfaces = jsonOutputSpecs.length();
2409                 if (backgroundRequest) {
2410                     numSurfaces += 1;
2411                 }
2412                 if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
2413                     throw new ItsException("Too many output surfaces");
2414                 }
2415 
2416                 outputSizes = new Size[numSurfaces];
2417                 outputFormats = new int[numSurfaces];
2418                 for (int i = 0; i < numSurfaces; i++) {
2419                     // Append optional background stream at the end
2420                     if (backgroundRequest && i == numSurfaces - 1) {
2421                         outputFormats[i] = ImageFormat.YUV_420_888;
2422                         outputSizes[i] = new Size(640, 480);
2423                         continue;
2424                     }
2425                     // Get the specified surface.
2426                     JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
2427                     String physicalCameraId = surfaceObj.optString("physicalCamera");
2428                     CameraCharacteristics cameraCharacteristics =  mCameraCharacteristics;
2429                     mPhysicalStreamMap.put(i, physicalCameraId);
2430                     if (!physicalCameraId.isEmpty()) {
2431                         cameraCharacteristics = mPhysicalCameraChars.get(physicalCameraId);
2432                     }
2433 
2434                     String sformat = surfaceObj.optString("format");
2435                     Size sizes[];
2436                     if ("yuv".equals(sformat) || "".equals(sformat)) {
2437                         // Default to YUV if no format is specified.
2438                         outputFormats[i] = ImageFormat.YUV_420_888;
2439                         sizes = ItsUtils.getYuvOutputSizes(cameraCharacteristics);
2440                     } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
2441                         outputFormats[i] = ImageFormat.JPEG;
2442                         sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics);
2443                     } else if (JPEG_R_FMT.equals(sformat)) {
2444                         outputFormats[i] = ImageFormat.JPEG_R;
2445                         sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics);
2446                         is10bitOutputPresent = true;
2447                     } else if ("priv".equals(sformat)) {
2448                         outputFormats[i] = ImageFormat.PRIVATE;
2449                         sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics);
2450                         is10bitOutputPresent = surfaceObj.optBoolean("hlg10");
2451                     } else if ("raw".equals(sformat)) {
2452                         outputFormats[i] = ImageFormat.RAW_SENSOR;
2453                         sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics);
2454                     } else if ("rawQuadBayer".equals(sformat)) {
2455                         outputFormats[i] = ImageFormat.RAW_SENSOR;
2456                         sizes = ItsUtils.getRaw16MaxResulolutionOutputSizes(cameraCharacteristics);
2457                         mCaptureRawIsQuadBayer = true;
2458                     } else if ("rawStats".equals(sformat)) {
2459                         outputFormats[i] = ImageFormat.RAW_SENSOR;
2460                         sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics);
2461                         mCaptureRawIsStats = true;
2462                         mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth");
2463                         mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight");
2464                     } else if ("rawQuadBayerStats".equals(sformat)) {
2465                         outputFormats[i] = ImageFormat.RAW_SENSOR;
2466                         sizes = ItsUtils.getRaw16MaxResulolutionOutputSizes(cameraCharacteristics);
2467                         mCaptureRawIsQuadBayerStats = true;
2468                         mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth");
2469                         mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight");
2470                     }
2471                     else if ("raw10".equals(sformat)) {
2472                         outputFormats[i] = ImageFormat.RAW10;
2473                         sizes = ItsUtils.getRaw10OutputSizes(cameraCharacteristics);
2474                     } else if ("raw10QuadBayer".equals(sformat)) {
2475                         outputFormats[i] = ImageFormat.RAW10;
2476                         sizes = ItsUtils.getRaw10MaxResulolutionOutputSizes(cameraCharacteristics);
2477                         mCaptureRawIsQuadBayer = true;
2478                     } else if ("raw10Stats".equals(sformat)) {
2479                         outputFormats[i] = ImageFormat.RAW10;
2480                         sizes = ItsUtils.getRaw10OutputSizes(cameraCharacteristics);
2481                         mCaptureRawIsStats = true;
2482                         mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth");
2483                         mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight");
2484                     } else if ("raw10QuadBayerStats".equals(sformat)) {
2485                         outputFormats[i] = ImageFormat.RAW10;
2486                         sizes = ItsUtils.getRaw10MaxResulolutionOutputSizes(cameraCharacteristics);
2487                         mCaptureRawIsQuadBayerStats = true;
2488                         mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth");
2489                         mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight");
2490                     } else if ("raw12".equals(sformat)) {
2491                         outputFormats[i] = ImageFormat.RAW12;
2492                         sizes = ItsUtils.getRaw12OutputSizes(cameraCharacteristics);
2493                     } else if ("dng".equals(sformat)) {
2494                         outputFormats[i] = ImageFormat.RAW_SENSOR;
2495                         sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics);
2496                         mCaptureRawIsDng = true;
2497                     } else if ("y8".equals(sformat)) {
2498                         outputFormats[i] = ImageFormat.Y8;
2499                         sizes = ItsUtils.getY8OutputSizes(cameraCharacteristics);
2500                     } else {
2501                         throw new ItsException("Unsupported format: " + sformat);
2502                     }
2503                     // If the size is omitted, then default to the largest allowed size for the
2504                     // format.
2505                     int width = surfaceObj.optInt("width");
2506                     int height = surfaceObj.optInt("height");
2507                     if (width <= 0) {
2508                         if (sizes == null || sizes.length == 0) {
2509                             throw new ItsException(String.format(
2510                                     "Zero stream configs available for requested format: %s",
2511                                     sformat));
2512                         }
2513                         width = ItsUtils.getMaxSize(sizes).getWidth();
2514                     }
2515                     if (height <= 0) {
2516                         height = ItsUtils.getMaxSize(sizes).getHeight();
2517                     }
2518                     // The stats computation only applies to the active array region.
2519                     boolean isMaximumResolution =
2520                         mCaptureRawIsQuadBayer || mCaptureRawIsQuadBayerStats;
2521                     Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
2522                         cameraCharacteristics, isMaximumResolution);
2523                     int aaw = activeArrayCropRegion.width();
2524                     int aah = activeArrayCropRegion.height();
2525                     if (mCaptureStatsGridWidth <= 0 || mCaptureStatsGridWidth > aaw) {
2526                         mCaptureStatsGridWidth = aaw;
2527                     }
2528                     if (mCaptureStatsGridHeight <= 0 || mCaptureStatsGridHeight > aah) {
2529                         mCaptureStatsGridHeight = aah;
2530                     }
2531 
2532                     outputSizes[i] = new Size(width, height);
2533                     if (!surfaceObj.isNull("useCase")) {
2534                         mStreamUseCaseMap.put(i, surfaceObj.optLong("useCase"));
2535                     }
2536                 }
2537             } catch (org.json.JSONException e) {
2538                 throw new ItsException("JSON error", e);
2539             }
2540         } else {
2541             // No surface(s) specified at all.
2542             // Default: a single output surface which is full-res YUV.
2543             Size maxYuvSize = ItsUtils.getMaxOutputSize(
2544                     mCameraCharacteristics, ImageFormat.YUV_420_888);
2545             numSurfaces = backgroundRequest ? 2 : 1;
2546 
2547             outputSizes = new Size[numSurfaces];
2548             outputFormats = new int[numSurfaces];
2549             outputSizes[0] = maxYuvSize;
2550             outputFormats[0] = ImageFormat.YUV_420_888;
2551             if (backgroundRequest) {
2552                 outputSizes[1] = new Size(640, 480);
2553                 outputFormats[1] = ImageFormat.YUV_420_888;
2554             }
2555         }
2556 
2557         prepareImageReaders(ImageReaderArgs.valueOf(outputSizes, outputFormats, inputSize,
2558                 inputFormat, maxInputBuffers,
2559                 is10bitOutputPresent), reuseSession);
2560 
2561         return is10bitOutputPresent;
2562     }
2563 
2564     /**
2565      * Wait until mCountCallbacksRemaining is 0 or a specified amount of time has elapsed between
2566      * each callback.
2567      */
2568     private void waitForCallbacks(long timeoutMs) throws ItsException {
2569         synchronized(mCountCallbacksRemaining) {
2570             int currentCount = mCountCallbacksRemaining.get();
2571             while (currentCount > 0) {
2572                 try {
2573                     mCountCallbacksRemaining.wait(timeoutMs);
2574                 } catch (InterruptedException e) {
2575                     throw new ItsException("Waiting for callbacks was interrupted.", e);
2576                 }
2577 
2578                 int newCount = mCountCallbacksRemaining.get();
2579                 if (newCount == currentCount) {
2580                     throw new ItsException("No callback received within timeout " +
2581                             timeoutMs + "ms");
2582                 }
2583                 currentCount = newCount;
2584             }
2585         }
2586     }
2587 
2588     private void doGetSupportedVideoQualities(String id) throws ItsException {
2589         int cameraId = Integer.parseInt(id);
2590         StringBuilder profiles = new StringBuilder();
2591         for (Map.Entry<Integer, String> entry : CAMCORDER_PROFILE_QUALITIES_MAP.entrySet()) {
2592             appendSupportProfile(profiles, entry.getValue(), entry.getKey(), cameraId);
2593         }
2594         mSocketRunnableObj.sendResponse("supportedVideoQualities", profiles.toString());
2595     }
2596 
2597     private void doGetDefaultCameraPkgName() throws ItsException {
2598         PackageManager pkgMgr = getPackageManager();
2599         Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
2600         String pkgName = intent.resolveActivity(pkgMgr).getPackageName();
2601         Log.i(TAG, "Default camera pkg name: " + pkgName);
2602         mSocketRunnableObj.sendResponse("defaultCameraPkg", pkgName);
2603     }
2604 
2605     private void doGainMapCheck(JSONObject params) throws ItsException {
2606         String filePath;
2607         try {
2608             filePath = params.getString("filePath");
2609         } catch(org.json.JSONException e) {
2610             throw new ItsException("JSON error: ", e);
2611         }
2612         Bitmap bitmapImage = BitmapFactory.decodeFile(filePath);
2613         assert(bitmapImage != null);
2614         boolean gainmapPresent = bitmapImage.hasGainmap();
2615         Log.i(TAG, "Gainmap present? " + gainmapPresent);
2616         mSocketRunnableObj.sendResponse("gainmapPresent",
2617                 gainmapPresent ? "true" : "false");
2618     }
2619 
2620     private void doGetSupportedVideoSizesCapped(String id) throws ItsException {
2621         int cameraId = Integer.parseInt(id);
2622         // s1440p which is the max supported stream size in a combination, when preview
2623         // stabilization is on.
2624         Size maxPreviewSize = new Size(1920, 1440);
2625         ArrayList<Size> outputSizes = new ArrayList<>();
2626         for (Map.Entry<Integer, String> entry : CAMCORDER_PROFILE_QUALITIES_MAP.entrySet()) {
2627             if (CamcorderProfile.hasProfile(cameraId, entry.getKey())) {
2628                 CamcorderProfile camcorderProfile = getCamcorderProfile(cameraId, entry.getKey());
2629                 assert(camcorderProfile != null);
2630                 Size videoSize = new Size(camcorderProfile.videoFrameWidth,
2631                         camcorderProfile.videoFrameHeight);
2632                 outputSizes.add(videoSize);
2633             }
2634         }
2635         Log.i(TAG, "Supported video sizes: " + outputSizes.toString());
2636         String response = outputSizes.stream()
2637                 .distinct()
2638                 .filter(s -> s.getWidth() * s.getHeight()
2639                         <= maxPreviewSize.getWidth() * maxPreviewSize.getHeight())
2640                 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight()))
2641                 .map(Size::toString)
2642                 .collect(Collectors.joining(";"));
2643         mSocketRunnableObj.sendResponse("supportedVideoSizes", response);
2644     }
2645 
2646     private void appendSupportProfile(StringBuilder profiles, String name, int profile,
2647             int cameraId) {
2648         if (CamcorderProfile.hasProfile(cameraId, profile)) {
2649             profiles.append(name).append(':').append(profile).append(';');
2650         }
2651     }
2652 
2653     private boolean isVideoStabilizationModeSupported(int mode) {
2654         int[] videoStabilizationModes = mCameraCharacteristics.get(
2655                 CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
2656         List<Integer> arrList = Arrays.asList(CameraTestUtils.toObject(videoStabilizationModes));
2657         assert(videoStabilizationModes != null);
2658         assert(arrList.contains(CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF));
2659         Log.i(TAG, "videoStabilizationModes:" + Arrays.toString(videoStabilizationModes));
2660         return arrList.contains(mode);
2661     }
2662 
2663     private void doGetSupportedPreviewSizes() throws ItsException {
2664         StreamConfigurationMap configMap = mCameraCharacteristics.get(
2665                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2666         if (!StreamConfigurationMap.isOutputSupportedFor(SurfaceHolder.class)) {
2667             mSocketRunnableObj.sendResponse("supportedPreviewSizes", "");
2668             return;
2669         }
2670 
2671         Size[] outputSizes = configMap.getOutputSizes(ImageFormat.YUV_420_888);
2672         if (outputSizes == null) {
2673             mSocketRunnableObj.sendResponse("supportedPreviewSizes", "");
2674             return;
2675         }
2676 
2677         String response = Arrays.stream(outputSizes)
2678                 .distinct()
2679                 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight()))
2680                 .map(Size::toString)
2681                 .collect(Collectors.joining(";"));
2682 
2683         mSocketRunnableObj.sendResponse("supportedPreviewSizes", response);
2684     }
2685 
2686     private void doGetQueryableStreamCombinations() throws ItsException {
2687         StaticMetadata staticMetadata = new StaticMetadata(mCameraCharacteristics);
2688         MaxStreamSizes maxStreamSizes = new MaxStreamSizes(staticMetadata,
2689                 mCamera.getId(), (Context) this, /*matchSize*/true);
2690         StringBuilder responseBuilder = new StringBuilder();
2691         int[][] queryableCombinations = maxStreamSizes.getQueryableCombinations();
2692         for (int i = 0; i < queryableCombinations.length; i++) {
2693             String oneCombination = "";
2694             for (int j = 0; j < queryableCombinations[i].length; j += 2) {
2695                 String format = sFormatMap.get(queryableCombinations[i][j]);
2696                 int sizeIndex = queryableCombinations[i][j + 1];
2697                 Size size = maxStreamSizes.getOutputSizeForFormat(
2698                         queryableCombinations[i][j], sizeIndex);
2699                 String oneStream = format + ":" + size.toString();
2700                 if (j > 0) {
2701                     oneCombination += "+";
2702                 }
2703                 oneCombination += oneStream;
2704             }
2705 
2706             if (i > 0) {
2707                 responseBuilder.append(";");
2708             }
2709             responseBuilder.append(oneCombination);
2710         }
2711 
2712         Log.i(TAG, "queryableStreamCombinations response is " + responseBuilder.toString());
2713         mSocketRunnableObj.sendResponse("queryableStreamCombinations", responseBuilder.toString());
2714     }
2715 
2716     private void doGetSupportedExtensions(String id) throws ItsException {
2717         try {
2718             CameraExtensionCharacteristics chars =
2719                     mCameraManager.getCameraExtensionCharacteristics(id);
2720             List<Integer> extensionsList = chars.getSupportedExtensions();
2721             mSocketRunnableObj.sendResponse("supportedExtensions", extensionsList.toString());
2722         } catch (CameraAccessException e) {
2723             throw new ItsException("Failed to get supported extensions list", e);
2724         }
2725     }
2726 
2727     private void doGetSupportedExtensionSizes(
2728             String id, int extension, int format) throws ItsException {
2729         try {
2730             CameraExtensionCharacteristics chars =
2731                     mCameraManager.getCameraExtensionCharacteristics(id);
2732             List<Size> extensionSizes = chars.getExtensionSupportedSizes(extension, format);
2733             String response = extensionSizes.stream()
2734                 .distinct()
2735                 .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight()))
2736                 .map(Size::toString)
2737                 .collect(Collectors.joining(";"));
2738             mSocketRunnableObj.sendResponse("supportedExtensionSizes", response);
2739         } catch (CameraAccessException e) {
2740             throw new ItsException("Failed to get supported extensions sizes list", e);
2741         }
2742     }
2743 
2744     private void doGetSupportedExtensionPreviewSizes(String id, int extension)
2745             throws ItsException {
2746         try {
2747             CameraExtensionCharacteristics chars =
2748                     mCameraManager.getCameraExtensionCharacteristics(id);
2749             List<Size> extensionSizes = chars.getExtensionSupportedSizes(extension,
2750                     SurfaceTexture.class);
2751             String response = extensionSizes.stream()
2752                     .distinct()
2753                     .sorted(Comparator.comparingInt(s -> s.getWidth() * s.getHeight()))
2754                     .map(Size::toString)
2755                     .collect(Collectors.joining(";"));
2756             mSocketRunnableObj.sendResponse("supportedExtensionPreviewSizes", response);
2757         } catch (CameraAccessException e) {
2758             throw new ItsException("Failed to get supported extensions sizes list", e);
2759         }
2760     }
2761 
2762     private void doBasicRecording(String cameraId, int profileId, String quality,
2763             int recordingDuration, int videoStabilizationMode,
2764             boolean hlg10Enabled, double zoomRatio, int aeTargetFpsMin, int aeTargetFpsMax)
2765             throws ItsException {
2766         RecordingResultListener recordingResultListener = new RecordingResultListener();
2767 
2768         if (!hlg10Enabled) {
2769             doBasicRecording(cameraId, profileId, quality, recordingDuration,
2770                     videoStabilizationMode, zoomRatio, aeTargetFpsMin, aeTargetFpsMax);
2771             return;
2772         }
2773 
2774         int cameraDeviceId = Integer.parseInt(cameraId);
2775         CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId);
2776         assert (camcorderProfile != null);
2777         boolean supportsVideoStabilizationMode = isVideoStabilizationModeSupported(
2778                 videoStabilizationMode);
2779         if (!supportsVideoStabilizationMode) {
2780             throw new ItsException("Device does not support video stabilization mode: " +
2781                     videoStabilizationMode);
2782         }
2783         Size videoSize = new Size(camcorderProfile.videoFrameWidth,
2784                 camcorderProfile.videoFrameHeight);
2785         int fileFormat = camcorderProfile.fileFormat;
2786         String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize, quality, fileFormat,
2787                 /* hlg10Enabled= */ true,
2788                 /* stabilized= */
2789                 videoStabilizationMode != CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF,
2790                 zoomRatio);
2791         assert (outputFilePath != null);
2792 
2793         MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
2794         MediaFormat format = ItsUtils.initializeHLG10Format(videoSize,
2795                 camcorderProfile.videoBitRate, camcorderProfile.videoFrameRate);
2796 
2797         String codecName = list.findEncoderForFormat(format);
2798         assert (codecName != null);
2799 
2800         int[] caps = mCameraCharacteristics.get(
2801                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
2802         assert ((caps != null) && IntStream.of(caps).anyMatch(x -> x ==
2803                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT));
2804 
2805         DynamicRangeProfiles profiles = mCameraCharacteristics.get(
2806                 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES);
2807         assert ((profiles != null) &&
2808                 profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10));
2809 
2810         MediaCodec mediaCodec = null;
2811         MediaMuxer muxer = null;
2812         Log.i(TAG, "Video recording outputFilePath:"+ outputFilePath);
2813         try {
2814             muxer = new MediaMuxer(outputFilePath,
2815                     MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
2816         } catch (IOException e) {
2817             throw new ItsException("Error preparing the MediaMuxer.");
2818         }
2819         try {
2820             mediaCodec = MediaCodec.createByCodecName(codecName);
2821         } catch (IOException e) {
2822             throw new ItsException("Error preparing the MediaCodec.");
2823         }
2824 
2825         mediaCodec.configure(format, null, null,
2826                 MediaCodec.CONFIGURE_FLAG_ENCODE);
2827         Object condition = new Object();
2828         mediaCodec.setCallback(new ItsUtils.MediaCodecListener(muxer, condition), mCameraHandler);
2829 
2830         mRecordSurface = mediaCodec.createInputSurface();
2831         assert(mRecordSurface != null);
2832 
2833         CameraCaptureSession.StateCallback mockCallback = mock(
2834                 CameraCaptureSession.StateCallback.class);
2835         // Configure and create capture session.
2836         try {
2837             configureAndCreateCaptureSession(CameraDevice.TEMPLATE_RECORD, mRecordSurface,
2838                     videoStabilizationMode, /*ois=*/ false, DynamicRangeProfiles.HLG10,
2839                     mockCallback, zoomRatio, aeTargetFpsMin, aeTargetFpsMax,
2840                     recordingResultListener, /*extraConfigs*/null);
2841         } catch (CameraAccessException e) {
2842             throw new ItsException("Access error: ", e);
2843         }
2844 
2845         Log.i(TAG, "Now recording video for quality: " + quality + " profile id: " +
2846                 profileId + " cameraId: " + cameraDeviceId + " size: " + videoSize + " in HLG10!");
2847         mediaCodec.start();
2848         try {
2849             Thread.sleep(recordingDuration * 1000L); // recordingDuration is in seconds
2850         } catch (InterruptedException e) {
2851             throw new ItsException("Unexpected InterruptedException: ", e);
2852         }
2853 
2854         mediaCodec.signalEndOfInputStream();
2855         mSession.close();
2856         verify(mockCallback, timeout(ItsUtils.SESSION_CLOSE_TIMEOUT_MS)
2857                 .times(1)).onClosed(eq(mSession));
2858 
2859         synchronized (condition) {
2860             try {
2861                 condition.wait(ItsUtils.SESSION_CLOSE_TIMEOUT_MS);
2862             } catch (InterruptedException e) {
2863                 throw new ItsException("Unexpected InterruptedException: ", e);
2864             }
2865         }
2866 
2867         muxer.stop();
2868         mediaCodec.stop();
2869         mediaCodec.release();
2870         muxer.release();
2871         mRecordSurface.release();
2872         mRecordSurface = null;
2873 
2874         Log.i(TAG, "10-bit Recording Done for quality: " + quality);
2875 
2876         // Send VideoRecordingObject for further processing.
2877         VideoRecordingObject obj = new VideoRecordingObject(outputFilePath,
2878                 quality, videoSize, camcorderProfile.videoFrameRate, fileFormat, zoomRatio,
2879                 /*perFrameCaptureResults=*/ Collections.emptyList());
2880         mSocketRunnableObj.sendVideoRecordingObject(obj);
2881     }
2882 
2883     private void doBasicRecording(String cameraId, int profileId, String quality,
2884             int recordingDuration, int videoStabilizationMode,
2885             double zoomRatio, int aeTargetFpsMin, int aeTargetFpsMax) throws ItsException {
2886         RecordingResultListener recordingResultListener = new RecordingResultListener();
2887         int cameraDeviceId = Integer.parseInt(cameraId);
2888         mMediaRecorder = new MediaRecorder();
2889         CamcorderProfile camcorderProfile = getCamcorderProfile(cameraDeviceId, profileId);
2890         assert(camcorderProfile != null);
2891         boolean supportsVideoStabilizationMode = isVideoStabilizationModeSupported(
2892                 videoStabilizationMode);
2893         if (!supportsVideoStabilizationMode) {
2894             throw new ItsException("Device does not support video stabilization mode: " +
2895                     videoStabilizationMode);
2896         }
2897         Size videoSize = new Size(camcorderProfile.videoFrameWidth,
2898                 camcorderProfile.videoFrameHeight);
2899         int fileFormat = camcorderProfile.fileFormat;
2900         String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize, quality,
2901                 fileFormat, /* stabilized= */
2902                 videoStabilizationMode != CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF,
2903                 zoomRatio);
2904         assert(outputFilePath != null);
2905         Log.i(TAG, "Video recording outputFilePath:"+ outputFilePath);
2906         setupMediaRecorderWithProfile(camcorderProfile, outputFilePath);
2907         // Prepare MediaRecorder
2908         try {
2909             mMediaRecorder.prepare();
2910         } catch (IOException e) {
2911             throw new ItsException("Error preparing the MediaRecorder.");
2912         }
2913 
2914         mRecordSurface = mMediaRecorder.getSurface();
2915         // Configure and create capture session.
2916         try {
2917             configureAndCreateCaptureSession(CameraDevice.TEMPLATE_RECORD, mRecordSurface,
2918                     videoStabilizationMode, /*ois=*/ false, DynamicRangeProfiles.STANDARD,
2919                     /*stateCallback=*/ null, zoomRatio, aeTargetFpsMin, aeTargetFpsMax,
2920                     recordingResultListener, /*extraConfigs*/null);
2921         } catch (android.hardware.camera2.CameraAccessException e) {
2922             throw new ItsException("Access error: ", e);
2923         }
2924         // Start Recording
2925         if (mMediaRecorder != null) {
2926             Log.i(TAG, "Now recording video for quality: " + quality + " profile id: " +
2927                 profileId + " cameraId: " + cameraDeviceId + " size: " + videoSize);
2928             mMediaRecorder.start();
2929             try {
2930                 Thread.sleep(recordingDuration * 1000L); // recordingDuration is in seconds
2931             } catch (InterruptedException e) {
2932                 throw new ItsException("Unexpected InterruptedException: ", e);
2933             }
2934             // Stop MediaRecorder
2935             mMediaRecorder.stop();
2936             mSession.close();
2937             mMediaRecorder.reset();
2938             mMediaRecorder.release();
2939             mMediaRecorder = null;
2940             if (mRecordSurface != null) {
2941                 mRecordSurface.release();
2942                 mRecordSurface = null;
2943             }
2944         }
2945 
2946         Log.i(TAG, "Recording Done for quality: " + quality);
2947 
2948         // Send VideoRecordingObject for further processing.
2949         VideoRecordingObject obj = new VideoRecordingObject(outputFilePath,
2950                 quality, videoSize, camcorderProfile.videoFrameRate, fileFormat, zoomRatio,
2951                 /*perFrameCaptureResults=*/ Collections.emptyList());
2952         mSocketRunnableObj.sendVideoRecordingObject(obj);
2953     }
2954 
2955     /**
2956      * Sets up a PreviewRecorder with a surface set up as a preview.
2957      *
2958      * This method sets up 2 surfaces: an {@link ImageReader} surface and a
2959      * {@link MediaRecorder} surface. The ImageReader surface is set up with
2960      * {@link HardwareBuffer#USAGE_COMPOSER_OVERLAY} and set as the target of one or many capture
2961      * requests created with {@link CameraDevice#TEMPLATE_PREVIEW}. This should force the HAL to
2962      * use the Preview pipeline and output to the ImageReader. An {@link ImageWriter} pipes the
2963      * images from ImageReader to the MediaRecorder surface which is encoded into a video.
2964      */
2965     private PreviewRecorder getPreviewRecorder(JSONObject cmdObj, String outputFilePath,
2966             Size videoSize, boolean hlg10Enabled) throws ItsException, JSONException {
2967         String cameraId = cmdObj.getString("cameraId");
2968         boolean stabilize = cmdObj.getBoolean("stabilize");
2969         int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");
2970 
2971         if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU) {
2972             throw new ItsException("Cannot record preview before API level 33");
2973         }
2974 
2975         boolean stabilizationSupported = isVideoStabilizationModeSupported(
2976                 CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION);
2977         if (stabilize && !stabilizationSupported) {
2978             throw new ItsException("Preview stabilization requested, but not supported by device.");
2979         }
2980 
2981         int[] caps = mCameraCharacteristics.get(
2982                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
2983         boolean support10Bit = (caps != null) && IntStream.of(caps).anyMatch(x -> x
2984                 == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
2985         if (hlg10Enabled) {
2986             if (!support10Bit) {
2987                 throw new ItsException("HLG10 requested, but 10-bit capability "
2988                         + "is not supported by device.");
2989             }
2990         }
2991 
2992         int cameraDeviceId = Integer.parseInt(cameraId);
2993         int sensorOrientation = Objects.requireNonNull(
2994                 mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION),
2995                 "Sensor orientation must not be null"
2996         );
2997 
2998         // By default aeTargetFpsMax is not set. In that case, default to 30
2999         if (aeTargetFpsMax == 0) {
3000             aeTargetFpsMax = 30;
3001         }
3002         return new PreviewRecorder(cameraDeviceId, videoSize, aeTargetFpsMax,
3003                 sensorOrientation, outputFilePath, mCameraHandler, hlg10Enabled, this);
3004     }
3005 
3006     private void doStaticPreviewRecording(JSONObject cmdObj) throws JSONException, ItsException {
3007         int recordingDuration = cmdObj.getInt("recordingDuration");
3008         RecordingResultListener recordingResultListener = new RecordingResultListener();
3009         mPreviewAction = new PreviewSleepAction(
3010                 mCameraCharacteristics,
3011                 mCameraHandler,
3012                 recordingResultListener,
3013                 recordingDuration * 1000L);
3014         doPreviewRecordingWithAction(cmdObj, mPreviewAction);
3015     }
3016 
3017     private void doDynamicZoomPreviewRecording(JSONObject cmdObj)
3018             throws JSONException, ItsException {
3019         double zoomStart = cmdObj.getDouble("zoomStart");
3020         double zoomEnd = cmdObj.getDouble("zoomEnd");
3021         double stepSize = cmdObj.getDouble("stepSize");
3022         long stepDuration = cmdObj.getLong("stepDuration");
3023         RecordingResultListener recordingResultListener = new RecordingResultListener();
3024         mPreviewAction = new PreviewDynamicZoomAction(
3025                 mCameraCharacteristics,
3026                 mCameraHandler,
3027                 recordingResultListener,
3028                 zoomStart,
3029                 zoomEnd,
3030                 stepSize,
3031                 stepDuration);
3032         doPreviewRecordingWithAction(cmdObj, mPreviewAction);
3033     }
3034 
3035     private void doDynamicMeteringRegionPreviewRecording(JSONObject cmdObj)
3036             throws JSONException, ItsException {
3037         JSONArray aeAwbRegionOne = cmdObj.getJSONArray("aeAwbRegionOne");
3038         JSONArray aeAwbRegionTwo = cmdObj.getJSONArray("aeAwbRegionTwo");
3039         JSONArray aeAwbRegionThree = cmdObj.getJSONArray("aeAwbRegionThree");
3040         JSONArray aeAwbRegionFour = cmdObj.getJSONArray("aeAwbRegionFour");
3041         long aeAwbRegionDuration = cmdObj.getLong("aeAwbRegionDuration");
3042         RecordingResultListener recordingResultListener = new RecordingResultListener();
3043         mPreviewAction = new PreviewDynamicMeteringAction(
3044                 mCameraCharacteristics,
3045                 mCameraHandler,
3046                 recordingResultListener,
3047                 aeAwbRegionOne,
3048                 aeAwbRegionTwo,
3049                 aeAwbRegionThree,
3050                 aeAwbRegionFour,
3051                 aeAwbRegionDuration);
3052         doPreviewRecordingWithAction(cmdObj, mPreviewAction);
3053     }
3054 
3055     /**
3056      * Records a video of a surface set up as a preview, performing an action while recording.
3057      */
3058     private void doPreviewRecordingWithAction(
3059             JSONObject cmdObj,
3060             IntraPreviewAction action)
3061             throws JSONException, ItsException {
3062         String cameraId = cmdObj.getString("cameraId");
3063         boolean stabilize = cmdObj.getBoolean("stabilize");
3064         boolean ois = cmdObj.getBoolean("ois");
3065         double zoomRatio = cmdObj.optDouble("zoomRatio");
3066         // Override with zoomStart if zoomRatio was not specified
3067         zoomRatio = (Double.isNaN(zoomRatio)) ? cmdObj.optDouble("zoomStart") : zoomRatio;
3068         boolean paddedFrames = cmdObj.optBoolean("paddedFrames", false);
3069         int aeTargetFpsMin = cmdObj.optInt("aeTargetFpsMin");
3070         int aeTargetFpsMax = cmdObj.optInt("aeTargetFpsMax");
3071         // Record surface size and HDRness.
3072         JSONArray outputSpecs = ItsUtils.getOutputSpecs(cmdObj);
3073         if (outputSpecs == null || outputSpecs.length() == 0) {
3074             throw new ItsException("No output surfaces!");
3075         }
3076         JSONObject recordSurfaceObj = outputSpecs.getJSONObject(0);
3077         String format = recordSurfaceObj.optString("format");
3078         if (!format.equals("priv")) {
3079             throw new ItsException("Record surface must be PRIV format!, but is " + format);
3080         }
3081         Size videoSize = new Size(
3082                 recordSurfaceObj.getInt("width"),
3083                 recordSurfaceObj.getInt("height"));
3084         boolean hlg10Enabled = recordSurfaceObj.optBoolean("hlg10");
3085 
3086         // Remove first output spec and use the rest to create ImageReaders
3087         List<OutputConfiguration> extraConfigs = null;
3088         outputSpecs.remove(0);
3089         if (outputSpecs.length() > 0) {
3090             boolean is10bitOutputPresent = prepareImageReadersWithOutputSpecs(
3091                     outputSpecs, /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
3092                     /*backgroundRequest*/false, /*reuseSession*/false);
3093             extraConfigs = getCaptureOutputConfigurations(outputSpecs, is10bitOutputPresent);
3094         }
3095         List<RecordingResult> recordingResults = new ArrayList<>();
3096         RecordingResultListener recordingResultListener = action.getRecordingResultListener();
3097 
3098         int fileFormat = MediaRecorder.OutputFormat.DEFAULT;
3099         int cameraDeviceId = Integer.parseInt(cameraId);
3100         String outputFilePath = getOutputMediaFile(cameraDeviceId, videoSize,
3101                 /* quality= */"preview", fileFormat, hlg10Enabled, stabilize, zoomRatio,
3102                 aeTargetFpsMin, aeTargetFpsMax);
3103         assert outputFilePath != null;
3104 
3105         try (PreviewRecorder pr = getPreviewRecorder(cmdObj, outputFilePath, videoSize,
3106                 hlg10Enabled)) {
3107             int stabilizationMode = stabilize
3108                     ? CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION
3109                     : CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF;
3110             BlockingSessionCallback sessionListener = new BlockingSessionCallback();
3111             long dynamicRangeProfile = hlg10Enabled ? DynamicRangeProfiles.HLG10 :
3112                     DynamicRangeProfiles.STANDARD;
3113             pr.startRecording();
3114             if (paddedFrames) {
3115                 Logt.v(TAG, "Record Green frames at the beginning of the video");
3116                 pr.overrideCameraFrames(true);
3117 
3118                 // MediaRecorder APIs don't specify whether they're synchronous or asynchronous,
3119                 // and different vendors seem to have interpret this differently. This delay
3120                 // allows for MediaRecorder to complete the `startRecording` routine before
3121                 // streaming frames from the camera. b/348332718
3122                 try {
3123                     Thread.sleep(PADDED_FRAMES_MS);
3124                 } catch (InterruptedException e) {
3125                     Logt.e(TAG, "Interrupted while waiting for MediaRecorder to prepare", e);
3126                 }
3127             }
3128             configureAndCreateCaptureSession(CameraDevice.TEMPLATE_PREVIEW,
3129                     pr.getCameraSurface(), stabilizationMode, ois, dynamicRangeProfile,
3130                     sessionListener, zoomRatio, aeTargetFpsMin, aeTargetFpsMax,
3131                     recordingResultListener, extraConfigs);
3132             if (paddedFrames) {
3133                 Logt.v(TAG, "Wait " + PADDED_FRAMES_MS + " msec for Green frames for padding");
3134                 try {
3135                     Thread.sleep(PADDED_FRAMES_MS);
3136                 } catch (InterruptedException e) {
3137                     Logt.e(TAG, "Interrupted while waiting for green frames", e);
3138                 }
3139 
3140                 Logt.v(TAG, "Record Camera frames after green frames");
3141                 pr.overrideCameraFrames(false);
3142             }
3143 
3144             action.execute();
3145 
3146             if (paddedFrames) {
3147                 pr.overrideCameraFrames(true);
3148                 try {
3149                     Thread.sleep(PADDED_FRAMES_MS);
3150                 } catch (InterruptedException e) {
3151                     Logt.e(TAG, "Interrupted while waiting for green frames", e);
3152                 }
3153             }
3154 
3155             // Stop repeating request and ensure frames in flight are sent to MediaRecorder
3156             mSession.stopRepeating();
3157             sessionListener.getStateWaiter().waitForState(
3158                     BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
3159             pr.stopRecording();
3160             mSession.close();
3161 
3162             int frameNum = 1;
3163             for (Long timestamp : pr.getFrameTimeStamps()) {
3164                 if (recordingResultListener.getCaptureResultsMap().containsKey(timestamp)) {
3165                     RecordingResult result = recordingResultListener.getCaptureResultsMap().get(
3166                             timestamp);
3167                     recordingResults.add(result);
3168                     Logt.v(TAG, "Frame# " + frameNum + " timestamp: " + timestamp + " cr = "
3169                             + result.mMap.values());
3170                     recordingResultListener.getCaptureResultsMap().remove(timestamp);
3171                 } else {
3172                     throw new ItsException("Frame# " + frameNum
3173                             + " No RecordingResult found for timestamp: " + timestamp);
3174                 }
3175                 frameNum++;
3176             }
3177         } catch (CameraAccessException e) {
3178             throw new ItsException("Error configuring and creating capture request", e);
3179         } catch (InterruptedException e) {
3180             throw new ItsException("Interrupted while recording preview", e);
3181         }
3182 
3183         Log.i(TAG, "Preview recording complete: " + outputFilePath);
3184         // Send VideoRecordingObject for further processing.
3185         VideoRecordingObject obj = new VideoRecordingObject(outputFilePath, /* quality= */"preview",
3186                 videoSize, fileFormat, zoomRatio, recordingResults);
3187         mSocketRunnableObj.sendVideoRecordingObject(obj);
3188     }
3189 
3190     /**
3191      * Captures the nth frame of a surface set up as a preview.
3192      *
3193      * This method sets up an {@link ImageReader} surface. The ImageReader surface is set up with
3194      * {@link HardwareBuffer#USAGE_COMPOSER_OVERLAY} and set as the target of a capture request
3195      * created with {@link CameraDevice#TEMPLATE_PREVIEW}. This should force the HAL to use the
3196      * Preview pipeline and output to the ImageReader.
3197      **/
3198     private void doCapturePreviewFrame(JSONObject params)
3199             throws org.json.JSONException, ItsException {
3200         int cameraDeviceId = Integer.parseInt(params.getString("cameraId"));
3201         Size previewSize = Size.parseSize(params.getString("previewSize"));
3202         int frameNumToCapture = params.getInt("frameNum");
3203         int extension = params.getInt("extension");
3204 
3205         Log.i(TAG, "doCapturePreviewFrame [start] cameraId: " + cameraDeviceId
3206                 + " previewSize: " + previewSize + " frameNum: " + frameNumToCapture
3207                 + " extension: " + extension);
3208 
3209         int sensorOrientation = mCameraCharacteristics.get(
3210                 CameraCharacteristics.SENSOR_ORIENTATION);
3211 
3212         // We don't invoke recording but a valid file is still required
3213         String quality = "preview";
3214         int fileFormat = MediaRecorder.OutputFormat.DEFAULT;
3215         boolean stabilize = false;
3216         float zoomRatio = 1.0f;
3217         String outputFilePath = getOutputMediaFile(cameraDeviceId, previewSize,
3218                 quality, fileFormat, stabilize, zoomRatio);
3219         assert outputFilePath != null;
3220 
3221         int aeTargetFpsMax = 30;
3222         try (PreviewRecorder pr = new PreviewRecorder(cameraDeviceId, previewSize, aeTargetFpsMax,
3223                 sensorOrientation, outputFilePath, mCameraHandler, /*hlg10Enabled*/false, this)) {
3224             CaptureRequest.Builder reqBuilder = mCamera.createCaptureRequest(
3225                     CameraDevice.TEMPLATE_PREVIEW);
3226             JSONObject captureReqJSON = params.getJSONObject("captureRequest");
3227             // Create deep copy of the original capture request. The deserialize operation strips
3228             // keys. The deep copy preserves the keys.
3229             JSONObject threeAReqJSON = new JSONObject(captureReqJSON.toString());
3230             reqBuilder = ItsSerializer.deserialize(reqBuilder, captureReqJSON);
3231             CaptureRequest.Builder threeAReqBuilder = mCamera.createCaptureRequest(
3232                     CameraDevice.TEMPLATE_PREVIEW);
3233             threeAReqBuilder = ItsSerializer.deserialize(threeAReqBuilder, threeAReqJSON);
3234             ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
3235             // Do not send 3A results
3236             mSend3AResults = false;
3237 
3238             // If extension is -1 then use Camera2
3239             if (extension == -1) {
3240                 capturePreviewFrame(
3241                         reqBuilder,
3242                         threeAReqBuilder,
3243                         frameNumToCapture,
3244                         pr,
3245                         outputStream);
3246             } else {
3247                 capturePreviewFrameWithExtension(
3248                         reqBuilder,
3249                         threeAReqBuilder,
3250                         frameNumToCapture,
3251                         pr,
3252                         outputStream,
3253                         extension);
3254             }
3255 
3256             Log.i(TAG, "Preview frame capture complete");
3257             mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage",
3258                         ByteBuffer.wrap(outputStream.toByteArray()));
3259         } catch (CameraAccessException e) {
3260             Log.e(TAG, "doCapturePreviewFrame [error]", e);
3261             throw new ItsException("Error configuring and creating capture request", e);
3262         } catch (InterruptedException e) {
3263             Log.e(TAG, "doCapturePreviewFrame [error]", e);
3264             throw new ItsException("Interrupted while recording preview", e);
3265         }
3266     }
3267 
    /**
     * Streams preview frames to {@code pr}'s surface and captures the frame after
     * {@code frameNumToCapture} frames have been received, writing it to {@code outputStream}.
     *
     * <p>Runs a 3A precapture sequence first: a repeating preview request plus a one-shot
     * request with AF/AE triggers, then spins until {@code mPrecaptureTriggered} and
     * {@code mConvergeAETriggered} are set by the result listener (signaled via
     * {@code mInterlock3A}), with a {@code TIMEOUT_3A}-second budget for each wait.
     *
     * @param reqBuilder        builder for the repeating preview request.
     * @param threeAReqBuilder  builder for the one-shot 3A trigger request.
     * @param frameNumToCapture zero-based index of the frame to capture.
     * @param pr                recorder providing the camera surface and the captured frame.
     * @param outputStream      destination for the captured frame bytes.
     * @throws ItsException          if 3A fails to converge within the timeout.
     * @throws CameraAccessException on camera service errors while driving the session.
     * @throws InterruptedException  if interrupted while waiting for states or frames.
     */
    private void capturePreviewFrame(CaptureRequest.Builder reqBuilder,
            CaptureRequest.Builder threeAReqBuilder, int frameNumToCapture,
            PreviewRecorder pr, OutputStream outputStream)
            throws ItsException, CameraAccessException, InterruptedException {
        Log.d(TAG, "capturePreviewFrame [start]");
        // Latch counts down once per frame; +1 so it releases after the target frame arrives.
        CountDownLatch frameNumLatch = new CountDownLatch(frameNumToCapture + 1);
        PreviewFrameCaptureResultListener captureResultListener =
                new PreviewFrameCaptureResultListener(frameNumLatch);

        Surface surface = pr.getCameraSurface();
        reqBuilder.addTarget(surface);
        // Keep the repeating request from re-triggering precapture on its own.
        reqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
        OutputConfiguration outConfig = new OutputConfiguration(surface);

        // Advertise the PREVIEW stream use case when the device supports it, so the HAL
        // routes this stream through its preview pipeline.
        long[] availableStreamUseCases = mCameraCharacteristics.get(
                CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES);
        long previewStreamUseCase =
                Long.valueOf(CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        if (availableStreamUseCases != null && Longs.asList(availableStreamUseCases).contains(
                previewStreamUseCase)) {
            outConfig.setStreamUseCase(
                    CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
        }

        BlockingSessionCallback sessionListener = new BlockingSessionCallback();
        SessionConfiguration sessionConfiguration = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, List.of(outConfig),
                new HandlerExecutor(mCameraHandler),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        // Publish the session to the enclosing object before forwarding.
                        mSession = session;
                        sessionListener.onConfigured(session);
                    }

                    @Override
                    public void onReady(CameraCaptureSession session) {
                        sessionListener.onReady(session);
                    }

                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.i(TAG, "CameraCaptureSession configuration failed.");
                        sessionListener.onConfigureFailed(session);
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        sessionListener.onClosed(session);
                    }
                });

        // Create capture session
        mCamera.createCaptureSession(sessionConfiguration);

        sessionListener.getStateWaiter().waitForState(
                BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
        try {
            ThreeAResultListener threeAListener = new ThreeAResultListener();
            Logt.i(TAG, "Triggering precapture sequence");

            mSession.setRepeatingRequest(reqBuilder.build(), threeAListener,
                    mCameraHandler);
            // Reset converge flags before issuing the trigger; the result listener sets
            // them as the 3A state machine advances.
            synchronized (m3AStateLock) {
                mPrecaptureTriggered = false;
                mConvergeAETriggered = false;
            }

            threeAReqBuilder.addTarget(surface);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            mSession.capture(threeAReqBuilder.build(), threeAListener,
                    mCameraHandler);
            mInterlock3A.open();
            // Wait (bounded) for the precapture trigger to be acknowledged in results.
            long tstart = System.currentTimeMillis();
            while (!mPrecaptureTriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                        "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                        + "AE converge state: " + mConvergedAE + ".");
                }
            }

            // Then wait (bounded) for AE to report convergence.
            tstart = System.currentTimeMillis();
            while (!mConvergeAETriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                        "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                        + "AE converge state: " + mConvergedAE + ".");
                }
            }
            mInterlock3A.close();
            Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
            threeAListener.stop();
        } catch (CameraAccessException e) {
            // NOTE(review): failure here is logged but not rethrown; the frame wait below
            // still proceeds — confirm this best-effort behavior is intended.
            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
        }

        Log.d(TAG, "capturePreviewFrame [waiting for " + frameNumToCapture + " frames]");
        // Wait until the requested number of frames have been received and then capture the frame
        // NOTE(review): the await() return value is ignored, so a timeout falls through to
        // getFrame() anyway — confirm best-effort capture on timeout is intended.
        mSession.setRepeatingRequest(reqBuilder.build(), captureResultListener,
                mCameraHandler);
        frameNumLatch.await(TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS, TimeUnit.SECONDS);
        Log.d(TAG, "capturePreviewFrame [getting frame]");
        pr.getFrame(outputStream);

        // Stop repeating request
        Log.d(TAG, "capturePreviewFrame [stopping repeating request]");
        mSession.stopRepeating();
        mSession.close();
        sessionListener.getStateWaiter().waitForState(
                BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
        Log.d(TAG, "capturePreviewFrame [end]");
    }
3388 
    /**
     * Streams preview frames through a {@link CameraExtensionSession} and captures the frame
     * after {@code frameNumToCapture} frames have been received, writing it to
     * {@code outputStream}.
     *
     * <p>Mirrors {@code capturePreviewFrame} but drives the extension session APIs: a 3A
     * precapture sequence runs first (a repeating preview request plus a one-shot AF/AE
     * trigger request), then the method spins until {@code mPrecaptureTriggered} and
     * {@code mConvergeAETriggered} are set by the result listener (signaled via
     * {@code mInterlock3A}), with a {@code TIMEOUT_3A}-second budget for each wait.
     *
     * @param reqBuilder        builder for the repeating preview request.
     * @param threeAReqBuilder  builder for the one-shot 3A trigger request.
     * @param frameNumToCapture zero-based index of the frame to capture.
     * @param pr                recorder providing the camera surface and the captured frame.
     * @param outputStream      destination for the captured frame bytes.
     * @param extension         the camera extension mode for the session.
     * @throws CameraAccessException on camera service errors while driving the session.
     * @throws InterruptedException  if interrupted while waiting for states or frames.
     * @throws ItsException          if 3A fails to converge within the timeout.
     */
    private void capturePreviewFrameWithExtension(CaptureRequest.Builder reqBuilder,
            CaptureRequest.Builder threeAReqBuilder, int frameNumToCapture,
            PreviewRecorder pr, OutputStream outputStream, int extension)
            throws CameraAccessException, InterruptedException, ItsException {
        Log.d(TAG, "capturePreviewFrameWithExtension [start]");

        Surface surface = pr.getCameraSurface();
        reqBuilder.addTarget(surface);
        // Keep the repeating request from re-triggering precapture on its own.
        reqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);

        // Latch counts down once per frame; +1 so it releases after the target frame arrives.
        CountDownLatch frameNumLatch = new CountDownLatch(frameNumToCapture + 1);
        ExtensionPreviewFrameCaptureResultListener captureResultListener =
                new ExtensionPreviewFrameCaptureResultListener(frameNumLatch);

        BlockingExtensionSessionCallback sessionListener =
                new BlockingExtensionSessionCallback();
        Log.d(TAG, "capturePreviewFrameWithExtension [config and create extension session]");
        configureAndCreateExtensionSession(
                /* previewSurface */ surface,
                /* captureSurface */ null,
                extension,
                sessionListener);

        Log.d(TAG, "capturePreviewFrameWithExtension [start extension session]");
        mExtensionSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS_EXTENSIONS);
        Executor executor = new HandlerExecutor(mResultHandler);
        try {
            ExtensionsThreeAResultListener threeAListener = new ExtensionsThreeAResultListener();
            Logt.i(TAG, "Triggering precapture sequence");
            mExtensionSession.setRepeatingRequest(reqBuilder.build(), executor,
                    threeAListener);
            // Reset converge flags before issuing the trigger; the result listener sets
            // them as the 3A state machine advances.
            synchronized (m3AStateLock) {
                mPrecaptureTriggered = false;
                mConvergeAETriggered = false;
            }

            threeAReqBuilder.addTarget(surface);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CameraMetadata.CONTROL_AF_TRIGGER_START);
            threeAReqBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            mExtensionSession.capture(threeAReqBuilder.build(), executor,
                    threeAListener);
            mInterlock3A.open();

            // Wait (bounded) for the precapture trigger to be acknowledged in results.
            long tstart = System.currentTimeMillis();
            while (!mPrecaptureTriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                        "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                        + "AE converge state: " + mConvergedAE + ".");
                }
            }

            // Then wait (bounded) for AE to report convergence.
            tstart = System.currentTimeMillis();
            while (!mConvergeAETriggered) {
                if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                        || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                        "3A failed to converge after " + TIMEOUT_3A + " seconds.\n"
                        + "AE converge state: " + mConvergedAE + ".");
                }
            }
            mInterlock3A.close();
            Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
            threeAListener.stop();
        } catch (CameraAccessException e) {
            // NOTE(review): failure here is logged but not rethrown; the frame wait below
            // still proceeds — confirm this best-effort behavior is intended.
            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
        }

        mExtensionSession.setRepeatingRequest(reqBuilder.build(),
                executor,
                captureResultListener);

        Log.d(TAG, "capturePreviewFrameWithExtension [wait for " + frameNumToCapture + " frames]");
        // Wait until the requested number of frames have been received and then capture the frame
        // NOTE(review): the await() return value is ignored, so a timeout falls through to
        // getFrame() anyway — confirm best-effort capture on timeout is intended.
        frameNumLatch.await(TIMEOUT_CAPTURE_PREVIEW_FRAME_SECONDS, TimeUnit.SECONDS);

        Log.d(TAG, "capturePreviewFrameWithExtension [getting frame]");
        pr.getFrame(outputStream);

        Log.d(TAG, "capturePreviewFrameWithExtension [stop repeating request]");
        mExtensionSession.stopRepeating();
        mExtensionSession.close();

        sessionListener.getStateWaiter().waitForState(
                BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
        Log.d(TAG, "capturePreviewFrameWithExtension [end]");
    }
3481 
3482     private Size pickPreviewResolution(Size captureSize, int extension) {
3483         int captureWidth = captureSize.getWidth();
3484         int captureHeight = captureSize.getHeight();
3485         List<Size> extensionSizes = mCameraExtensionCharacteristics.getExtensionSupportedSizes(
3486                 extension, SurfaceTexture.class);
3487         StreamConfigurationMap configMap = mCameraCharacteristics.get(
3488                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
3489         Size[] outputSizes = configMap.getOutputSizes(SurfaceTexture.class);
3490         if (outputSizes == null || !Arrays.asList(outputSizes).contains(captureSize)) {
3491             Log.i(TAG, "Failed to find valid output size");
3492             return null;
3493         }
3494         Log.i(TAG, "outputSizes: " + Arrays.toString(outputSizes));
3495         Log.i(TAG, "extensionSizes: " + extensionSizes.toString());
3496         // Pick preview size close to capture size, based on area
3497         Size previewSize = extensionSizes.stream()
3498                 .distinct()
3499                 .min(Comparator.comparingInt(s -> Math.abs(
3500                         (s.getWidth() * s.getHeight()) - (captureWidth * captureHeight))))
3501                 .get();
3502         Log.i(TAG, "previewSize for extension " + String.valueOf(extension) +
3503                 ": " + previewSize.toString());
3504         return previewSize;
3505     }
3506 
3507     private Surface configureAndCreateExtensionSession(
3508             Surface captureSurface,
3509             int extension,
3510             CameraExtensionSession.StateCallback stateCallback) throws ItsException {
3511         return configureAndCreateExtensionSession(
3512             null,
3513             captureSurface,
3514             extension,
3515             stateCallback);
3516     }
3517 
    /**
     * Configures and creates a {@link CameraExtensionSession} for the given extension.
     *
     * <p>If {@code previewSurface} is null, an internal {@link ImageReader}-backed preview
     * surface is allocated, sized via {@code pickPreviewResolution} to the extension-supported
     * resolution closest in area to the capture size.
     *
     * @param previewSurface optional preview surface; null to allocate one internally.
     * @param captureSurface optional still-capture surface; null for preview-only sessions.
     * @param extension      the extension mode to enable.
     * @param stateCallback  receives session lifecycle callbacks on the camera handler.
     * @return the preview surface actually used (caller-provided or internally allocated).
     * @throws ItsException if the extension session cannot be created.
     */
    private Surface configureAndCreateExtensionSession(
            Surface previewSurface,
            Surface captureSurface,
            int extension,
            CameraExtensionSession.StateCallback stateCallback) throws ItsException {
        List<OutputConfiguration> outputConfig = new ArrayList<>();
        // Capture size is derived from the first prepared output ImageReader.
        Size captureSize = null;
        if (captureSurface != null) {
            int captureWidth = mOutputImageReaders[0].getWidth();
            int captureHeight = mOutputImageReaders[0].getHeight();
            captureSize = new Size(captureWidth, captureHeight);
            Log.i(TAG, "Capture size: " + captureSize);
            outputConfig.add(new OutputConfiguration(captureSurface));
        }

        if (previewSurface == null) {
            // NOTE(review): pickPreviewResolution() can return null (and captureSize is null
            // when no captureSurface was given), which would NPE just below — confirm callers
            // of this path always supply a valid captureSurface.
            Size previewSize = pickPreviewResolution(captureSize, extension);
            // NOTE(review): a previously allocated mExtensionPreviewImageReader is replaced
            // here without being closed — verify it is closed elsewhere.
            mExtensionPreviewImageReader = ImageReader.newInstance(
                    previewSize.getWidth(),
                    previewSize.getHeight(),
                    ImageFormat.PRIVATE,
                    MAX_CONCURRENT_READER_BUFFERS,
                    HardwareBuffer.USAGE_CPU_READ_OFTEN | HardwareBuffer.USAGE_COMPOSER_OVERLAY);
            previewSurface = mExtensionPreviewImageReader.getSurface();
        }

        outputConfig.add(new OutputConfiguration(previewSurface));

        ExtensionSessionConfiguration extSessionConfig = new ExtensionSessionConfiguration(
                extension, outputConfig,
                new HandlerExecutor(mCameraHandler),
                stateCallback);
        // Create capture session
        try {
            mCamera.createExtensionSession(extSessionConfig);
        } catch (CameraAccessException e) {
            throw new ItsException("Error creating extension session: " + e);
        }
        return previewSurface;
    }
3558 
    /**
     * Builds a capture request from {@code requestTemplate}, applies the given recording
     * options (zoom, AE target FPS range, video/optical stabilization), then creates a
     * regular capture session targeting {@code recordSurface} (plus any {@code extraConfigs})
     * and starts a repeating request on it as soon as the session is configured.
     *
     * <p>Side effects: stores the builder in {@link #mCaptureRequestBuilder} and the created
     * session in {@link #mSession}; wires {@code mPreviewAction} (if set) to the new session.
     *
     * @param requestTemplate       template id passed to {@code CameraDevice.createCaptureRequest}
     * @param recordSurface         target surface for the repeating request; must be non-null
     * @param videoStabilizationMode one of CONTROL_VIDEO_STABILIZATION_MODE_*; unknown values
     *                              are logged and left unchanged
     * @param ois                   true to force LENS_OPTICAL_STABILIZATION_MODE_ON, else OFF
     * @param dynamicRangeProfile   dynamic range profile applied only to the record surface's
     *                              OutputConfiguration
     * @param stateCallback         optional; forwarded onReady/onClosed events (may be null)
     * @param zoomRatio             CONTROL_ZOOM_RATIO; NaN means "leave unset"
     * @param aeTargetFpsMin/Max    AE target FPS range; applied only when both are > 0
     * @param captureCallback       capture callback for the repeating request
     * @param extraConfigs          optional extra output configurations (may be null)
     * @throws CameraAccessException if creating the request or session fails
     */
    private void configureAndCreateCaptureSession(int requestTemplate, Surface recordSurface,
            int videoStabilizationMode, boolean ois, long dynamicRangeProfile,
            CameraCaptureSession.StateCallback stateCallback,
            double zoomRatio, int aeTargetFpsMin, int aeTargetFpsMax,
            CameraCaptureSession.CaptureCallback captureCallback,
            List<OutputConfiguration> extraConfigs) throws CameraAccessException {
        assert (recordSurface != null);
        // Create capture request builder
        mCaptureRequestBuilder = mCamera.createCaptureRequest(requestTemplate);

        // handle optional arguments
        // NaN is the sentinel for "zoom not requested"; only set the key when a value was given.
        if (!Double.isNaN(zoomRatio)) {
            Logt.i(TAG, "zoomRatio set to " + zoomRatio);
            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_ZOOM_RATIO, (float) zoomRatio);
        }
        if (aeTargetFpsMin > 0 && aeTargetFpsMax > 0) {
            Logt.i(TAG, "AE target FPS range: (" + aeTargetFpsMin + ", " + aeTargetFpsMax + ")");
            mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                    new Range<Integer>(aeTargetFpsMin, aeTargetFpsMax));
        }

        // Apply the requested stabilization mode; unrecognized values fall through to the
        // default branch and leave the template's stabilization setting untouched.
        switch (videoStabilizationMode) {
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
                Log.i(TAG, "Turned ON video stabilization.");
                break;
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION);
                Log.i(TAG, "Turned ON preview stabilization.");
                break;
            case CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF:
                mCaptureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                        CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
                Log.i(TAG, "Turned OFF video stabilization.");
                break;
            default:
                Log.w(TAG, "Invalid video stabilization mode " + videoStabilizationMode
                        + ". Leaving unchanged.");
                break;
        }
        Log.i(TAG, "ois = " + ois);
        // OIS is always set explicitly (ON or OFF), unlike video stabilization above.
        if (ois) {
            mCaptureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                    CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
        } else {
            mCaptureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                    CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
        }
        mCaptureRequestBuilder.addTarget(recordSurface);
        List<OutputConfiguration> configs = new ArrayList<OutputConfiguration>();
        OutputConfiguration outConfig = new OutputConfiguration(recordSurface);
        // The dynamic range profile applies only to the recording output, not extraConfigs.
        outConfig.setDynamicRangeProfile(dynamicRangeProfile);
        configs.add(outConfig);
        if (extraConfigs != null) {
            configs.addAll(extraConfigs);
        }

        SessionConfiguration sessionConfiguration = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, configs,
                new HandlerExecutor(mCameraHandler),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession session) {
                        // Hook up the preview action (if any) before starting the repeating
                        // request so it sees a fully initialized session/builder pair.
                        mSession = session;
                        if (mPreviewAction != null) {
                            mPreviewAction.setSession(session);
                            mPreviewAction.setCaptureRequestBuilder(mCaptureRequestBuilder);
                        }
                        try {
                            mSession.setRepeatingRequest(mCaptureRequestBuilder.build(),
                                    captureCallback, mResultHandler);
                        } catch (CameraAccessException e) {
                            Log.e(TAG, "CameraCaptureSession configuration failed.", e);
                        }
                    }

                    @Override
                    public void onReady(CameraCaptureSession session) {
                        if (stateCallback != null) {
                            stateCallback.onReady(session);
                        }
                    }

                    // NOTE(review): configuration failure is only logged here; it is not
                    // forwarded to stateCallback nor surfaced to the caller — confirm intended.
                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        Log.i(TAG, "CameraCaptureSession configuration failed.");
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        if (stateCallback != null) {
                            stateCallback.onClosed(session);
                        }
                    }
                });

        // Create capture session
        mCamera.createCaptureSession(sessionConfiguration);
    }
3660 
3661     // Returns the default camcorder profile for the given camera at the given quality level
3662     // Each CamcorderProfile has duration, quality, fileFormat, videoCodec, videoBitRate,
3663     // videoFrameRate,videoWidth, videoHeight, audioCodec, audioBitRate, audioSampleRate
3664     // and audioChannels.
3665     private CamcorderProfile getCamcorderProfile(int cameraId, int profileId) {
3666         CamcorderProfile camcorderProfile = CamcorderProfile.get(cameraId, profileId);
3667         return camcorderProfile;
3668     }
3669 
    // This method should be called before preparing MediaRecorder.
    // Set video and audio source should be done before setting the CamcorderProfile.
    // Output file path should be set after setting the CamcorderProfile.
    // These events should always be done in this particular order.
    private void setupMediaRecorderWithProfile(CamcorderProfile camcorderProfile,
            String outputFilePath) {
        // Video frames are fed through a Surface (recorder's input surface).
        mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
        // setProfile applies the profile's format/codec/bitrate/size settings in one call;
        // it must come after the sources and before the output file path.
        mMediaRecorder.setProfile(camcorderProfile);
        mMediaRecorder.setOutputFile(outputFilePath);
    }
3681 
3682     private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
3683             int fileFormat, boolean stabilized, double zoomRatio) {
3684         return getOutputMediaFile(cameraId, videoSize, quality, fileFormat,
3685                 /* hlg10Enabled= */false, stabilized, zoomRatio, /* minFps */0,
3686                 /* maxFps */0);
3687     }
3688 
3689     private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
3690             int fileFormat, boolean hlg10Enabled, boolean stabilized, double zoomRatio) {
3691         return getOutputMediaFile(cameraId, videoSize, quality, fileFormat,
3692                 hlg10Enabled, stabilized, zoomRatio, /* minFps */0,
3693                 /* maxFps */0);
3694     }
3695 
3696     private String getOutputMediaFile(int cameraId, Size videoSize, String quality,
3697             int fileFormat, boolean hlg10Enabled, boolean stabilized, double zoomRatio,
3698             int minFps, int maxFps) {
3699         // If any quality has file format other than 3gp and webm then the
3700         // recording file will have mp4 as default extension.
3701         String fileExtension = "";
3702         if (fileFormat == MediaRecorder.OutputFormat.THREE_GPP) {
3703             fileExtension = ".3gp";
3704         } else if (fileFormat == MediaRecorder.OutputFormat.WEBM) {
3705             fileExtension = ".webm";
3706         } else {
3707             fileExtension = ".mp4";
3708         }
3709         // All the video recordings will be available in VideoITS directory on device.
3710         File mediaStorageDir = new File(getExternalFilesDir(null), "VideoITS");
3711         if (mediaStorageDir == null) {
3712             Log.e(TAG, "Failed to retrieve external files directory.");
3713             return null;
3714         }
3715         if (!mediaStorageDir.exists()) {
3716             if (!mediaStorageDir.mkdirs()) {
3717                 Log.d(TAG, "Failed to create media storage directory.");
3718                 return null;
3719             }
3720         }
3721         String timestamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
3722         String fileName = mediaStorageDir.getPath() + File.separator +
3723                 "VID_" + timestamp + '_' + cameraId + '_' + quality + '_' +
3724                 videoSize;
3725         if (!Double.isNaN(zoomRatio)) {
3726             fileName += "_" + zoomRatio;
3727         }
3728         if (hlg10Enabled) {
3729             fileName += "_hlg10";
3730         }
3731         if (stabilized) {
3732             fileName += "_stabilized";
3733         }
3734         if (minFps > 0 && maxFps > 0) {
3735             fileName += "_" + minFps + "_" + maxFps;
3736         }
3737 
3738         File mediaFile = new File(fileName);
3739         return mediaFile + fileExtension;
3740     }
3741 
3742     private void doCaptureWithFlash(JSONObject params) throws ItsException {
3743         // Parse the json to get the capture requests
3744         List<CaptureRequest.Builder> previewStartRequests = ItsSerializer.deserializeRequestList(
3745             mCamera, params, "previewRequestStart");
3746         List<CaptureRequest.Builder> previewIdleRequests = ItsSerializer.deserializeRequestList(
3747             mCamera, params, "previewRequestIdle");
3748         List<CaptureRequest.Builder> stillCaptureRequests = ItsSerializer.deserializeRequestList(
3749             mCamera, params, "stillCaptureRequest");
3750 
3751         mCaptureResults = new CaptureResult[2];
3752 
3753         ThreeAResultListener threeAListener = new ThreeAResultListener();
3754         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
3755         SurfaceTexture preview = new SurfaceTexture(/*random int*/ 1);
3756         Surface previewSurface = new Surface(preview);
3757         try {
3758             mSessionListener = new BlockingSessionCallback();
3759             try {
3760                 mCountCapRes.set(0);
3761                 mCountJpg.set(0);
3762                 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
3763                 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null,
3764                         /*inputFormat*/0, /*maxInputBuffers*/0, false, /*reuseSession*/ false);
3765 
3766                 outputConfigs.add(new OutputConfiguration(mOutputImageReaders[0].getSurface()));
3767                 outputConfigs.add(new OutputConfiguration(previewSurface));
3768                 mCamera.createCaptureSessionByOutputConfigurations(
3769                         outputConfigs, mSessionListener, mCameraHandler);
3770                 mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
3771                 ImageReader.OnImageAvailableListener readerListener =
3772                         createAvailableListener(mCaptureCallback);
3773                 mOutputImageReaders[0].setOnImageAvailableListener(readerListener,
3774                         mSaveHandlers[0]);
3775             } catch (Exception e) {
3776                 throw new ItsException("Error configuring outputs", e);
3777             }
3778             CaptureRequest.Builder previewIdleReq = previewIdleRequests.get(0);
3779             previewIdleReq.addTarget(previewSurface);
3780             mSession.setRepeatingRequest(previewIdleReq.build(), threeAListener, mResultHandler);
3781             Logt.i(TAG, "Triggering precapture sequence");
3782             mPrecaptureTriggered = false;
3783             CaptureRequest.Builder previewStartReq = previewStartRequests.get(0);
3784             previewStartReq.addTarget(previewSurface);
3785             mSession.capture(previewStartReq.build(), threeAListener ,mResultHandler);
3786             mInterlock3A.open();
3787             synchronized(m3AStateLock) {
3788                 mPrecaptureTriggered = false;
3789                 mConvergeAETriggered = false;
3790             }
3791             long tstart = System.currentTimeMillis();
3792             while (!mPrecaptureTriggered) {
3793                 if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
3794                         System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
3795                     throw new ItsException (
3796                         "AE state is " + CaptureResult.CONTROL_AE_STATE_PRECAPTURE +
3797                         "after " + TIMEOUT_3A + " seconds.");
3798                 }
3799             }
3800             mConvergeAETriggered = false;
3801 
3802             tstart = System.currentTimeMillis();
3803             while (!mConvergeAETriggered) {
3804                 if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
3805                         System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
3806                     throw new ItsException (
3807                         "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
3808                         "AE converge state: " + mConvergedAE + ".");
3809                 }
3810             }
3811             mInterlock3A.close();
3812             Logt.i(TAG, "AE state after precapture sequence: " + mConvergeAETriggered);
3813             threeAListener.stop();
3814 
3815             // Send a still capture request
3816             CaptureRequest.Builder stillCaptureRequest = stillCaptureRequests.get(0);
3817             Logt.i(TAG, "Taking still capture with ON_AUTO_FLASH.");
3818             stillCaptureRequest.addTarget(mOutputImageReaders[0].getSurface());
3819             mSession.capture(stillCaptureRequest.build(), mCaptureResultListener, mResultHandler);
3820             mCountCallbacksRemaining.set(1);
3821             long timeout = TIMEOUT_CALLBACK * 1000;
3822             waitForCallbacks(timeout);
3823             mSession.stopRepeating();
3824         } catch (android.hardware.camera2.CameraAccessException e) {
3825             throw new ItsException("Access error: ", e);
3826         } finally {
3827             if (mSession != null) {
3828                 mSession.close();
3829             }
3830             previewSurface.release();
3831             preview.release();
3832         }
3833     }
3834 
3835     private void doCaptureWithExtensions(JSONObject params, int extension) throws ItsException {
3836         try {
3837             // Parse the JSON to get the list of capture requests.
3838             List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
3839                     mCamera, params, "captureRequests");
3840 
3841             BlockingExtensionSessionCallback sessionListener =
3842                     new BlockingExtensionSessionCallback();
3843             mCountRawOrDng.set(0);
3844             mCountJpg.set(0);
3845             mCountYuv.set(0);
3846             mCountRaw10.set(0);
3847             mCountRaw12.set(0);
3848             mCountCapRes.set(0);
3849             mCountRaw10QuadBayer.set(0);
3850             mCountRaw10Stats.set(0);
3851             mCountRaw10QuadBayerStats.set(0);
3852             mCountRaw.set(0);
3853             mCountRawQuadBayer.set(0);
3854             mCountRawStats.set(0);
3855             mCountRawQuadBayerStats.set(0);
3856 
3857             mCaptureRawIsDng = false;
3858             mCaptureRawIsStats = false;
3859             mCaptureRawIsQuadBayer = false;
3860             mCaptureRawIsQuadBayerStats = false;
3861             mCaptureResults = new CaptureResult[requests.size()];
3862 
3863             JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
3864 
3865             prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
3866                     /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
3867                     /*backgroundRequest*/ false, /*reuseSession*/ false);
3868 
3869             configureAndCreateExtensionSession(
3870                     mOutputImageReaders[0].getSurface(),
3871                     extension,
3872                     sessionListener);
3873 
3874             mExtensionSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS_EXTENSIONS);
3875 
3876             CaptureRequest.Builder captureBuilder = requests.get(0);
3877 
3878             if (params.optBoolean("waitAE", true)) {
3879                 if (mExtensionPreviewImageReader == null) {
3880                     throw new ItsException("Preview ImageReader has not been initialized!");
3881                 }
3882                 // Set repeating request and wait for AE convergence, using another ImageReader.
3883                 Logt.i(TAG, "Waiting for AE to converge before taking extensions capture.");
3884                 CaptureRequest.Builder previewRequestBuilder = mCamera.createCaptureRequest(
3885                         CameraDevice.TEMPLATE_PREVIEW);
3886                 previewRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
3887                         CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
3888                 previewRequestBuilder.addTarget(mExtensionPreviewImageReader.getSurface());
3889                 ImageReader.OnImageAvailableListener dropperListener =
3890                         createAvailableListenerDropper();
3891                 mExtensionPreviewImageReader.setOnImageAvailableListener(dropperListener,
3892                         mSaveHandlers[0]);
3893                 mExtensionSession.setRepeatingRequest(previewRequestBuilder.build(),
3894                         new HandlerExecutor(mResultHandler),
3895                         mExtAEResultListener);
3896                 mCountCallbacksRemaining.set(1);
3897                 long timeout = TIMEOUT_CALLBACK * 1000;
3898                 waitForCallbacks(timeout);
3899                 mExtensionSession.stopRepeating();
3900                 mResultThread.sleep(PIPELINE_WARMUP_TIME_MS);
3901             }
3902 
3903             ImageReader.OnImageAvailableListener readerListener =
3904                     createExtensionAvailableListener(mCaptureCallback);
3905             mOutputImageReaders[0].setOnImageAvailableListener(readerListener,
3906                     mSaveHandlers[0]);
3907             captureBuilder.addTarget(mOutputImageReaders[0].getSurface());
3908             mExtensionSession.capture(captureBuilder.build(), new HandlerExecutor(mResultHandler),
3909                     mExtCaptureResultListener);
3910             // Two callbacks: one for onCaptureResultAvailable and one for onImageAvailable
3911             mCountCallbacksRemaining.set(2);
3912             long timeout = TIMEOUT_CALLBACK * 1000;
3913             waitForCallbacks(timeout);
3914 
3915             if (mExtensionPreviewImageReader != null) {
3916                 mExtensionPreviewImageReader.close();
3917                 mExtensionPreviewImageReader = null;
3918             }
3919 
3920             // Close session and wait until session is fully closed
3921             mExtensionSession.close();
3922             sessionListener.getStateWaiter().waitForState(
3923                 BlockingExtensionSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
3924         } catch (android.hardware.camera2.CameraAccessException e) {
3925             throw new ItsException("Access error: ", e);
3926         } catch (InterruptedException e) {
3927             throw new ItsException("Unexpected InterruptedException: ", e);
3928         }
3929     }
3930 
    /**
     * Executes a burst of capture requests described by {@code params}.
     *
     * <p>Request lists read from params: "captureRequests" (the burst) and optional
     * "repeatRequests" (a background repeating burst targeting the last image reader).
     * When "firstSurfaceFor3A" is true, the first configured surface is reserved for 3A
     * and receives no capture targets here. When "reuseSession" is true and the existing
     * session's output configurations match, the session is reused and kept open on exit;
     * otherwise a new session is created and (if not reusing) closed at the end.
     *
     * <p>Completion is tracked by {@code mCountCallbacksRemaining}: one image callback per
     * capture surface plus one capture-result callback, per capture.
     *
     * @throws ItsException on configuration, parsing, camera access, or interruption errors
     */
    private void doCapture(JSONObject params) throws ItsException {
        boolean reuseSession = params.optBoolean("reuseSession", false);
        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "captureRequests");

            // optional background preview requests
            List<CaptureRequest.Builder> backgroundRequests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "repeatRequests");
            boolean backgroundRequest = backgroundRequests.size() > 0;
            boolean firstSurfaceFor3A = params.optBoolean("firstSurfaceFor3A", false);
            int indexOffsetFor3A = firstSurfaceFor3A ? 1 : 0;

            int numSurfaces = 0;
            int numCaptureSurfaces = 0;
            try {
                // Reset per-format output counters and RAW capture flags for this run.
                mCountRawOrDng.set(0);
                mCountJpg.set(0);
                mCountYuv.set(0);
                mCountRaw10.set(0);
                mCountRaw12.set(0);
                mCountCapRes.set(0);
                mCountRaw10QuadBayer.set(0);
                mCountRaw10Stats.set(0);
                mCountRaw10QuadBayerStats.set(0);
                mCountRaw.set(0);
                mCountRawQuadBayer.set(0);
                mCountRawStats.set(0);
                mCountRawQuadBayerStats.set(0);

                mCaptureRawIsDng = false;
                mCaptureRawIsStats = false;
                mCaptureRawIsQuadBayer = false;
                mCaptureRawIsQuadBayerStats = false;
                mCaptureResults = new CaptureResult[requests.size()];

                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
                boolean is10bitOutputPresent = false;

                // Image readers may persist across calls when sessions are being reused;
                // only (re)create them when absent.
                if (mOutputImageReaders == null) {
                    Logt.i(TAG, "Preparing image readers with output specs in doCapture");
                    is10bitOutputPresent = prepareImageReadersWithOutputSpecs(jsonOutputSpecs,
                            /*inputSize*/null, /*inputFormat*/0, /*maxInputBuffers*/0,
                            backgroundRequest, reuseSession);
                } else {
                    is10bitOutputPresent = mImageReaderArgs.getHas10bitOutput();
                }
                // Capture surfaces exclude the background-request surface (last) and the
                // 3A surface (first), when present.
                numSurfaces = mOutputImageReaders.length;
                numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0)
                        - indexOffsetFor3A;
                if (numCaptureSurfaces <= 0) {
                    throw new ItsException(String.format(
                            "Invalid number of capture surfaces: numSurfaces %d, "
                            + "backgroundRequest %b, firstSurfaceFor3A: %b!", numSurfaces,
                            backgroundRequest, firstSurfaceFor3A));
                }

                List<OutputConfiguration> outputConfigs = getCaptureOutputConfigurations(
                        jsonOutputSpecs, is10bitOutputPresent);
                // Reuse the existing session only if requested AND the output configs match.
                if (mSession != null && reuseSession
                        && mCaptureOutputConfigs.equals(outputConfigs)) {
                    Logt.i(TAG, "Reusing camera capture session in doCapture()");
                } else {
                    Logt.i(TAG, "Need to create new capture session in doCapture()");
                    mSessionListener = new BlockingSessionCallback();
                    mCamera.createCaptureSessionByOutputConfigurations(
                            outputConfigs, mSessionListener, mCameraHandler);
                    mSession = mSessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
                    mSessionListener.getStateWaiter().waitForState(
                                BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
                    Logt.i(TAG, "New capture session created.");
                }
                // Remember the configs so a later reuseSession call can compare against them.
                mCaptureOutputConfigs = new ArrayList<OutputConfiguration>(outputConfigs);

                // Wire listeners: the last surface drops images when it serves the
                // background request; all others save via mCaptureCallback.
                for (int i = indexOffsetFor3A; i < numSurfaces; i++) {
                    ImageReader.OnImageAvailableListener readerListener;
                    if (backgroundRequest && i == numSurfaces - 1) {
                        readerListener = createAvailableListenerDropper();
                    } else {
                        readerListener = createAvailableListener(mCaptureCallback);
                    }
                    mOutputImageReaders[i].setOnImageAvailableListener(readerListener,
                            mSaveHandlers[i]);
                }

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests. There is one callback per image surface, and one
                // callback for the CaptureResult, for each capture.
                int numCaptures = requests.size();
                mCountCallbacksRemaining.set(numCaptures * (numCaptureSurfaces + 1));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            } catch (org.json.JSONException e) {
                throw new ItsException("Error parsing params", e);
            }

            // Start background requests and let it warm up pipeline
            if (backgroundRequest) {
                List<CaptureRequest> bgRequestList =
                        new ArrayList<CaptureRequest>(backgroundRequests.size());
                for (int i = 0; i < backgroundRequests.size(); i++) {
                    CaptureRequest.Builder req = backgroundRequests.get(i);
                    req.addTarget(mOutputImageReaders[numCaptureSurfaces].getSurface());
                    bgRequestList.add(req.build());
                }
                mSession.setRepeatingBurst(bgRequestList, null, null);
                // warm up the pipeline
                Thread.sleep(PIPELINE_WARMUP_TIME_MS);
            }

            // Initiate the captures.
            // Track the longest exposure so the callback timeout can be extended below.
            long maxExpTimeNs = -1;
            List<CaptureRequest> requestList =
                    new ArrayList<>(requests.size());
            for (int i = 0; i < requests.size(); i++) {
                CaptureRequest.Builder req = requests.get(i);
                // For DNG captures, need the LSC map to be available.
                if (mCaptureRawIsDng) {
                    req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
                }
                // Quad-Bayer captures require full-resolution sensor pixel mode.
                if (mCaptureRawIsQuadBayer || mCaptureRawIsQuadBayerStats) {
                    req.set(CaptureRequest.SENSOR_PIXEL_MODE,
                        CaptureRequest.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
                }
                Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
                if (expTimeNs != null && expTimeNs > maxExpTimeNs) {
                    maxExpTimeNs = expTimeNs;
                }

                for (int j = 0; j < numCaptureSurfaces; j++) {
                    req.addTarget(mOutputImageReaders[j + indexOffsetFor3A].getSurface());
                }
                requestList.add(req.build());
            }
            mSession.captureBurst(requestList, mCaptureResultListener, mResultHandler);

            long timeout = TIMEOUT_CALLBACK * 1000;
            if (maxExpTimeNs > 0) {
                timeout += maxExpTimeNs / 1000000; // ns to ms
            }
            // Make sure all callbacks have been hit (wait until captures are done).
            // If no timeouts are received after a timeout, then fail.
            waitForCallbacks(timeout);

            mSession.stopRepeating();
            mSessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_READY, TIMEOUT_SESSION_READY);
            Logt.i(TAG, "Session is ready again after doing capture.");

            // Close session and wait until session is fully closed, if desired.
            if (!reuseSession) {
                mSession.close();
                mSessionListener.getStateWaiter().waitForState(
                        BlockingSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);
                mSession = null;
                closeImageReaders();
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (InterruptedException e) {
            throw new ItsException("Unexpected InterruptedException: ", e);
        }
    }
4096 
4097     /**
4098      * Perform reprocess captures.
4099      *
4100      * It takes captureRequests in a JSON object and perform capture requests in two steps:
4101      * regular capture request to get reprocess input and reprocess capture request to get
4102      * reprocess outputs.
4103      *
4104      * Regular capture requests:
4105      *   1. For each capture request in the JSON object, create a full-size capture request with
4106      *      the settings in the JSON object.
4107      *   2. Remember and clear noise reduction, edge enhancement, and effective exposure factor
4108      *      from the regular capture requests. (Those settings will be used for reprocess requests.)
4109      *   3. Submit the regular capture requests.
4110      *
4111      * Reprocess capture requests:
4112      *   4. Wait for the regular capture results and use them to create reprocess capture requests.
4113      *   5. Wait for the regular capture output images and queue them to the image writer.
4114      *   6. Set the noise reduction, edge enhancement, and effective exposure factor from #2.
4115      *   7. Submit the reprocess capture requests.
4116      *
4117      * The output images and results for the regular capture requests won't be written to socket.
4118      * The output images and results for the reprocess capture requests will be written to socket.
4119      */
4120     private void doReprocessCapture(JSONObject params) throws ItsException {
4121         ImageWriter imageWriter = null;
4122         ArrayList<Integer> noiseReductionModes = new ArrayList<>();
4123         ArrayList<Integer> edgeModes = new ArrayList<>();
4124         ArrayList<Float> effectiveExposureFactors = new ArrayList<>();
4125 
4126         mCountRawOrDng.set(0);
4127         mCountJpg.set(0);
4128         mCountYuv.set(0);
4129         mCountRaw10.set(0);
4130         mCountRaw12.set(0);
4131         mCountCapRes.set(0);
4132         mCountRaw10QuadBayer.set(0);
4133         mCountRaw10Stats.set(0);
4134         mCountRaw10QuadBayerStats.set(0);
4135         mCountRaw.set(0);
4136         mCountRawQuadBayer.set(0);
4137         mCountRawStats.set(0);
4138         mCountRawQuadBayerStats.set(0);
4139 
4140         mCaptureRawIsDng = false;
4141         mCaptureRawIsStats = false;
4142         mCaptureRawIsQuadBayer = false;
4143         mCaptureRawIsQuadBayerStats = false;
4144 
4145         try {
4146             // Parse the JSON to get the list of capture requests.
4147             List<CaptureRequest.Builder> inputRequests =
4148                     ItsSerializer.deserializeRequestList(mCamera, params, "captureRequests");
4149 
4150             // Prepare the image readers for reprocess input and reprocess outputs.
4151             int inputFormat = getReprocessInputFormat(params);
4152             Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat);
4153             JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
4154             prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat,
4155                     inputRequests.size(), /*backgroundRequest*/false, /*reuseSession*/ false);
4156 
4157             // Prepare a reprocessable session.
4158             int numOutputSurfaces = mOutputImageReaders.length;
4159             InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(),
4160                     inputSize.getHeight(), inputFormat);
4161             List<Surface> outputSurfaces = new ArrayList<Surface>();
4162             boolean addSurfaceForInput = true;
4163             for (int i = 0; i < numOutputSurfaces; i++) {
4164                 outputSurfaces.add(mOutputImageReaders[i].getSurface());
4165                 if (mOutputImageReaders[i] == mInputImageReader) {
4166                     // If input and one of the outputs share the same image reader, avoid
4167                     // adding the same surfaces twice.
4168                     addSurfaceForInput = false;
4169                 }
4170             }
4171 
4172             if (addSurfaceForInput) {
4173                 // Besides the output surfaces specified in JSON object, add an additional one
4174                 // for reprocess input.
4175                 outputSurfaces.add(mInputImageReader.getSurface());
4176             }
4177 
4178             BlockingSessionCallback sessionListener = new BlockingSessionCallback();
4179             mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener,
4180                     mCameraHandler);
4181             mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
4182 
4183             // Create an image writer for reprocess input.
4184             Surface inputSurface = mSession.getInputSurface();
4185             imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size());
4186 
4187             // Set up input reader listener and capture callback listener to get
4188             // reprocess input buffers and the results in order to create reprocess capture
4189             // requests.
4190             ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter();
4191             mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]);
4192 
4193             CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter();
4194             // Prepare the reprocess input request
4195             for (CaptureRequest.Builder inputReqest : inputRequests) {
4196                 // Remember and clear noise reduction, edge enhancement, and effective exposure
4197                 // factors.
4198                 noiseReductionModes.add(inputReqest.get(CaptureRequest.NOISE_REDUCTION_MODE));
4199                 edgeModes.add(inputReqest.get(CaptureRequest.EDGE_MODE));
4200                 effectiveExposureFactors.add(inputReqest.get(
4201                         CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR));
4202 
4203                 inputReqest.set(CaptureRequest.NOISE_REDUCTION_MODE,
4204                         CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
4205                 inputReqest.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG);
4206                 inputReqest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null);
4207                 inputReqest.addTarget(mInputImageReader.getSurface());
4208                 mSession.capture(inputReqest.build(), captureCallbackWaiter, mResultHandler);
4209             }
4210 
4211             // Wait for reprocess input images
4212             ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>();
4213             for (int i = 0; i < inputRequests.size(); i++) {
4214                 TotalCaptureResult result =
4215                         captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000);
4216                 reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result));
4217                 imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000));
4218             }
4219 
4220             // Start performing reprocess captures.
4221 
4222             mCaptureResults = new CaptureResult[inputRequests.size()];
4223 
4224             // Prepare reprocess capture requests.
4225             for (int i = 0; i < numOutputSurfaces; i++) {
4226                 ImageReader.OnImageAvailableListener outputReaderListener =
4227                         createAvailableListener(mCaptureCallback);
4228                 mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener,
4229                         mSaveHandlers[i]);
4230             }
4231 
4232             // Plan for how many callbacks need to be received throughout the duration of this
4233             // sequence of capture requests. There is one callback per image surface, and one
4234             // callback for the CaptureResult, for each capture.
4235             int numCaptures = reprocessOutputRequests.size();
4236             mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1));
4237 
4238             // Initiate the captures.
4239             for (int i = 0; i < reprocessOutputRequests.size(); i++) {
4240                 CaptureRequest.Builder req = reprocessOutputRequests.get(i);
4241                 for (ImageReader outputImageReader : mOutputImageReaders) {
4242                     req.addTarget(outputImageReader.getSurface());
4243                 }
4244 
4245                 req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i));
4246                 req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i));
4247                 req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
4248                         effectiveExposureFactors.get(i));
4249 
4250                 mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
4251             }
4252 
4253             // Make sure all callbacks have been hit (wait until captures are done).
4254             // If no timeouts are received after a timeout, then fail.
4255             waitForCallbacks(TIMEOUT_CALLBACK * 1000);
4256         } catch (android.hardware.camera2.CameraAccessException e) {
4257             throw new ItsException("Access error: ", e);
4258         } finally {
4259             closeImageReaders();
4260             if (mSession != null) {
4261                 mSession.close();
4262                 mSession = null;
4263             }
4264             if (imageWriter != null) {
4265                 imageWriter.close();
4266             }
4267         }
4268     }
4269 
4270     @Override
4271     public final void onAccuracyChanged(Sensor sensor, int accuracy) {
4272         Logt.i(TAG, "Sensor " + sensor.getName() + " accuracy changed to " + accuracy);
4273     }
4274 
4275     @Override
4276     public final void onSensorChanged(SensorEvent event) {
4277         synchronized(mEventLock) {
4278             if (mEventsEnabled) {
4279                 MySensorEvent ev2 = new MySensorEvent();
4280                 ev2.sensor = event.sensor;
4281                 ev2.accuracy = event.accuracy;
4282                 ev2.timestamp = event.timestamp;
4283                 ev2.values = new float[event.values.length];
4284                 System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
4285                 mEvents.add(ev2);
4286             }
4287         }
4288     }
4289 
4290     /**
4291      * Computes the stats image from raw image byte array and sends the stats image buffer.
4292      *
4293      * @param statsFormat The format of stats images.
4294      * @param cameraCharacteristics Camera characteristics object.
4295      * @param img Image byte array.
4296      * @param captureWidth The width of raw image.
4297      * @param captureHeight The height of raw image.
4298      * @param gridWidth The grid width.
4299      * @param gridHeight The grid height.
4300      * @param bufTag The tag of stats image buffer.
4301      * @throws ItsException  If the stats image computation fails.
4302      * @throws InterruptedException  If there is not enough quota available in the socket queue.
4303      */
4304     private void computeAndSendStatsImage(String statsFormat,
4305         CameraCharacteristics cameraCharacteristics, byte[] img, int captureWidth,
4306         int captureHeight, int gridWidth, int gridHeight, String bufTag)
4307         throws ItsException, InterruptedException {
4308         long startTimeMs = SystemClock.elapsedRealtime();
4309         boolean isMaximumResolution = statsFormat.contains("QuadBayer");
4310         Rect activeArrayCropRegion = ItsUtils.getActiveArrayCropRegion(
4311             cameraCharacteristics, isMaximumResolution);
4312         int aaw = activeArrayCropRegion.width();
4313         int aah = activeArrayCropRegion.height();
4314         int aax = activeArrayCropRegion.left;
4315         int aay = activeArrayCropRegion.top;
4316         float[] stats = StatsImage.computeStatsImage(img, statsFormat, captureWidth, captureHeight,
4317             aax, aay, aaw, aah, gridWidth, gridHeight);
4318         if (stats == null) {
4319             throw new ItsException(String.format(Locale.getDefault(),
4320                 "Stats image computation fails with format %s.", statsFormat));
4321         }
4322         long endTimeMs = SystemClock.elapsedRealtime();
4323         Logt.i(TAG, String.format(Locale.getDefault(),
4324             "%s computation takes %d ms.", statsFormat, endTimeMs - startTimeMs));
4325         int statsImgSize = stats.length * 4;
4326         if (mSocketQueueQuota != null) {
4327             mSocketQueueQuota.release(img.length);
4328             mSocketQueueQuota.acquire(statsImgSize);
4329         }
4330         ByteBuffer bBuf = ByteBuffer.allocate(statsImgSize);
4331         bBuf.order(ByteOrder.nativeOrder());
4332         FloatBuffer fBuf = bBuf.asFloatBuffer();
4333         fBuf.put(stats);
4334         fBuf.position(0);
4335         mSocketRunnableObj.sendResponseCaptureBuffer(bufTag, bBuf);
4336     }
4337 
4338     private final CaptureCallback mCaptureCallback = new CaptureCallback() {
4339         @Override
4340         public void onCaptureAvailable(Image capture, String physicalCameraId) {
4341             if (physicalCameraId != null && !physicalCameraId.isEmpty()) {
4342                 CameraCharacteristics physicalCameraCharacteristics = mPhysicalCameraChars.get(
4343                     physicalCameraId);
4344                 if (physicalCameraCharacteristics != null) {
4345                     mCameraCharacteristics = physicalCameraCharacteristics;
4346                     Logt.i(TAG, String.format(Locale.getDefault(),
4347                         "Physical camera Id is non-empty, set mCameraCharacteristics to the "
4348                             + "characteristics of physical camera %s.",
4349                         physicalCameraId));
4350                 }
4351             }
4352 
4353             try {
4354                 int format = capture.getFormat();
4355                 final int captureWidth = capture.getWidth();
4356                 final int captureHeight = capture.getHeight();
4357                 if (format == ImageFormat.JPEG) {
4358                     Logt.i(TAG, "Received JPEG capture");
4359                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4360                     ByteBuffer buf = ByteBuffer.wrap(img);
4361                     mCountJpg.getAndIncrement();
4362                     mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage" + physicalCameraId,
4363                         buf);
4364                 } else if (format == ImageFormat.JPEG_R) {
4365                     Logt.i(TAG, "Received JPEG/R capture");
4366                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4367                     ByteBuffer buf = ByteBuffer.wrap(img);
4368                     mCountJpg.getAndIncrement();
4369                     mSocketRunnableObj.sendResponseCaptureBuffer("jpeg_rImage" + physicalCameraId,
4370                         buf);
4371                 } else if (format == ImageFormat.PRIVATE) {
4372                     Logt.i(TAG, "Received PRIVATE capture");
4373                     // Private images have client opaque buffers
4374                     mSocketRunnableObj.sendResponseCaptureBuffer("privImage" + physicalCameraId,
4375                         null);
4376                 } else if (format == ImageFormat.YUV_420_888) {
4377                     Logt.i(TAG, "Received YUV capture");
4378                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4379                     ByteBuffer buf = ByteBuffer.wrap(img);
4380                     mSocketRunnableObj.sendResponseCaptureBuffer(
4381                         "yuvImage" + physicalCameraId, buf);
4382                 } else if (format == ImageFormat.RAW10) {
4383                     Logt.i(TAG, "Received RAW10 capture.");
4384                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4385                     if (mCaptureRawIsStats) {
4386                         String statsFormat = StatsFormat.RAW10_STATS.getValue();
4387                         String bufTag = "raw10StatsImage" + physicalCameraId;
4388                         computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
4389                             captureWidth, captureHeight, mCaptureStatsGridWidth,
4390                             mCaptureStatsGridHeight, bufTag);
4391                         mCountRaw10Stats.getAndIncrement();
4392                     } else if (mCaptureRawIsQuadBayerStats) {
4393                         String statsFormat = StatsFormat.RAW10_QUAD_BAYER_STATS.getValue();
4394                         String bufTag = "raw10QuadBayerStatsImage" + physicalCameraId;
4395                         computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
4396                             captureWidth, captureHeight, mCaptureStatsGridWidth,
4397                             mCaptureStatsGridHeight, bufTag);
4398                         mCountRaw10QuadBayerStats.getAndIncrement();
4399                     } else if (mCaptureRawIsQuadBayer) {
4400                         ByteBuffer buf = ByteBuffer.wrap(img);
4401                         mSocketRunnableObj.sendResponseCaptureBuffer(
4402                             "raw10QuadBayerImage" + physicalCameraId, buf);
4403                         mCountRaw10QuadBayer.getAndIncrement();
4404                     } else {
4405                         ByteBuffer buf = ByteBuffer.wrap(img);
4406                         mSocketRunnableObj.sendResponseCaptureBuffer(
4407                             "raw10Image" + physicalCameraId, buf);
4408                     }
4409                 } else if (format == ImageFormat.RAW12) {
4410                     Logt.i(TAG, "Received RAW12 capture");
4411                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4412                     ByteBuffer buf = ByteBuffer.wrap(img);
4413                     mCountRaw12.getAndIncrement();
4414                     mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image" + physicalCameraId,
4415                         buf);
4416                 } else if (format == ImageFormat.RAW_SENSOR) {
4417                     Logt.i(TAG, "Received RAW16 capture");
4418                     int count = mCountRawOrDng.getAndIncrement();
4419                     if (!mCaptureRawIsDng) {
4420                         byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4421                         if (mCaptureRawIsStats) {
4422                             String statsFormat = StatsFormat.RAW16_STATS.getValue();
4423                             String bufTag = "rawStatsImage" + physicalCameraId;
4424                             computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
4425                                 captureWidth, captureHeight, mCaptureStatsGridWidth,
4426                                 mCaptureStatsGridHeight, bufTag);
4427                             mCountRawStats.getAndIncrement();
4428                         } else if (mCaptureRawIsQuadBayerStats) {
4429                             String statsFormat = StatsFormat.RAW16_QUAD_BAYER_STATS.getValue();
4430                             String bufTag = "rawQuadBayerStatsImage" + physicalCameraId;
4431                             computeAndSendStatsImage(statsFormat, mCameraCharacteristics, img,
4432                                 captureWidth, captureHeight, mCaptureStatsGridWidth,
4433                                 mCaptureStatsGridHeight, bufTag);
4434                             mCountRawQuadBayerStats.getAndIncrement();
4435                         } else if (mCaptureRawIsQuadBayer) {
4436                             ByteBuffer buf = ByteBuffer.wrap(img);
4437                             mSocketRunnableObj.sendResponseCaptureBuffer(
4438                                 "rawQuadBayerImage" + physicalCameraId, buf);
4439                             mCountRawQuadBayer.getAndIncrement();
4440                         } else {
4441                             ByteBuffer buf = ByteBuffer.wrap(img);
4442                             mSocketRunnableObj.sendResponseCaptureBuffer(
4443                                 "rawImage" + physicalCameraId, buf);
4444                             mCountRaw.getAndIncrement();
4445                         }
4446                     } else {
4447                         // Wait until the corresponding capture result is ready, up to a timeout.
4448                         long t0 = android.os.SystemClock.elapsedRealtime();
4449                         while (!mThreadExitFlag
4450                             && android.os.SystemClock.elapsedRealtime() - t0 < TIMEOUT_CAP_RES) {
4451                             if (mCaptureResults[count] != null) {
4452                                 Logt.i(TAG, "Writing capture as DNG");
4453                                 DngCreator dngCreator = new DngCreator(
4454                                     mCameraCharacteristics, mCaptureResults[count]);
4455                                 ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
4456                                 dngCreator.writeImage(dngStream, capture);
4457                                 byte[] dngArray = dngStream.toByteArray();
4458                                 if (mSocketQueueQuota != null) {
4459                                     // Ideally we should acquire before allocating memory, but
4460                                     // here the DNG size is unknown before toByteArray call, so
4461                                     // we have to register the size afterward. This should still
4462                                     // works most of the time since all DNG images are handled by
4463                                     // the same handler thread, so we are at most one buffer over
4464                                     // the quota.
4465                                     mSocketQueueQuota.acquire(dngArray.length);
4466                                 }
4467                                 ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
4468                                 mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
4469                                 break;
4470                             } else {
4471                                 Thread.sleep(1);
4472                             }
4473                         }
4474                     }
4475                 } else if (format == ImageFormat.Y8) {
4476                     Logt.i(TAG, "Received Y8 capture");
4477                     byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
4478                     ByteBuffer buf = ByteBuffer.wrap(img);
4479                     mSocketRunnableObj.sendResponseCaptureBuffer(
4480                         "y8Image" + physicalCameraId, buf);
4481                 } else {
4482                     throw new ItsException("Unsupported image format: " + format);
4483                 }
4484 
4485                 synchronized (mCountCallbacksRemaining) {
4486                     mCountCallbacksRemaining.decrementAndGet();
4487                     mCountCallbacksRemaining.notify();
4488                 }
4489             } catch (IOException | InterruptedException | ItsException e) {
4490                 Logt.e(TAG, "Script error: ", e);
4491             }
4492         }
4493     };
4494 
4495     private static float r2f(Rational r) {
4496         return (float)r.getNumerator() / (float)r.getDenominator();
4497     }
4498 
4499     private boolean hasCapability(int capability) throws ItsException {
4500         int[] capabilities = mCameraCharacteristics.get(
4501                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
4502         if (capabilities == null) {
4503             throw new ItsException("Failed to get capabilities");
4504         }
4505         for (int c : capabilities) {
4506             if (c == capability) {
4507                 return true;
4508             }
4509         }
4510         return false;
4511     }
4512 
4513     private String buildLogString(CaptureResult result) throws ItsException {
4514         StringBuilder logMsg = new StringBuilder();
4515         logMsg.append(String.format(
4516                 "Capt result: AE=%d, AF=%d, AWB=%d, ",
4517                 result.get(CaptureResult.CONTROL_AE_STATE),
4518                 result.get(CaptureResult.CONTROL_AF_STATE),
4519                 result.get(CaptureResult.CONTROL_AWB_STATE)));
4520 
4521         boolean readSensorSettings = hasCapability(
4522                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
4523 
4524         if (readSensorSettings) {
4525             logMsg.append(String.format(
4526                     "sens=%d, exp=%.1fms, dur=%.1fms, ",
4527                     result.get(CaptureResult.SENSOR_SENSITIVITY),
4528                     result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue() / 1000000.0f,
4529                     result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue() /
4530                                 1000000.0f));
4531         }
4532         if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
4533             logMsg.append(String.format(
4534                     "gains=[%.1f, %.1f, %.1f, %.1f], ",
4535                     result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
4536                     result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
4537                     result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
4538                     result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
4539         } else {
4540             logMsg.append("gains=[], ");
4541         }
4542         if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
4543             logMsg.append(String.format(
4544                     "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
4545                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
4546                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
4547                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
4548                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
4549                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
4550                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
4551                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
4552                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
4553                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))));
4554         } else {
4555             logMsg.append("xform=[], ");
4556         }
4557         logMsg.append(String.format(
4558                 Locale.getDefault(),
4559                 "foc=%.1f, ",
4560                 result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
4561         logMsg.append(String.format(
4562                 Locale.getDefault(),
4563                 "zoom=%.1f, ",
4564                 result.get(CaptureResult.CONTROL_ZOOM_RATIO)));
4565         logMsg.append(String.format(
4566                 Locale.getDefault(),
4567                 "timestamp=%d",
4568                 result.get(CaptureResult.SENSOR_TIMESTAMP)));
4569         return logMsg.toString();
4570     }
4571 
4572     private class ThreeAResultHandler {
4573         private volatile boolean stopped = false;
4574         private boolean aeResultSent = false;
4575         private boolean awbResultSent = false;
4576         private boolean afResultSent = false;
4577         private CameraCharacteristics c = mCameraCharacteristics;
4578         private boolean isFixedFocusLens = isFixedFocusLens(c);
4579 
4580         void handleCaptureResult(CaptureRequest request, TotalCaptureResult result)
4581                 throws ItsException {
4582             if (stopped) {
4583                 return;
4584             }
4585 
4586             if (request == null || result == null) {
4587                 throw new ItsException("Request/result is invalid");
4588             }
4589 
4590             Logt.i(TAG, "TotalCaptureResult: " + buildLogString(result));
4591 
4592             synchronized (m3AStateLock) {
4593                 if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
4594                     mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE)
4595                             == CaptureResult.CONTROL_AE_STATE_CONVERGED
4596                             || result.get(CaptureResult.CONTROL_AE_STATE)
4597                             == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
4598                             || result.get(CaptureResult.CONTROL_AE_STATE)
4599                             == CaptureResult.CONTROL_AE_STATE_LOCKED;
4600                     mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE)
4601                             == CaptureResult.CONTROL_AE_STATE_LOCKED;
4602                     if (!mPrecaptureTriggered) {
4603                         mPrecaptureTriggered = result.get(CaptureResult.CONTROL_AE_STATE)
4604                                 == CaptureResult.CONTROL_AE_STATE_PRECAPTURE;
4605                     }
4606                     if (!mConvergeAETriggered) {
4607                         mConvergeAETriggered = mConvergedAE;
4608                     }
4609                 }
4610                 if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
4611                     mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE)
4612                              == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
4613                 }
4614                 if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
4615                     mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE)
4616                             == CaptureResult.CONTROL_AWB_STATE_CONVERGED
4617                                     || result.get(CaptureResult.CONTROL_AWB_STATE)
4618                                             == CaptureResult.CONTROL_AWB_STATE_LOCKED;
4619                     mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE)
4620                             == CaptureResult.CONTROL_AWB_STATE_LOCKED;
4621                 }
4622 
4623                 if ((mConvergedAE || !mDoAE) && mConvergedAWB
4624                         && (!mDoAF || isFixedFocusLens || mConvergedAF)) {
4625                     if (mSend3AResults && (!mNeedsLockedAE || mLockedAE) && !aeResultSent) {
4626                         aeResultSent = true;
4627                         if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
4628                                 && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
4629                             mSocketRunnableObj.sendResponse("aeResult",
4630                                     String.format(Locale.getDefault(), "%d %d",
4631                                         result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
4632                                         result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
4633                                     ));
4634                         } else {
4635                             Logt.i(TAG, String.format(
4636                                     "AE converged but NULL exposure values, sensitivity:%b,"
4637                                     + " expTime:%b",
4638                                     result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
4639                                     result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
4640                         }
4641                     }
4642                     if (mSend3AResults && !afResultSent) {
4643                         afResultSent = true;
4644                         if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
4645                             mSocketRunnableObj.sendResponse("afResult", String.format(
4646                                     Locale.getDefault(), "%f",
4647                                     result.get(CaptureResult.LENS_FOCUS_DISTANCE)
4648                                     ));
4649                         } else {
4650                             Logt.i(TAG, "AF converged but NULL focus distance values");
4651                         }
4652                     }
4653                     if (mSend3AResults && (!mNeedsLockedAWB || mLockedAWB) && !awbResultSent) {
4654                         awbResultSent = true;
4655                         if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
4656                                 && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
4657                             mSocketRunnableObj.sendResponse("awbResult", String.format(
4658                                     Locale.getDefault(),
4659                                     "%f %f %f %f %f %f %f %f %f %f %f %f %f",
4660                                     result.get(CaptureResult.COLOR_CORRECTION_GAINS)
4661                                             .getRed(),
4662                                     result.get(CaptureResult.COLOR_CORRECTION_GAINS)
4663                                             .getGreenEven(),
4664                                     result.get(CaptureResult.COLOR_CORRECTION_GAINS)
4665                                             .getGreenOdd(),
4666                                     result.get(CaptureResult.COLOR_CORRECTION_GAINS)
4667                                             .getBlue(),
4668                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4669                                             .getElement(0, 0)),
4670                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4671                                             .getElement(1, 0)),
4672                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4673                                             .getElement(2, 0)),
4674                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4675                                             .getElement(0, 1)),
4676                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4677                                             .getElement(1, 1)),
4678                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4679                                             .getElement(2, 1)),
4680                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4681                                             .getElement(0, 2)),
4682                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4683                                             .getElement(1, 2)),
4684                                     r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4685                                             .getElement(2, 2))));
4686                         } else {
4687                             Logt.i(TAG, String.format(
4688                                     "AWB converged but NULL color correction values, gains:%b,"
4689                                     + " ccm:%b",
4690                                     result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
4691                                     result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)
4692                                             == null));
4693                         }
4694                     }
4695                 }
4696             }
4697 
4698             mInterlock3A.open();
4699         }
4700 
        /** Sets the stopped flag for this handler (flag is read elsewhere in the handler). */
        void stop() {
            stopped = true;
        }
4704     }
4705 
4706     private class ThreeAResultListener extends CaptureResultListener {
4707         private ThreeAResultHandler mThreeAResultHandler = new ThreeAResultHandler();
4708 
4709         @Override
4710         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
4711                 long timestamp, long frameNumber) {
4712         }
4713 
4714         @Override
4715         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
4716                 TotalCaptureResult result) {
4717             try {
4718                 mThreeAResultHandler.handleCaptureResult(request, result);
4719             } catch (ItsException e) {
4720                 Logt.e(TAG, "Script error: ", e);
4721             } catch (Exception e) {
4722                 Logt.e(TAG, "Script error: ", e);
4723             }
4724         }
4725 
4726         @Override
4727         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
4728                 CaptureFailure failure) {
4729             Logt.e(TAG, "Script error: capture failed");
4730         }
4731 
4732         public void stop() {
4733             mThreeAResultHandler.stop();
4734         }
4735     }
4736 
4737     private class ExtensionsThreeAResultListener extends ExtensionCaptureResultListener {
4738         private ThreeAResultHandler mThreeAResultHandler = new ThreeAResultHandler();
4739         @Override
4740         public void onCaptureStarted(CameraExtensionSession session, CaptureRequest request,
4741                 long timestamp) {
4742         }
4743 
4744         @Override
4745         public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request,
4746                 TotalCaptureResult result) {
4747             try {
4748                 mThreeAResultHandler.handleCaptureResult(request, result);
4749             } catch (ItsException e) {
4750                 Logt.e(TAG, "Script error: ", e);
4751             } catch (Exception e) {
4752                 Logt.e(TAG, "Script error: ", e);
4753             }
4754         }
4755 
4756         @Override
4757         public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) {
4758             Logt.e(TAG, "Script error: capture failed");
4759         }
4760 
4761         public void stop() {
4762             mThreeAResultHandler.stop();
4763         }
4764     }
4765 
4766     private class PreviewFrameCaptureResultListener extends CaptureResultListener {
4767         private CountDownLatch mFrameCaptureLatch;
4768         PreviewFrameCaptureResultListener(CountDownLatch frameCaptureLatch) {
4769             mFrameCaptureLatch = frameCaptureLatch;
4770         }
4771 
4772         @Override
4773         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
4774                 long timestamp, long frameNumber) {
4775             Log.d(TAG, "PreviewFrameCaptureResultListener [onCaptureStarted]");
4776         }
4777 
4778         @Override
4779         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
4780                 TotalCaptureResult result) {
4781             Log.d(TAG, "PreviewFrameCaptureResultListener [onCaptureCompleted]");
4782             try {
4783                 if (request == null || result == null) {
4784                     throw new ItsException("Request/Result is invalid");
4785                 }
4786                 Logt.i(TAG, buildLogString(result));
4787                 mFrameCaptureLatch.countDown();
4788             } catch (ItsException e) {
4789                 throw new ItsRuntimeException("Error handling capture result", e);
4790             }
4791         }
4792 
4793         @Override
4794         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
4795                 CaptureFailure failure) {
4796             Logt.e(TAG, "Script error: capture failed");
4797         }
4798 
4799     }
4800 
4801     private class ExtensionPreviewFrameCaptureResultListener
4802             extends ExtensionCaptureResultListener {
4803         private CountDownLatch mFrameCaptureLatch;
4804 
4805         ExtensionPreviewFrameCaptureResultListener(CountDownLatch frameCaptureLatch) {
4806             mFrameCaptureLatch = frameCaptureLatch;
4807         }
4808 
4809         @Override
4810         public void onCaptureStarted(CameraExtensionSession session, CaptureRequest request,
4811                 long timestamp) {
4812             try {
4813                 if (request == null) {
4814                     throw new ItsException("Request is invalid");
4815                 }
4816                 mFrameCaptureLatch.countDown();
4817             } catch (ItsException e) {
4818                 Logt.e(TAG, "Script error: ", e);
4819             }
4820 
4821         }
4822 
4823         @Override
4824         public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request,
4825                 TotalCaptureResult result) {
4826             try {
4827                 if (request == null || result == null) {
4828                     throw new ItsException("Request/result is invalid");
4829                 }
4830                 Logt.i(TAG, buildLogString(result));
4831             } catch (ItsException e) {
4832                 Logt.e(TAG, "Script error: ", e);
4833             }
4834         }
4835 
4836         @Override
4837         public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) {
4838             Logt.e(TAG, "Script error: capture failed");
4839         }
4840     }
4841 
4842     class RecordingResultListener extends CaptureResultListener {
4843         private Map<Long, RecordingResult> mTimestampToCaptureResultsMap =
4844                 new ConcurrentHashMap<>();
4845         @Override
4846         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
4847                 long timestamp, long frameNumber) {
4848         }
4849 
4850         @Override
4851         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
4852                 TotalCaptureResult result) {
4853             try {
4854                 if (request == null || result == null) {
4855                     throw new ItsException("Request/Result is invalid");
4856                 }
4857 
4858                 Logt.i(TAG, buildLogString(result));
4859 
4860                 RecordingResult partialResult = new RecordingResult();
4861                 Logt.i(TAG, "TotalCaptureResult # " + mTimestampToCaptureResultsMap.size()
4862                         + " timestamp = " + result.get(CaptureResult.SENSOR_TIMESTAMP)
4863                         + " z = " + result.get(CaptureResult.CONTROL_ZOOM_RATIO)
4864                         + " fl = " + result.get(CaptureResult.LENS_FOCAL_LENGTH)
4865                         + " phyid = "
4866                         + result.get(CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID));
4867                 long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
4868                 partialResult.addKeys(result, RecordingResult.PREVIEW_RESULT_TRACKED_KEYS);
4869                 mTimestampToCaptureResultsMap.put(timestamp, partialResult);
4870             } catch (ItsException e) {
4871                 throw new ItsRuntimeException("Error handling capture result", e);
4872             }
4873         }
4874 
4875         @Override
4876         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
4877                 CaptureFailure failure) {
4878             Logt.e(TAG, "Script error: capture failed");
4879         }
4880 
4881         /**
4882          * Returns up-to-date value of recording capture results for calling thread.
4883          */
4884         public Map<Long, RecordingResult> getCaptureResultsMap() {
4885             return mTimestampToCaptureResultsMap;
4886         }
4887     }
4888 
4889     private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
4890         @Override
4891         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
4892                 long timestamp, long frameNumber) {
4893         }
4894 
4895         @Override
4896         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
4897                 TotalCaptureResult result) {
4898             try {
4899                 if (request == null || result == null) {
4900                     throw new ItsException("Request/result is invalid");
4901                 }
4902 
4903                 Logt.i(TAG, buildLogString(result));
4904 
4905                 int count = mCountCapRes.getAndIncrement();
4906                 mCaptureResults[count] = result;
4907                 mSocketRunnableObj.sendResponseCaptureResult(request, result, mOutputImageReaders);
4908                 synchronized(mCountCallbacksRemaining) {
4909                     mCountCallbacksRemaining.decrementAndGet();
4910                     mCountCallbacksRemaining.notify();
4911                 }
4912             } catch (ItsException e) {
4913                 Logt.e(TAG, "Script error: ", e);
4914             } catch (Exception e) {
4915                 Logt.e(TAG, "Script error: ", e);
4916             }
4917         }
4918 
4919         @Override
4920         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
4921                 CaptureFailure failure) {
4922             Logt.e(TAG, "Script error: capture failed");
4923         }
4924     };
4925 
4926     private final ExtensionCaptureResultListener mExtCaptureResultListener =
4927             new ExtensionCaptureResultListener() {
4928         @Override
4929         public void onCaptureProcessStarted(CameraExtensionSession session,
4930                 CaptureRequest request) {
4931         }
4932 
4933         @Override
4934         public void onCaptureResultAvailable(CameraExtensionSession session, CaptureRequest request,
4935                 TotalCaptureResult result) {
4936             try {
4937                 if (request == null || result == null) {
4938                     throw new ItsException("Request/result is invalid");
4939                 }
4940 
4941                 Logt.i(TAG, buildLogString(result));
4942 
4943                 int count = mCountCapRes.getAndIncrement();
4944                 mCaptureResults[count] = result;
4945                 mSocketRunnableObj.sendResponseCaptureResult(request, result, mOutputImageReaders);
4946                 synchronized(mCountCallbacksRemaining) {
4947                     mCountCallbacksRemaining.decrementAndGet();
4948                     mCountCallbacksRemaining.notify();
4949                 }
4950             } catch (ItsException e) {
4951                 Logt.e(TAG, "Script error: ", e);
4952             } catch (Exception e) {
4953                 Logt.e(TAG, "Script error: ", e);
4954             }
4955         }
4956 
4957         @Override
4958         public void onCaptureFailed(CameraExtensionSession session, CaptureRequest request) {
4959             Logt.e(TAG, "Script error: capture failed");
4960         }
4961     };
4962 
4963     private final ExtensionCaptureResultListener mExtAEResultListener =
4964             new ExtensionCaptureResultListener() {
4965                 @Override
4966                 public void onCaptureProcessStarted(CameraExtensionSession session,
4967                         CaptureRequest request) {
4968                 }
4969 
4970                 @Override
4971                 public void onCaptureResultAvailable(CameraExtensionSession session,
4972                         CaptureRequest request,
4973                         TotalCaptureResult result) {
4974                     try {
4975                         if (request == null || result == null) {
4976                             throw new ItsException("Request/result is invalid");
4977                         }
4978                         int aeState = result.get(CaptureResult.CONTROL_AE_STATE);
4979                         if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
4980                                 || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
4981                             synchronized (mCountCallbacksRemaining) {
4982                                 mCountCallbacksRemaining.decrementAndGet();
4983                                 mCountCallbacksRemaining.notify();
4984                             }
4985                         }
4986                     } catch (ItsException e) {
4987                         Logt.e(TAG, "Script error: ", e);
4988                     } catch (Exception e) {
4989                         Logt.e(TAG, "Script error: ", e);
4990                     }
4991                 }
4992 
4993                 @Override
4994                 public void onCaptureFailed(CameraExtensionSession session,
4995                         CaptureRequest request) {
4996                     Logt.e(TAG, "Script error: capture failed");
4997                 }
4998             };
4999 
5000     private static class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback {
5001         private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue =
5002                 new LinkedBlockingQueue<>();
5003 
5004         @Override
5005         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
5006                 long timestamp, long frameNumber) {
5007         }
5008 
5009         @Override
5010         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
5011                 TotalCaptureResult result) {
5012             try {
5013                 mResultQueue.put(result);
5014             } catch (InterruptedException e) {
5015                 throw new UnsupportedOperationException(
5016                         "Can't handle InterruptedException in onImageAvailable");
5017             }
5018         }
5019 
5020         @Override
5021         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
5022                 CaptureFailure failure) {
5023             Logt.e(TAG, "Script error: capture failed");
5024         }
5025 
5026         public TotalCaptureResult getResult(long timeoutMs) throws ItsException {
5027             TotalCaptureResult result;
5028             try {
5029                 result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
5030             } catch (InterruptedException e) {
5031                 throw new ItsException(e);
5032             }
5033 
5034             if (result == null) {
5035                 throw new ItsException("Getting an image timed out after " + timeoutMs +
5036                         "ms");
5037             }
5038 
5039             return result;
5040         }
5041     }
5042 
5043     private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener {
5044         private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>();
5045 
5046         @Override
5047         public void onImageAvailable(ImageReader reader) {
5048             try {
5049                 mImageQueue.put(reader.acquireNextImage());
5050             } catch (InterruptedException e) {
5051                 throw new UnsupportedOperationException(
5052                         "Can't handle InterruptedException in onImageAvailable");
5053             }
5054         }
5055 
5056         public Image getImage(long timeoutMs) throws ItsException {
5057             Image image;
5058             try {
5059                 image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
5060             } catch (InterruptedException e) {
5061                 throw new ItsException(e);
5062             }
5063 
5064             if (image == null) {
5065                 throw new ItsException("Getting an image timed out after " + timeoutMs +
5066                         "ms");
5067             }
5068             return image;
5069         }
5070     }
5071 
5072     private int getReprocessInputFormat(JSONObject params) throws ItsException {
5073         String reprocessFormat;
5074         try {
5075             reprocessFormat = params.getString("reprocessFormat");
5076         } catch (org.json.JSONException e) {
5077             throw new ItsException("Error parsing reprocess format: " + e);
5078         }
5079 
5080         if (reprocessFormat.equals("yuv")) {
5081             return ImageFormat.YUV_420_888;
5082         } else if (reprocessFormat.equals("private")) {
5083             return ImageFormat.PRIVATE;
5084         }
5085 
5086         throw new ItsException("Uknown reprocess format: " + reprocessFormat);
5087     }
5088 
5089     private boolean isFixedFocusLens(CameraCharacteristics c) {
5090         Float minFocusDistance = c.get(
5091                 CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
5092         return (minFocusDistance != null) && (minFocusDistance == 0.0);
5093     }
5094 
5095     private void validateCameraId(String cameraId) throws ItsException {
5096         if (mItsCameraIdList == null) {
5097             mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager);
5098         }
5099         if (mItsCameraIdList.mCameraIds.size() == 0) {
5100             throw new ItsException("No camera devices");
5101         }
5102         if (!mItsCameraIdList.mCameraIds.contains(cameraId)) {
5103             throw new ItsException("Invalid cameraId " + cameraId);
5104         }
5105     }
5106 
5107     private boolean isHlg10Compatible(int format) {
5108         return (format == ImageFormat.PRIVATE
5109                 || format == ImageFormat.JPEG_R
5110                 || format == ImageFormat.YCBCR_P010);
5111     }
5112 }
5113