1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import android.app.Service; 20 import android.content.Context; 21 import android.content.Intent; 22 import android.graphics.ImageFormat; 23 import android.hardware.camera2.CameraCaptureSession; 24 import android.hardware.camera2.CameraAccessException; 25 import android.hardware.camera2.CameraCharacteristics; 26 import android.hardware.camera2.CameraDevice; 27 import android.hardware.camera2.CameraManager; 28 import android.hardware.camera2.CaptureFailure; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.DngCreator; 32 import android.hardware.camera2.TotalCaptureResult; 33 import android.hardware.camera2.params.InputConfiguration; 34 import android.hardware.camera2.params.MeteringRectangle; 35 import android.hardware.Sensor; 36 import android.hardware.SensorEvent; 37 import android.hardware.SensorEventListener; 38 import android.hardware.SensorManager; 39 import android.media.Image; 40 import android.media.ImageReader; 41 import android.media.ImageWriter; 42 import android.net.Uri; 43 import android.os.ConditionVariable; 44 import android.os.Handler; 45 import android.os.HandlerThread; 46 import android.os.IBinder; 47 import android.os.Message; 48 import android.os.Vibrator; 49 import android.util.Log; 50 import android.util.Rational; 51 import android.util.Size; 52 import android.view.Surface; 53 54 import com.android.ex.camera2.blocking.BlockingCameraManager; 55 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException; 56 import com.android.ex.camera2.blocking.BlockingStateCallback; 57 import com.android.ex.camera2.blocking.BlockingSessionCallback; 58 59 import org.json.JSONArray; 60 import org.json.JSONObject; 61 62 import java.io.BufferedReader; 63 import java.io.BufferedWriter; 64 import java.io.ByteArrayOutputStream; 65 import java.io.IOException; 66 import java.io.InputStreamReader; 67 import java.io.OutputStreamWriter; 68 import java.io.PrintWriter; 69 import java.math.BigInteger; 70 import java.net.ServerSocket; 71 import java.net.Socket; 72 import java.nio.ByteBuffer; 73 import java.nio.charset.Charset; 74 import java.security.MessageDigest; 75 import java.util.ArrayList; 76 import java.util.Arrays; 77 import java.util.LinkedList; 78 import java.util.List; 79 import java.util.concurrent.BlockingQueue; 80 import java.util.concurrent.CountDownLatch; 81 import java.util.concurrent.LinkedBlockingDeque; 82 import java.util.concurrent.LinkedBlockingQueue; 83 import java.util.concurrent.TimeUnit; 84 import java.util.concurrent.atomic.AtomicInteger; 85 86 public class ItsService extends Service implements SensorEventListener { 87 public static final String TAG = ItsService.class.getSimpleName(); 88 89 // Timeouts, in seconds. 90 public static final int TIMEOUT_CALLBACK = 3; 91 public static final int TIMEOUT_3A = 10; 92 93 // State transition timeouts, in ms. 94 private static final long TIMEOUT_IDLE_MS = 2000; 95 private static final long TIMEOUT_STATE_MS = 500; 96 97 // Timeout to wait for a capture result after the capture buffer has arrived, in ms. 98 private static final long TIMEOUT_CAP_RES = 2000; 99 100 private static final int MAX_CONCURRENT_READER_BUFFERS = 10; 101 102 // Supports at most RAW+YUV+JPEG, one surface each. 103 private static final int MAX_NUM_OUTPUT_SURFACES = 3; 104 105 public static final int SERVERPORT = 6000; 106 107 public static final String REGION_KEY = "regions"; 108 public static final String REGION_AE_KEY = "ae"; 109 public static final String REGION_AWB_KEY = "awb"; 110 public static final String REGION_AF_KEY = "af"; 111 public static final String LOCK_AE_KEY = "aeLock"; 112 public static final String LOCK_AWB_KEY = "awbLock"; 113 public static final String TRIGGER_KEY = "triggers"; 114 public static final String TRIGGER_AE_KEY = "ae"; 115 public static final String TRIGGER_AF_KEY = "af"; 116 public static final String VIB_PATTERN_KEY = "pattern"; 117 public static final String EVCOMP_KEY = "evComp"; 118 119 private CameraManager mCameraManager = null; 120 private HandlerThread mCameraThread = null; 121 private Handler mCameraHandler = null; 122 private BlockingCameraManager mBlockingCameraManager = null; 123 private BlockingStateCallback mCameraListener = null; 124 private CameraDevice mCamera = null; 125 private CameraCaptureSession mSession = null; 126 private ImageReader[] mOutputImageReaders = null; 127 private ImageReader mInputImageReader = null; 128 private CameraCharacteristics mCameraCharacteristics = null; 129 130 private Vibrator mVibrator = null; 131 132 private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES]; 133 private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES]; 134 private HandlerThread mResultThread = null; 135 private Handler mResultHandler = null; 136 137 private volatile boolean mThreadExitFlag = false; 138 139 private volatile ServerSocket mSocket = null; 140 private volatile SocketRunnable mSocketRunnableObj = null; 141 private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = 142 new LinkedBlockingDeque<ByteBuffer>(); 143 private final Object mSocketWriteEnqueueLock = new Object(); 144 private final Object mSocketWriteDrainLock = new Object(); 145 146 private volatile BlockingQueue<Object[]> mSerializerQueue = 147 new LinkedBlockingDeque<Object[]>(); 148 149 private AtomicInteger mCountCallbacksRemaining = new AtomicInteger(); 150 private AtomicInteger mCountRawOrDng = new AtomicInteger(); 151 private AtomicInteger mCountRaw10 = new AtomicInteger(); 152 private AtomicInteger mCountRaw12 = new AtomicInteger(); 153 private AtomicInteger mCountJpg = new AtomicInteger(); 154 private AtomicInteger mCountYuv = new AtomicInteger(); 155 private AtomicInteger mCountCapRes = new AtomicInteger(); 156 private boolean mCaptureRawIsDng; 157 private CaptureResult mCaptureResults[] = null; 158 159 private volatile ConditionVariable mInterlock3A = new ConditionVariable(true); 160 private volatile boolean mIssuedRequest3A = false; 161 private volatile boolean mConvergedAE = false; 162 private volatile boolean mConvergedAF = false; 163 private volatile boolean mConvergedAWB = false; 164 private volatile boolean mLockedAE = false; 165 private volatile boolean mLockedAWB = false; 166 private volatile boolean mNeedsLockedAE = false; 167 private volatile boolean mNeedsLockedAWB = false; 168 169 class MySensorEvent { 170 public Sensor sensor; 171 public int accuracy; 172 public long timestamp; 173 public float values[]; 174 } 175 176 // For capturing motion sensor traces. 177 private SensorManager mSensorManager = null; 178 private Sensor mAccelSensor = null; 179 private Sensor mMagSensor = null; 180 private Sensor mGyroSensor = null; 181 private volatile LinkedList<MySensorEvent> mEvents = null; 182 private volatile Object mEventLock = new Object(); 183 private volatile boolean mEventsEnabled = false; 184 185 public interface CaptureCallback { onCaptureAvailable(Image capture)186 void onCaptureAvailable(Image capture); 187 } 188 189 public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {} 190 191 @Override onBind(Intent intent)192 public IBinder onBind(Intent intent) { 193 return null; 194 } 195 196 @Override onCreate()197 public void onCreate() { 198 try { 199 mThreadExitFlag = false; 200 201 // Get handle to camera manager. 202 mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE); 203 if (mCameraManager == null) { 204 throw new ItsException("Failed to connect to camera manager"); 205 } 206 mBlockingCameraManager = new BlockingCameraManager(mCameraManager); 207 mCameraListener = new BlockingStateCallback(); 208 209 // Register for motion events. 210 mEvents = new LinkedList<MySensorEvent>(); 211 mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE); 212 mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); 213 mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); 214 mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); 215 mSensorManager.registerListener(this, mAccelSensor, SensorManager.SENSOR_DELAY_FASTEST); 216 mSensorManager.registerListener(this, mMagSensor, SensorManager.SENSOR_DELAY_FASTEST); 217 mSensorManager.registerListener(this, mGyroSensor, SensorManager.SENSOR_DELAY_FASTEST); 218 219 // Get a handle to the system vibrator. 220 mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); 221 222 // Create threads to receive images and save them. 223 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 224 mSaveThreads[i] = new HandlerThread("SaveThread" + i); 225 mSaveThreads[i].start(); 226 mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper()); 227 } 228 229 // Create a thread to handle object serialization. 230 (new Thread(new SerializerRunnable())).start();; 231 232 // Create a thread to receive capture results and process them. 233 mResultThread = new HandlerThread("ResultThread"); 234 mResultThread.start(); 235 mResultHandler = new Handler(mResultThread.getLooper()); 236 237 // Create a thread for the camera device. 238 mCameraThread = new HandlerThread("ItsCameraThread"); 239 mCameraThread.start(); 240 mCameraHandler = new Handler(mCameraThread.getLooper()); 241 242 // Create a thread to process commands, listening on a TCP socket. 243 mSocketRunnableObj = new SocketRunnable(); 244 (new Thread(mSocketRunnableObj)).start(); 245 } catch (ItsException e) { 246 Logt.e(TAG, "Service failed to start: ", e); 247 } 248 } 249 250 @Override onStartCommand(Intent intent, int flags, int startId)251 public int onStartCommand(Intent intent, int flags, int startId) { 252 try { 253 // Just log a message indicating that the service is running and is able to accept 254 // socket connections. 255 while (!mThreadExitFlag && mSocket==null) { 256 Thread.sleep(1); 257 } 258 if (!mThreadExitFlag){ 259 Logt.i(TAG, "ItsService ready"); 260 } else { 261 Logt.e(TAG, "Starting ItsService in bad state"); 262 } 263 } catch (java.lang.InterruptedException e) { 264 Logt.e(TAG, "Error starting ItsService (interrupted)", e); 265 } 266 return START_STICKY; 267 } 268 269 @Override onDestroy()270 public void onDestroy() { 271 mThreadExitFlag = true; 272 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 273 if (mSaveThreads[i] != null) { 274 mSaveThreads[i].quit(); 275 mSaveThreads[i] = null; 276 } 277 } 278 if (mResultThread != null) { 279 mResultThread.quitSafely(); 280 mResultThread = null; 281 } 282 if (mCameraThread != null) { 283 mCameraThread.quitSafely(); 284 mCameraThread = null; 285 } 286 } 287 openCameraDevice(int cameraId)288 public void openCameraDevice(int cameraId) throws ItsException { 289 Logt.i(TAG, String.format("Opening camera %d", cameraId)); 290 291 String[] devices; 292 try { 293 devices = mCameraManager.getCameraIdList(); 294 if (devices == null || devices.length == 0) { 295 throw new ItsException("No camera devices"); 296 } 297 } catch (CameraAccessException e) { 298 throw new ItsException("Failed to get device ID list", e); 299 } 300 301 try { 302 mCamera = mBlockingCameraManager.openCamera(devices[cameraId], 303 mCameraListener, mCameraHandler); 304 mCameraCharacteristics = mCameraManager.getCameraCharacteristics( 305 devices[cameraId]); 306 } catch (CameraAccessException e) { 307 throw new ItsException("Failed to open camera", e); 308 } catch (BlockingOpenException e) { 309 throw new ItsException("Failed to open camera (after blocking)", e); 310 } 311 mSocketRunnableObj.sendResponse("cameraOpened", ""); 312 } 313 closeCameraDevice()314 public void closeCameraDevice() throws ItsException { 315 try { 316 if (mCamera != null) { 317 Logt.i(TAG, "Closing camera"); 318 mCamera.close(); 319 mCamera = null; 320 } 321 } catch (Exception e) { 322 throw new ItsException("Failed to close device"); 323 } 324 mSocketRunnableObj.sendResponse("cameraClosed", ""); 325 } 326 327 class SerializerRunnable implements Runnable { 328 // Use a separate thread to perform JSON serialization (since this can be slow due to 329 // the reflection). 330 @Override run()331 public void run() { 332 Logt.i(TAG, "Serializer thread starting"); 333 while (! mThreadExitFlag) { 334 try { 335 Object objs[] = mSerializerQueue.take(); 336 JSONObject jsonObj = new JSONObject(); 337 String tag = null; 338 for (int i = 0; i < objs.length; i++) { 339 Object obj = objs[i]; 340 if (obj instanceof String) { 341 if (tag != null) { 342 throw new ItsException("Multiple tags for socket response"); 343 } 344 tag = (String)obj; 345 } else if (obj instanceof CameraCharacteristics) { 346 jsonObj.put("cameraProperties", ItsSerializer.serialize( 347 (CameraCharacteristics)obj)); 348 } else if (obj instanceof CaptureRequest) { 349 jsonObj.put("captureRequest", ItsSerializer.serialize( 350 (CaptureRequest)obj)); 351 } else if (obj instanceof CaptureResult) { 352 jsonObj.put("captureResult", ItsSerializer.serialize( 353 (CaptureResult)obj)); 354 } else if (obj instanceof JSONArray) { 355 jsonObj.put("outputs", (JSONArray)obj); 356 } else { 357 throw new ItsException("Invalid object received for serialiation"); 358 } 359 } 360 if (tag == null) { 361 throw new ItsException("No tag provided for socket response"); 362 } 363 mSocketRunnableObj.sendResponse(tag, null, jsonObj, null); 364 Logt.i(TAG, String.format("Serialized %s", tag)); 365 } catch (org.json.JSONException e) { 366 Logt.e(TAG, "Error serializing object", e); 367 break; 368 } catch (ItsException e) { 369 Logt.e(TAG, "Error serializing object", e); 370 break; 371 } catch (java.lang.InterruptedException e) { 372 Logt.e(TAG, "Error serializing object (interrupted)", e); 373 break; 374 } 375 } 376 Logt.i(TAG, "Serializer thread terminated"); 377 } 378 } 379 380 class SocketWriteRunnable implements Runnable { 381 382 // Use a separate thread to service a queue of objects to be written to the socket, 383 // writing each sequentially in order. This is needed since different handler functions 384 // (called on different threads) will need to send data back to the host script. 385 386 public Socket mOpenSocket = null; 387 SocketWriteRunnable(Socket openSocket)388 public SocketWriteRunnable(Socket openSocket) { 389 mOpenSocket = openSocket; 390 } 391 setOpenSocket(Socket openSocket)392 public void setOpenSocket(Socket openSocket) { 393 mOpenSocket = openSocket; 394 } 395 396 @Override run()397 public void run() { 398 Logt.i(TAG, "Socket writer thread starting"); 399 while (true) { 400 try { 401 ByteBuffer b = mSocketWriteQueue.take(); 402 synchronized(mSocketWriteDrainLock) { 403 if (mOpenSocket == null) { 404 continue; 405 } 406 if (b.hasArray()) { 407 mOpenSocket.getOutputStream().write(b.array()); 408 } else { 409 byte[] barray = new byte[b.capacity()]; 410 b.get(barray); 411 mOpenSocket.getOutputStream().write(barray); 412 } 413 mOpenSocket.getOutputStream().flush(); 414 Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity())); 415 } 416 } catch (IOException e) { 417 Logt.e(TAG, "Error writing to socket", e); 418 break; 419 } catch (java.lang.InterruptedException e) { 420 Logt.e(TAG, "Error writing to socket (interrupted)", e); 421 break; 422 } 423 } 424 Logt.i(TAG, "Socket writer thread terminated"); 425 } 426 } 427 428 class SocketRunnable implements Runnable { 429 430 // Format of sent messages (over the socket): 431 // * Serialized JSON object on a single line (newline-terminated) 432 // * For byte buffers, the binary data then follows 433 // 434 // Format of received messages (from the socket): 435 // * Serialized JSON object on a single line (newline-terminated) 436 437 private Socket mOpenSocket = null; 438 private SocketWriteRunnable mSocketWriteRunnable = null; 439 440 @Override run()441 public void run() { 442 Logt.i(TAG, "Socket thread starting"); 443 try { 444 mSocket = new ServerSocket(SERVERPORT); 445 } catch (IOException e) { 446 Logt.e(TAG, "Failed to create socket", e); 447 } 448 449 // Create a new thread to handle writes to this socket. 450 mSocketWriteRunnable = new SocketWriteRunnable(null); 451 (new Thread(mSocketWriteRunnable)).start(); 452 453 while (!mThreadExitFlag) { 454 // Receive the socket-open request from the host. 455 try { 456 Logt.i(TAG, "Waiting for client to connect to socket"); 457 mOpenSocket = mSocket.accept(); 458 if (mOpenSocket == null) { 459 Logt.e(TAG, "Socket connection error"); 460 break; 461 } 462 mSocketWriteQueue.clear(); 463 mSocketWriteRunnable.setOpenSocket(mOpenSocket); 464 Logt.i(TAG, "Socket connected"); 465 } catch (IOException e) { 466 Logt.e(TAG, "Socket open error: ", e); 467 break; 468 } 469 470 // Process commands over the open socket. 471 while (!mThreadExitFlag) { 472 try { 473 BufferedReader input = new BufferedReader( 474 new InputStreamReader(mOpenSocket.getInputStream())); 475 if (input == null) { 476 Logt.e(TAG, "Failed to get socket input stream"); 477 break; 478 } 479 String line = input.readLine(); 480 if (line == null) { 481 Logt.i(TAG, "Socket readline retuned null (host disconnected)"); 482 break; 483 } 484 processSocketCommand(line); 485 } catch (IOException e) { 486 Logt.e(TAG, "Socket read error: ", e); 487 break; 488 } catch (ItsException e) { 489 Logt.e(TAG, "Script error: ", e); 490 break; 491 } 492 } 493 494 // Close socket and go back to waiting for a new connection. 495 try { 496 synchronized(mSocketWriteDrainLock) { 497 mSocketWriteQueue.clear(); 498 mOpenSocket.close(); 499 mOpenSocket = null; 500 mSocketWriteRunnable.setOpenSocket(null); 501 Logt.i(TAG, "Socket disconnected"); 502 } 503 } catch (java.io.IOException e) { 504 Logt.e(TAG, "Exception closing socket"); 505 } 506 } 507 508 // It's an overall error state if the code gets here; no recevery. 509 // Try to do some cleanup, but the service probably needs to be restarted. 510 Logt.i(TAG, "Socket server loop exited"); 511 mThreadExitFlag = true; 512 try { 513 synchronized(mSocketWriteDrainLock) { 514 if (mOpenSocket != null) { 515 mOpenSocket.close(); 516 mOpenSocket = null; 517 mSocketWriteRunnable.setOpenSocket(null); 518 } 519 } 520 } catch (java.io.IOException e) { 521 Logt.w(TAG, "Exception closing socket"); 522 } 523 try { 524 if (mSocket != null) { 525 mSocket.close(); 526 mSocket = null; 527 } 528 } catch (java.io.IOException e) { 529 Logt.w(TAG, "Exception closing socket"); 530 } 531 } 532 processSocketCommand(String cmd)533 public void processSocketCommand(String cmd) 534 throws ItsException { 535 // Each command is a serialized JSON object. 536 try { 537 JSONObject cmdObj = new JSONObject(cmd); 538 if ("open".equals(cmdObj.getString("cmdName"))) { 539 int cameraId = cmdObj.getInt("cameraId"); 540 openCameraDevice(cameraId); 541 } else if ("close".equals(cmdObj.getString("cmdName"))) { 542 closeCameraDevice(); 543 } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) { 544 doGetProps(); 545 } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) { 546 doStartSensorEvents(); 547 } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) { 548 doGetSensorEvents(); 549 } else if ("do3A".equals(cmdObj.getString("cmdName"))) { 550 do3A(cmdObj); 551 } else if ("doCapture".equals(cmdObj.getString("cmdName"))) { 552 doCapture(cmdObj); 553 } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) { 554 doVibrate(cmdObj); 555 } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) { 556 doGetCameraIds(); 557 } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) { 558 doReprocessCapture(cmdObj); 559 } else { 560 throw new ItsException("Unknown command: " + cmd); 561 } 562 } catch (org.json.JSONException e) { 563 Logt.e(TAG, "Invalid command: ", e); 564 } 565 } 566 sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)567 public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf) 568 throws ItsException { 569 try { 570 JSONObject jsonObj = new JSONObject(); 571 jsonObj.put("tag", tag); 572 if (str != null) { 573 jsonObj.put("strValue", str); 574 } 575 if (obj != null) { 576 jsonObj.put("objValue", obj); 577 } 578 if (bbuf != null) { 579 jsonObj.put("bufValueSize", bbuf.capacity()); 580 } 581 ByteBuffer bstr = ByteBuffer.wrap( 582 (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset())); 583 synchronized(mSocketWriteEnqueueLock) { 584 if (bstr != null) { 585 mSocketWriteQueue.put(bstr); 586 } 587 if (bbuf != null) { 588 mSocketWriteQueue.put(bbuf); 589 } 590 } 591 } catch (org.json.JSONException e) { 592 throw new ItsException("JSON error: ", e); 593 } catch (java.lang.InterruptedException e) { 594 throw new ItsException("Socket error: ", e); 595 } 596 } 597 sendResponse(String tag, String str)598 public void sendResponse(String tag, String str) 599 throws ItsException { 600 sendResponse(tag, str, null, null); 601 } 602 sendResponse(String tag, JSONObject obj)603 public void sendResponse(String tag, JSONObject obj) 604 throws ItsException { 605 sendResponse(tag, null, obj, null); 606 } 607 sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)608 public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf) 609 throws ItsException { 610 sendResponse(tag, null, null, bbuf); 611 } 612 sendResponse(LinkedList<MySensorEvent> events)613 public void sendResponse(LinkedList<MySensorEvent> events) 614 throws ItsException { 615 try { 616 JSONArray accels = new JSONArray(); 617 JSONArray mags = new JSONArray(); 618 JSONArray gyros = new JSONArray(); 619 for (MySensorEvent event : events) { 620 JSONObject obj = new JSONObject(); 621 obj.put("time", event.timestamp); 622 obj.put("x", event.values[0]); 623 obj.put("y", event.values[1]); 624 obj.put("z", event.values[2]); 625 if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 626 accels.put(obj); 627 } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { 628 mags.put(obj); 629 } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) { 630 gyros.put(obj); 631 } 632 } 633 JSONObject obj = new JSONObject(); 634 obj.put("accel", accels); 635 obj.put("mag", mags); 636 obj.put("gyro", gyros); 637 sendResponse("sensorEvents", null, obj, null); 638 } catch (org.json.JSONException e) { 639 throw new ItsException("JSON error: ", e); 640 } 641 } 642 sendResponse(CameraCharacteristics props)643 public void sendResponse(CameraCharacteristics props) 644 throws ItsException { 645 try { 646 Object objs[] = new Object[2]; 647 objs[0] = "cameraProperties"; 648 objs[1] = props; 649 mSerializerQueue.put(objs); 650 } catch (InterruptedException e) { 651 throw new ItsException("Interrupted: ", e); 652 } 653 } 654 sendResponseCaptureResult(CameraCharacteristics props, CaptureRequest request, CaptureResult result, ImageReader[] readers)655 public void sendResponseCaptureResult(CameraCharacteristics props, 656 CaptureRequest request, 657 CaptureResult result, 658 ImageReader[] readers) 659 throws ItsException { 660 try { 661 JSONArray jsonSurfaces = new JSONArray(); 662 for (int i = 0; i < readers.length; i++) { 663 JSONObject jsonSurface = new JSONObject(); 664 jsonSurface.put("width", readers[i].getWidth()); 665 jsonSurface.put("height", readers[i].getHeight()); 666 int format = readers[i].getImageFormat(); 667 if (format == ImageFormat.RAW_SENSOR) { 668 jsonSurface.put("format", "raw"); 669 } else if (format == ImageFormat.RAW10) { 670 jsonSurface.put("format", "raw10"); 671 } else if (format == ImageFormat.RAW12) { 672 jsonSurface.put("format", "raw12"); 673 } else if (format == ImageFormat.JPEG) { 674 jsonSurface.put("format", "jpeg"); 675 } else if (format == ImageFormat.YUV_420_888) { 676 jsonSurface.put("format", "yuv"); 677 } else { 678 throw new ItsException("Invalid format"); 679 } 680 jsonSurfaces.put(jsonSurface); 681 } 682 683 Object objs[] = new Object[5]; 684 objs[0] = "captureResults"; 685 objs[1] = props; 686 objs[2] = request; 687 objs[3] = result; 688 objs[4] = jsonSurfaces; 689 mSerializerQueue.put(objs); 690 } catch (org.json.JSONException e) { 691 throw new ItsException("JSON error: ", e); 692 } catch (InterruptedException e) { 693 throw new ItsException("Interrupted: ", e); 694 } 695 } 696 } 697 698 public ImageReader.OnImageAvailableListener createAvailableListener(final CaptureCallback listener)699 createAvailableListener(final CaptureCallback listener) { 700 return new ImageReader.OnImageAvailableListener() { 701 @Override 702 public void onImageAvailable(ImageReader reader) { 703 Image i = null; 704 try { 705 i = reader.acquireNextImage(); 706 listener.onCaptureAvailable(i); 707 } finally { 708 if (i != null) { 709 i.close(); 710 } 711 } 712 } 713 }; 714 } 715 716 private ImageReader.OnImageAvailableListener 717 createAvailableListenerDropper(final CaptureCallback listener) { 718 return new ImageReader.OnImageAvailableListener() { 719 @Override 720 public void onImageAvailable(ImageReader reader) { 721 Image i = reader.acquireNextImage(); 722 i.close(); 723 } 724 }; 725 } 726 727 private void doStartSensorEvents() throws ItsException { 728 synchronized(mEventLock) { 729 mEventsEnabled = true; 730 } 731 mSocketRunnableObj.sendResponse("sensorEventsStarted", ""); 732 } 733 734 private void doGetSensorEvents() throws ItsException { 735 synchronized(mEventLock) { 736 mSocketRunnableObj.sendResponse(mEvents); 737 mEvents.clear(); 738 mEventsEnabled = false; 739 } 740 } 741 742 private void doGetProps() throws ItsException { 743 mSocketRunnableObj.sendResponse(mCameraCharacteristics); 744 } 745 746 private void doGetCameraIds() throws ItsException { 747 String[] devices; 748 try { 749 devices = mCameraManager.getCameraIdList(); 750 if (devices == null || devices.length == 0) { 751 throw new ItsException("No camera devices"); 752 } 753 } catch (CameraAccessException e) { 754 throw new ItsException("Failed to get device ID list", e); 755 } 756 757 try { 758 JSONObject obj = new JSONObject(); 759 JSONArray array = new JSONArray(); 760 for (String id : devices) { 761 CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id); 762 // Only supply camera Id for non-legacy cameras since legacy camera does not 763 // support ITS 764 if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) != 765 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { 766 array.put(id); 767 } 768 } 769 obj.put("cameraIdArray", array); 770 mSocketRunnableObj.sendResponse("cameraIds", obj); 771 } catch (org.json.JSONException e) { 772 throw new ItsException("JSON error: ", e); 773 } catch (android.hardware.camera2.CameraAccessException e) { 774 throw new ItsException("Access error: ", e); 775 } 776 } 777 778 private void prepareImageReaders(Size[] outputSizes, int[] outputFormats, Size inputSize, 779 int inputFormat, int maxInputBuffers) { 780 closeImageReaders(); 781 mOutputImageReaders = new ImageReader[outputSizes.length]; 782 for (int i = 0; i < outputSizes.length; i++) { 783 // Check if the output image reader can be shared with the input image reader. 784 if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) { 785 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 786 outputSizes[i].getHeight(), outputFormats[i], 787 MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers); 788 mInputImageReader = mOutputImageReaders[i]; 789 } else { 790 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 791 outputSizes[i].getHeight(), outputFormats[i], 792 MAX_CONCURRENT_READER_BUFFERS); 793 } 794 } 795 796 if (inputSize != null && mInputImageReader == null) { 797 mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(), 798 inputFormat, maxInputBuffers); 799 } 800 } 801 802 private void closeImageReaders() { 803 if (mOutputImageReaders != null) { 804 for (int i = 0; i < mOutputImageReaders.length; i++) { 805 if (mOutputImageReaders[i] != null) { 806 mOutputImageReaders[i].close(); 807 mOutputImageReaders[i] = null; 808 } 809 } 810 } 811 if (mInputImageReader != null) { 812 mInputImageReader.close(); 813 mInputImageReader = null; 814 } 815 } 816 817 private void do3A(JSONObject params) throws ItsException { 818 try { 819 // Start a 3A action, and wait for it to converge. 820 // Get the converged values for each "A", and package into JSON result for caller. 821 822 // 3A happens on full-res frames. 823 Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics); 824 int outputFormats[] = new int[1]; 825 outputFormats[0] = ImageFormat.YUV_420_888; 826 Size[] outputSizes = new Size[1]; 827 outputSizes[0] = sizes[0]; 828 int width = outputSizes[0].getWidth(); 829 int height = outputSizes[0].getHeight(); 830 831 prepareImageReaders(outputSizes, outputFormats, /*inputSize*/null, /*inputFormat*/0, 832 /*maxInputBuffers*/0); 833 List<Surface> outputSurfaces = new ArrayList<Surface>(1); 834 outputSurfaces.add(mOutputImageReaders[0].getSurface()); 835 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 836 mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler); 837 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 838 839 // Add a listener that just recycles buffers; they aren't saved anywhere. 840 ImageReader.OnImageAvailableListener readerListener = 841 createAvailableListenerDropper(mCaptureCallback); 842 mOutputImageReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]); 843 844 // Get the user-specified regions for AE, AWB, AF. 845 // Note that the user specifies normalized [x,y,w,h], which is converted below 846 // to an [x0,y0,x1,y1] region in sensor coords. The capture request region 847 // also has a fifth "weight" element: [x0,y0,x1,y1,w]. 848 MeteringRectangle[] regionAE = new MeteringRectangle[]{ 849 new MeteringRectangle(0,0,width,height,1)}; 850 MeteringRectangle[] regionAF = new MeteringRectangle[]{ 851 new MeteringRectangle(0,0,width,height,1)}; 852 MeteringRectangle[] regionAWB = new MeteringRectangle[]{ 853 new MeteringRectangle(0,0,width,height,1)}; 854 if (params.has(REGION_KEY)) { 855 JSONObject regions = params.getJSONObject(REGION_KEY); 856 if (regions.has(REGION_AE_KEY)) { 857 regionAE = ItsUtils.getJsonWeightedRectsFromArray( 858 regions.getJSONArray(REGION_AE_KEY), true, width, height); 859 } 860 if (regions.has(REGION_AF_KEY)) { 861 regionAF = ItsUtils.getJsonWeightedRectsFromArray( 862 regions.getJSONArray(REGION_AF_KEY), true, width, height); 863 } 864 if (regions.has(REGION_AWB_KEY)) { 865 regionAWB = ItsUtils.getJsonWeightedRectsFromArray( 866 regions.getJSONArray(REGION_AWB_KEY), true, width, height); 867 } 868 } 869 870 // If AE or AWB lock is specified, then the 3A will converge first and then lock these 871 // values, waiting until the HAL has reported that the lock was successful. 872 mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false); 873 mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false); 874 875 // An EV compensation can be specified as part of AE convergence. 876 int evComp = params.optInt(EVCOMP_KEY, 0); 877 if (evComp != 0) { 878 Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp)); 879 } 880 881 // By default, AE and AF both get triggered, but the user can optionally override this. 882 // Also, AF won't get triggered if the lens is fixed-focus. 883 boolean doAE = true; 884 boolean doAF = true; 885 if (params.has(TRIGGER_KEY)) { 886 JSONObject triggers = params.getJSONObject(TRIGGER_KEY); 887 if (triggers.has(TRIGGER_AE_KEY)) { 888 doAE = triggers.getBoolean(TRIGGER_AE_KEY); 889 } 890 if (triggers.has(TRIGGER_AF_KEY)) { 891 doAF = triggers.getBoolean(TRIGGER_AF_KEY); 892 } 893 } 894 Float minFocusDistance = mCameraCharacteristics.get( 895 CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); 896 boolean isFixedFocusLens = minFocusDistance != null && minFocusDistance == 0.0; 897 if (doAF && isFixedFocusLens) { 898 // Send a dummy result back for the code that is waiting for this message to see 899 // that AF has converged. 900 Logt.i(TAG, "Ignoring request for AF on fixed-focus camera"); 901 mSocketRunnableObj.sendResponse("afResult", "0.0"); 902 doAF = false; 903 } 904 905 mInterlock3A.open(); 906 mIssuedRequest3A = false; 907 mConvergedAE = false; 908 mConvergedAWB = false; 909 mConvergedAF = false; 910 mLockedAE = false; 911 mLockedAWB = false; 912 long tstart = System.currentTimeMillis(); 913 boolean triggeredAE = false; 914 boolean triggeredAF = false; 915 916 Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", 917 doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0)); 918 919 // Keep issuing capture requests until 3A has converged. 920 while (true) { 921 922 // Block until can take the next 3A frame. Only want one outstanding frame 923 // at a time, to simplify the logic here. 924 if (!mInterlock3A.block(TIMEOUT_3A * 1000) || 925 System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) { 926 throw new ItsException( 927 "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" + 928 "AE converge state: " + mConvergedAE + ", \n" + 929 "AF convergence state: " + mConvergedAF + ", \n" + 930 "AWB convergence state: " + mConvergedAWB + "."); 931 } 932 mInterlock3A.close(); 933 934 // If not converged yet, issue another capture request. 935 if ( (doAE && (!triggeredAE || !mConvergedAE)) 936 || !mConvergedAWB 937 || (doAF && (!triggeredAF || !mConvergedAF)) 938 || (doAE && mNeedsLockedAE && !mLockedAE) 939 || (mNeedsLockedAWB && !mLockedAWB)) { 940 941 // Baseline capture request for 3A. 942 CaptureRequest.Builder req = mCamera.createCaptureRequest( 943 CameraDevice.TEMPLATE_PREVIEW); 944 req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 945 req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 946 req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, 947 CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); 948 req.set(CaptureRequest.CONTROL_AE_MODE, 949 CaptureRequest.CONTROL_AE_MODE_ON); 950 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); 951 req.set(CaptureRequest.CONTROL_AE_LOCK, false); 952 req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE); 953 req.set(CaptureRequest.CONTROL_AF_MODE, 954 CaptureRequest.CONTROL_AF_MODE_AUTO); 955 req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF); 956 req.set(CaptureRequest.CONTROL_AWB_MODE, 957 CaptureRequest.CONTROL_AWB_MODE_AUTO); 958 req.set(CaptureRequest.CONTROL_AWB_LOCK, false); 959 req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB); 960 961 if (evComp != 0) { 962 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp); 963 } 964 965 if (mConvergedAE && mNeedsLockedAE) { 966 req.set(CaptureRequest.CONTROL_AE_LOCK, true); 967 } 968 if (mConvergedAWB && mNeedsLockedAWB) { 969 req.set(CaptureRequest.CONTROL_AWB_LOCK, true); 970 } 971 972 // Trigger AE first. 973 if (doAE && !triggeredAE) { 974 Logt.i(TAG, "Triggering AE"); 975 req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 976 CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 977 triggeredAE = true; 978 } 979 980 // After AE has converged, trigger AF. 981 if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) { 982 Logt.i(TAG, "Triggering AF"); 983 req.set(CaptureRequest.CONTROL_AF_TRIGGER, 984 CaptureRequest.CONTROL_AF_TRIGGER_START); 985 triggeredAF = true; 986 } 987 988 req.addTarget(mOutputImageReaders[0].getSurface()); 989 990 mIssuedRequest3A = true; 991 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 992 } else { 993 mSocketRunnableObj.sendResponse("3aConverged", ""); 994 Logt.i(TAG, "3A converged"); 995 break; 996 } 997 } 998 } catch (android.hardware.camera2.CameraAccessException e) { 999 throw new ItsException("Access error: ", e); 1000 } catch (org.json.JSONException e) { 1001 throw new ItsException("JSON error: ", e); 1002 } finally { 1003 mSocketRunnableObj.sendResponse("3aDone", ""); 1004 } 1005 } 1006 1007 private void doVibrate(JSONObject params) throws ItsException { 1008 try { 1009 if (mVibrator == null) { 1010 throw new ItsException("Unable to start vibrator"); 1011 } 1012 JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY); 1013 int len = patternArray.length(); 1014 long pattern[] = new long[len]; 1015 for (int i = 0; i < len; i++) { 1016 pattern[i] = patternArray.getLong(i); 1017 } 1018 Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len)); 1019 mVibrator.vibrate(pattern, -1); 1020 mSocketRunnableObj.sendResponse("vibrationStarted", ""); 1021 } catch (org.json.JSONException e) { 1022 throw new ItsException("JSON error: ", e); 1023 } 1024 } 1025 1026 /** 1027 * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output 1028 * image readers for the parsed output surface sizes, output formats, and the given input 1029 * size and format. 1030 */ 1031 private void prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, Size inputSize, 1032 int inputFormat, int maxInputBuffers) throws ItsException { 1033 Size outputSizes[]; 1034 int outputFormats[]; 1035 int numSurfaces = 0; 1036 1037 if (jsonOutputSpecs != null) { 1038 try { 1039 numSurfaces = jsonOutputSpecs.length(); 1040 if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) { 1041 throw new ItsException("Too many output surfaces"); 1042 } 1043 1044 outputSizes = new Size[numSurfaces]; 1045 outputFormats = new int[numSurfaces]; 1046 for (int i = 0; i < numSurfaces; i++) { 1047 // Get the specified surface. 1048 JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i); 1049 String sformat = surfaceObj.optString("format"); 1050 Size sizes[]; 1051 if ("yuv".equals(sformat) || "".equals(sformat)) { 1052 // Default to YUV if no format is specified. 1053 outputFormats[i] = ImageFormat.YUV_420_888; 1054 sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics); 1055 } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) { 1056 outputFormats[i] = ImageFormat.JPEG; 1057 sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics); 1058 } else if ("raw".equals(sformat)) { 1059 outputFormats[i] = ImageFormat.RAW_SENSOR; 1060 sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics); 1061 } else if ("raw10".equals(sformat)) { 1062 outputFormats[i] = ImageFormat.RAW10; 1063 sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics); 1064 } else if ("raw12".equals(sformat)) { 1065 outputFormats[i] = ImageFormat.RAW12; 1066 sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics); 1067 } else if ("dng".equals(sformat)) { 1068 outputFormats[i] = ImageFormat.RAW_SENSOR; 1069 sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics); 1070 mCaptureRawIsDng = true; 1071 } else { 1072 throw new ItsException("Unsupported format: " + sformat); 1073 } 1074 // If the size is omitted, then default to the largest allowed size for the 1075 // format. 1076 int width = surfaceObj.optInt("width"); 1077 int height = surfaceObj.optInt("height"); 1078 if (width <= 0) { 1079 if (sizes == null || sizes.length == 0) { 1080 throw new ItsException(String.format( 1081 "Zero stream configs available for requested format: %s", 1082 sformat)); 1083 } 1084 width = ItsUtils.getMaxSize(sizes).getWidth(); 1085 } 1086 if (height <= 0) { 1087 height = ItsUtils.getMaxSize(sizes).getHeight(); 1088 } 1089 1090 outputSizes[i] = new Size(width, height); 1091 } 1092 } catch (org.json.JSONException e) { 1093 throw new ItsException("JSON error", e); 1094 } 1095 } else { 1096 // No surface(s) specified at all. 1097 // Default: a single output surface which is full-res YUV. 1098 Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics); 1099 numSurfaces = 1; 1100 1101 outputSizes = new Size[1]; 1102 outputFormats = new int[1]; 1103 outputSizes[0] = sizes[0]; 1104 outputFormats[0] = ImageFormat.YUV_420_888; 1105 } 1106 1107 prepareImageReaders(outputSizes, outputFormats, inputSize, inputFormat, maxInputBuffers); 1108 } 1109 1110 private void doCapture(JSONObject params) throws ItsException { 1111 try { 1112 // Parse the JSON to get the list of capture requests. 1113 List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList( 1114 mCamera, params); 1115 1116 int numSurfaces = 0; 1117 try { 1118 mCountRawOrDng.set(0); 1119 mCountJpg.set(0); 1120 mCountYuv.set(0); 1121 mCountRaw10.set(0); 1122 mCountRaw12.set(0); 1123 mCountCapRes.set(0); 1124 mCaptureRawIsDng = false; 1125 mCaptureResults = new CaptureResult[requests.size()]; 1126 1127 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1128 1129 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null, 1130 /*inputFormat*/0, /*maxInputBuffers*/0); 1131 numSurfaces = mOutputImageReaders.length; 1132 1133 List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces); 1134 for (int i = 0; i < numSurfaces; i++) { 1135 outputSurfaces.add(mOutputImageReaders[i].getSurface()); 1136 } 1137 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1138 mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler); 1139 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1140 1141 for (int i = 0; i < numSurfaces; i++) { 1142 ImageReader.OnImageAvailableListener readerListener = 1143 createAvailableListener(mCaptureCallback); 1144 mOutputImageReaders[i].setOnImageAvailableListener(readerListener, 1145 mSaveHandlers[i]); 1146 } 1147 1148 // Plan for how many callbacks need to be received throughout the duration of this 1149 // sequence of capture requests. There is one callback per image surface, and one 1150 // callback for the CaptureResult, for each capture. 1151 int numCaptures = requests.size(); 1152 mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1)); 1153 1154 } catch (CameraAccessException e) { 1155 throw new ItsException("Error configuring outputs", e); 1156 } 1157 1158 // Initiate the captures. 1159 long maxExpTimeNs = -1; 1160 for (int i = 0; i < requests.size(); i++) { 1161 CaptureRequest.Builder req = requests.get(i); 1162 // For DNG captures, need the LSC map to be available. 1163 if (mCaptureRawIsDng) { 1164 req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1); 1165 } 1166 Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME); 1167 if (expTimeNs != null && expTimeNs > maxExpTimeNs) { 1168 maxExpTimeNs = expTimeNs; 1169 } 1170 1171 for (int j = 0; j < numSurfaces; j++) { 1172 req.addTarget(mOutputImageReaders[j].getSurface()); 1173 } 1174 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 1175 } 1176 1177 long timeout = TIMEOUT_CALLBACK * 1000; 1178 if (maxExpTimeNs > 0) { 1179 timeout += maxExpTimeNs / 1000000; // ns to ms 1180 } 1181 // Make sure all callbacks have been hit (wait until captures are done). 1182 // If no timeouts are received after a timeout, then fail. 1183 int currentCount = mCountCallbacksRemaining.get(); 1184 while (currentCount > 0) { 1185 try { 1186 Thread.sleep(timeout); 1187 } catch (InterruptedException e) { 1188 throw new ItsException("Timeout failure", e); 1189 } 1190 int newCount = mCountCallbacksRemaining.get(); 1191 if (newCount == currentCount) { 1192 throw new ItsException( 1193 "No callback received within timeout"); 1194 } 1195 currentCount = newCount; 1196 } 1197 } catch (android.hardware.camera2.CameraAccessException e) { 1198 throw new ItsException("Access error: ", e); 1199 } 1200 } 1201 1202 /** 1203 * Perform reprocess captures. 1204 * 1205 * It takes captureRequests in a JSON object and perform capture requests in two steps: 1206 * regular capture request to get reprocess input and reprocess capture request to get 1207 * reprocess outputs. 1208 * 1209 * Regular capture requests: 1210 * 1. For each capture request in the JSON object, create a full-size capture request with 1211 * the settings in the JSON object. 1212 * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor 1213 * from the regular capture requests. (Those settings will be used for reprocess requests.) 1214 * 3. Submit the regular capture requests. 1215 * 1216 * Reprocess capture requests: 1217 * 4. Wait for the regular capture results and use them to create reprocess capture requests. 1218 * 5. Wait for the regular capture output images and queue them to the image writer. 1219 * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2. 1220 * 7. Submit the reprocess capture requests. 1221 * 1222 * The output images and results for the regular capture requests won't be written to socket. 1223 * The output images and results for the reprocess capture requests will be written to socket. 1224 */ 1225 private void doReprocessCapture(JSONObject params) throws ItsException { 1226 ImageWriter imageWriter = null; 1227 ArrayList<Integer> noiseReductionModes = new ArrayList<>(); 1228 ArrayList<Integer> edgeModes = new ArrayList<>(); 1229 ArrayList<Float> effectiveExposureFactors = new ArrayList<>(); 1230 1231 mCountRawOrDng.set(0); 1232 mCountJpg.set(0); 1233 mCountYuv.set(0); 1234 mCountRaw10.set(0); 1235 mCountRaw12.set(0); 1236 mCountCapRes.set(0); 1237 mCaptureRawIsDng = false; 1238 1239 try { 1240 // Parse the JSON to get the list of capture requests. 1241 List<CaptureRequest.Builder> inputRequests = 1242 ItsSerializer.deserializeRequestList(mCamera, params); 1243 1244 // Prepare the image readers for reprocess input and reprocess outputs. 1245 int inputFormat = getReprocessInputFormat(params); 1246 Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat); 1247 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1248 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat, 1249 inputRequests.size()); 1250 1251 // Prepare a reprocessable session. 1252 int numOutputSurfaces = mOutputImageReaders.length; 1253 InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(), 1254 inputSize.getHeight(), inputFormat); 1255 List<Surface> outputSurfaces = new ArrayList<Surface>(); 1256 boolean addSurfaceForInput = true; 1257 for (int i = 0; i < numOutputSurfaces; i++) { 1258 outputSurfaces.add(mOutputImageReaders[i].getSurface()); 1259 if (mOutputImageReaders[i] == mInputImageReader) { 1260 // If input and one of the outputs share the same image reader, avoid 1261 // adding the same surfaces twice. 1262 addSurfaceForInput = false; 1263 } 1264 } 1265 1266 if (addSurfaceForInput) { 1267 // Besides the output surfaces specified in JSON object, add an additional one 1268 // for reprocess input. 1269 outputSurfaces.add(mInputImageReader.getSurface()); 1270 } 1271 1272 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1273 mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener, 1274 mCameraHandler); 1275 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1276 1277 // Create an image writer for reprocess input. 1278 Surface inputSurface = mSession.getInputSurface(); 1279 imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size()); 1280 1281 // Set up input reader listener and capture callback listener to get 1282 // reprocess input buffers and the results in order to create reprocess capture 1283 // requests. 1284 ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter(); 1285 mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]); 1286 1287 CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter(); 1288 // Prepare the reprocess input request 1289 for (CaptureRequest.Builder inputReqest : inputRequests) { 1290 // Remember and clear noise reduction, edge enhancement, and effective exposure 1291 // factors. 1292 noiseReductionModes.add(inputReqest.get(CaptureRequest.NOISE_REDUCTION_MODE)); 1293 edgeModes.add(inputReqest.get(CaptureRequest.EDGE_MODE)); 1294 effectiveExposureFactors.add(inputReqest.get( 1295 CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)); 1296 1297 inputReqest.set(CaptureRequest.NOISE_REDUCTION_MODE, 1298 CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG); 1299 inputReqest.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG); 1300 inputReqest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null); 1301 inputReqest.addTarget(mInputImageReader.getSurface()); 1302 mSession.capture(inputReqest.build(), captureCallbackWaiter, mResultHandler); 1303 } 1304 1305 // Wait for reprocess input images 1306 ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>(); 1307 for (int i = 0; i < inputRequests.size(); i++) { 1308 TotalCaptureResult result = 1309 captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000); 1310 reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result)); 1311 imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000)); 1312 } 1313 1314 // Start performing reprocess captures. 1315 1316 mCaptureResults = new CaptureResult[inputRequests.size()]; 1317 1318 // Prepare reprocess capture requests. 1319 for (int i = 0; i < numOutputSurfaces; i++) { 1320 ImageReader.OnImageAvailableListener outputReaderListener = 1321 createAvailableListener(mCaptureCallback); 1322 mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener, 1323 mSaveHandlers[i]); 1324 } 1325 1326 // Initiate the captures. 1327 for (int i = 0; i < reprocessOutputRequests.size(); i++) { 1328 CaptureRequest.Builder req = reprocessOutputRequests.get(i); 1329 for (ImageReader outputImageReader : mOutputImageReaders) { 1330 req.addTarget(outputImageReader.getSurface()); 1331 } 1332 1333 req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i)); 1334 req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i)); 1335 req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, 1336 effectiveExposureFactors.get(i)); 1337 1338 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 1339 } 1340 1341 // Plan for how many callbacks need to be received throughout the duration of this 1342 // sequence of capture requests. There is one callback per image surface, and one 1343 // callback for the CaptureResult, for each capture. 1344 int numCaptures = reprocessOutputRequests.size(); 1345 mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1)); 1346 1347 // Make sure all callbacks have been hit (wait until captures are done). 1348 // If no timeouts are received after a timeout, then fail. 1349 int currentCount = mCountCallbacksRemaining.get(); 1350 while (currentCount > 0) { 1351 try { 1352 Thread.sleep(TIMEOUT_CALLBACK*1000); 1353 } catch (InterruptedException e) { 1354 throw new ItsException("Timeout failure", e); 1355 } 1356 int newCount = mCountCallbacksRemaining.get(); 1357 if (newCount == currentCount) { 1358 throw new ItsException( 1359 "No callback received within timeout"); 1360 } 1361 currentCount = newCount; 1362 } 1363 } catch (android.hardware.camera2.CameraAccessException e) { 1364 throw new ItsException("Access error: ", e); 1365 } finally { 1366 closeImageReaders(); 1367 if (mSession != null) { 1368 mSession.close(); 1369 mSession = null; 1370 } 1371 if (imageWriter != null) { 1372 imageWriter.close(); 1373 } 1374 } 1375 } 1376 1377 @Override 1378 public final void onSensorChanged(SensorEvent event) { 1379 synchronized(mEventLock) { 1380 if (mEventsEnabled) { 1381 MySensorEvent ev2 = new MySensorEvent(); 1382 ev2.sensor = event.sensor; 1383 ev2.accuracy = event.accuracy; 1384 ev2.timestamp = event.timestamp; 1385 ev2.values = new float[event.values.length]; 1386 System.arraycopy(event.values, 0, ev2.values, 0, event.values.length); 1387 mEvents.add(ev2); 1388 } 1389 } 1390 } 1391 1392 @Override 1393 public final void onAccuracyChanged(Sensor sensor, int accuracy) { 1394 } 1395 1396 private final CaptureCallback mCaptureCallback = new CaptureCallback() { 1397 @Override 1398 public void onCaptureAvailable(Image capture) { 1399 try { 1400 int format = capture.getFormat(); 1401 if (format == ImageFormat.JPEG) { 1402 Logt.i(TAG, "Received JPEG capture"); 1403 byte[] img = ItsUtils.getDataFromImage(capture); 1404 ByteBuffer buf = ByteBuffer.wrap(img); 1405 int count = mCountJpg.getAndIncrement(); 1406 mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf); 1407 } else if (format == ImageFormat.YUV_420_888) { 1408 Logt.i(TAG, "Received YUV capture"); 1409 byte[] img = ItsUtils.getDataFromImage(capture); 1410 ByteBuffer buf = ByteBuffer.wrap(img); 1411 int count = mCountYuv.getAndIncrement(); 1412 mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf); 1413 } else if (format == ImageFormat.RAW10) { 1414 Logt.i(TAG, "Received RAW10 capture"); 1415 byte[] img = ItsUtils.getDataFromImage(capture); 1416 ByteBuffer buf = ByteBuffer.wrap(img); 1417 int count = mCountRaw10.getAndIncrement(); 1418 mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf); 1419 } else if (format == ImageFormat.RAW12) { 1420 Logt.i(TAG, "Received RAW12 capture"); 1421 byte[] img = ItsUtils.getDataFromImage(capture); 1422 ByteBuffer buf = ByteBuffer.wrap(img); 1423 int count = mCountRaw12.getAndIncrement(); 1424 mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image", buf); 1425 } else if (format == ImageFormat.RAW_SENSOR) { 1426 Logt.i(TAG, "Received RAW16 capture"); 1427 int count = mCountRawOrDng.getAndIncrement(); 1428 if (! mCaptureRawIsDng) { 1429 byte[] img = ItsUtils.getDataFromImage(capture); 1430 ByteBuffer buf = ByteBuffer.wrap(img); 1431 mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf); 1432 } else { 1433 // Wait until the corresponding capture result is ready, up to a timeout. 1434 long t0 = android.os.SystemClock.elapsedRealtime(); 1435 while (! mThreadExitFlag 1436 && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) { 1437 if (mCaptureResults[count] != null) { 1438 Logt.i(TAG, "Writing capture as DNG"); 1439 DngCreator dngCreator = new DngCreator( 1440 mCameraCharacteristics, mCaptureResults[count]); 1441 ByteArrayOutputStream dngStream = new ByteArrayOutputStream(); 1442 dngCreator.writeImage(dngStream, capture); 1443 byte[] dngArray = dngStream.toByteArray(); 1444 ByteBuffer dngBuf = ByteBuffer.wrap(dngArray); 1445 mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf); 1446 break; 1447 } else { 1448 Thread.sleep(1); 1449 } 1450 } 1451 } 1452 } else { 1453 throw new ItsException("Unsupported image format: " + format); 1454 } 1455 mCountCallbacksRemaining.decrementAndGet(); 1456 } catch (IOException e) { 1457 Logt.e(TAG, "Script error: ", e); 1458 } catch (InterruptedException e) { 1459 Logt.e(TAG, "Script error: ", e); 1460 } catch (ItsException e) { 1461 Logt.e(TAG, "Script error: ", e); 1462 } 1463 } 1464 }; 1465 1466 private static float r2f(Rational r) { 1467 return (float)r.getNumerator() / (float)r.getDenominator(); 1468 } 1469 1470 private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() { 1471 @Override 1472 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 1473 long timestamp, long frameNumber) { 1474 } 1475 1476 @Override 1477 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 1478 TotalCaptureResult result) { 1479 try { 1480 // Currently result has all 0 values. 1481 if (request == null || result == null) { 1482 throw new ItsException("Request/result is invalid"); 1483 } 1484 1485 StringBuilder logMsg = new StringBuilder(); 1486 logMsg.append(String.format( 1487 "Capt result: AE=%d, AF=%d, AWB=%d, ", 1488 result.get(CaptureResult.CONTROL_AE_STATE), 1489 result.get(CaptureResult.CONTROL_AF_STATE), 1490 result.get(CaptureResult.CONTROL_AWB_STATE))); 1491 int[] capabilities = mCameraCharacteristics.get( 1492 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1493 if (capabilities == null) { 1494 throw new ItsException("Failed to get capabilities"); 1495 } 1496 boolean readSensorSettings = false; 1497 for (int capability : capabilities) { 1498 if (capability == 1499 CameraCharacteristics. 1500 REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS) { 1501 readSensorSettings = true; 1502 break; 1503 } 1504 } 1505 if (readSensorSettings) { 1506 logMsg.append(String.format( 1507 "sens=%d, exp=%.1fms, dur=%.1fms, ", 1508 result.get(CaptureResult.SENSOR_SENSITIVITY), 1509 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f, 1510 result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1511 1000000.0f)); 1512 } 1513 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) { 1514 logMsg.append(String.format( 1515 "gains=[%.1f, %.1f, %.1f, %.1f], ", 1516 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 1517 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 1518 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 1519 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue())); 1520 } else { 1521 logMsg.append("gains=[], "); 1522 } 1523 if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 1524 logMsg.append(String.format( 1525 "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ", 1526 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 1527 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 1528 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 1529 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 1530 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 1531 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 1532 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 1533 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 1534 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)))); 1535 } else { 1536 logMsg.append("xform=[], "); 1537 } 1538 logMsg.append(String.format( 1539 "foc=%.1f", 1540 result.get(CaptureResult.LENS_FOCUS_DISTANCE))); 1541 Logt.i(TAG, logMsg.toString()); 1542 1543 if (result.get(CaptureResult.CONTROL_AE_STATE) != null) { 1544 mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 1545 CaptureResult.CONTROL_AE_STATE_CONVERGED || 1546 result.get(CaptureResult.CONTROL_AE_STATE) == 1547 CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED || 1548 result.get(CaptureResult.CONTROL_AE_STATE) == 1549 CaptureResult.CONTROL_AE_STATE_LOCKED; 1550 mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 1551 CaptureResult.CONTROL_AE_STATE_LOCKED; 1552 } 1553 if (result.get(CaptureResult.CONTROL_AF_STATE) != null) { 1554 mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) == 1555 CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED; 1556 } 1557 if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) { 1558 mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 1559 CaptureResult.CONTROL_AWB_STATE_CONVERGED || 1560 result.get(CaptureResult.CONTROL_AWB_STATE) == 1561 CaptureResult.CONTROL_AWB_STATE_LOCKED; 1562 mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 1563 CaptureResult.CONTROL_AWB_STATE_LOCKED; 1564 } 1565 1566 if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) { 1567 if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null 1568 && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) { 1569 mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d", 1570 result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(), 1571 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() 1572 )); 1573 } else { 1574 Logt.i(TAG, String.format( 1575 "AE converged but NULL exposure values, sensitivity:%b, expTime:%b", 1576 result.get(CaptureResult.SENSOR_SENSITIVITY) == null, 1577 result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null)); 1578 } 1579 } 1580 1581 if (mConvergedAF) { 1582 if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) { 1583 mSocketRunnableObj.sendResponse("afResult", String.format("%f", 1584 result.get(CaptureResult.LENS_FOCUS_DISTANCE) 1585 )); 1586 } else { 1587 Logt.i(TAG, "AF converged but NULL focus distance values"); 1588 } 1589 } 1590 1591 if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) { 1592 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null 1593 && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 1594 mSocketRunnableObj.sendResponse("awbResult", String.format( 1595 "%f %f %f %f %f %f %f %f %f %f %f %f %f", 1596 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 1597 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 1598 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 1599 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(), 1600 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 1601 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 1602 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 1603 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 1604 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 1605 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 1606 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 1607 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 1608 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)) 1609 )); 1610 } else { 1611 Logt.i(TAG, String.format( 1612 "AWB converged but NULL color correction values, gains:%b, ccm:%b", 1613 result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null, 1614 result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null)); 1615 } 1616 } 1617 1618 if (mIssuedRequest3A) { 1619 mIssuedRequest3A = false; 1620 mInterlock3A.open(); 1621 } else { 1622 int count = mCountCapRes.getAndIncrement(); 1623 mCaptureResults[count] = result; 1624 mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics, 1625 request, result, mOutputImageReaders); 1626 mCountCallbacksRemaining.decrementAndGet(); 1627 } 1628 } catch (ItsException e) { 1629 Logt.e(TAG, "Script error: ", e); 1630 } catch (Exception e) { 1631 Logt.e(TAG, "Script error: ", e); 1632 } 1633 } 1634 1635 @Override 1636 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 1637 CaptureFailure failure) { 1638 Logt.e(TAG, "Script error: capture failed"); 1639 } 1640 }; 1641 1642 private class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback { 1643 private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue = 1644 new LinkedBlockingQueue<>(); 1645 1646 @Override 1647 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 1648 long timestamp, long frameNumber) { 1649 } 1650 1651 @Override 1652 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 1653 TotalCaptureResult result) { 1654 try { 1655 mResultQueue.put(result); 1656 } catch (InterruptedException e) { 1657 throw new UnsupportedOperationException( 1658 "Can't handle InterruptedException in onImageAvailable"); 1659 } 1660 } 1661 1662 @Override 1663 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 1664 CaptureFailure failure) { 1665 Logt.e(TAG, "Script error: capture failed"); 1666 } 1667 1668 public TotalCaptureResult getResult(long timeoutMs) throws ItsException { 1669 TotalCaptureResult result; 1670 try { 1671 result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 1672 } catch (InterruptedException e) { 1673 throw new ItsException(e); 1674 } 1675 1676 if (result == null) { 1677 throw new ItsException("Getting an image timed out after " + timeoutMs + 1678 "ms"); 1679 } 1680 1681 return result; 1682 } 1683 } 1684 1685 private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener { 1686 private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>(); 1687 1688 @Override 1689 public void onImageAvailable(ImageReader reader) { 1690 try { 1691 mImageQueue.put(reader.acquireNextImage()); 1692 } catch (InterruptedException e) { 1693 throw new UnsupportedOperationException( 1694 "Can't handle InterruptedException in onImageAvailable"); 1695 } 1696 } 1697 1698 public Image getImage(long timeoutMs) throws ItsException { 1699 Image image; 1700 try { 1701 image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 1702 } catch (InterruptedException e) { 1703 throw new ItsException(e); 1704 } 1705 1706 if (image == null) { 1707 throw new ItsException("Getting an image timed out after " + timeoutMs + 1708 "ms"); 1709 } 1710 return image; 1711 } 1712 } 1713 1714 private int getReprocessInputFormat(JSONObject params) throws ItsException { 1715 String reprocessFormat; 1716 try { 1717 reprocessFormat = params.getString("reprocessFormat"); 1718 } catch (org.json.JSONException e) { 1719 throw new ItsException("Error parsing reprocess format: " + e); 1720 } 1721 1722 if (reprocessFormat.equals("yuv")) { 1723 return ImageFormat.YUV_420_888; 1724 } else if (reprocessFormat.equals("private")) { 1725 return ImageFormat.PRIVATE; 1726 } 1727 1728 throw new ItsException("Uknown reprocess format: " + reprocessFormat); 1729 } 1730 } 1731