/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.cts.verifier.camera.its;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.media.Image.Plane;
import android.net.Uri;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Message;
import android.os.SystemClock;
import android.os.Vibrator;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import android.util.SparseArray;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.blocking.BlockingSessionCallback;

import com.android.cts.verifier.camera.its.StatsImage;
import com.android.cts.verifier.R;

import org.json.JSONArray;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.math.BigInteger;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class ItsService extends Service implements SensorEventListener {
    public static final String TAG = ItsService.class.getSimpleName();

    private final int SERVICE_NOTIFICATION_ID = 37; // random int that is unique within app
    private NotificationChannel mChannel;

    // Timeouts, in seconds.
    private static final int TIMEOUT_CALLBACK = 20;
    private static final int TIMEOUT_3A = 10;

    // Time given for background requests to warm up pipeline
    private static final long PIPELINE_WARMUP_TIME_MS = 2000;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_STATE_MS = 500;
    private static final long TIMEOUT_SESSION_CLOSE = 3000;

    // Timeout to wait for a capture result after the capture buffer has arrived, in ms.
    private static final long TIMEOUT_CAP_RES = 2000;

    private static final int MAX_CONCURRENT_READER_BUFFERS = 10;

    // Supports at most RAW+YUV+JPEG, one surface each, plus optional background stream
    private static final int MAX_NUM_OUTPUT_SURFACES = 4;

    public static final int SERVERPORT = 6000;

    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String LOCK_AE_KEY = "aeLock";
    public static final String LOCK_AWB_KEY = "awbLock";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";
    public static final String VIB_PATTERN_KEY = "pattern";
    public static final String EVCOMP_KEY = "evComp";

    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private Handler mCameraHandler = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateCallback mCameraListener = null;
    private CameraDevice mCamera = null;
    private CameraCaptureSession mSession = null;
    private ImageReader[] mOutputImageReaders = null;
    private SparseArray<String> mPhysicalStreamMap = new SparseArray<String>();
    private ImageReader mInputImageReader = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    private Vibrator mVibrator = null;

    private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES];
    private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES];
    private HandlerThread mResultThread = null;
    private Handler mResultHandler = null;

    private volatile boolean mThreadExitFlag = false;

    private volatile ServerSocket mSocket = null;
    private volatile SocketRunnable mSocketRunnableObj = null;
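
    // Back-pressure for image data sent to the host: openCameraDevice() sizes mMemoryQuota
    // from the largest YUV output (4 bytes/pixel, 3 buffers); mSocketQueueQuota is handed to
    // ItsUtils.getDataFromImage() when image buffers are copied, and the socket writer thread
    // releases the quota once the matching entry in mInflightImageSizes has been written out.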
    private Semaphore mSocketQueueQuota = null;
    private int mMemoryQuota = -1;
    private LinkedList<Integer> mInflightImageSizes = new LinkedList<>();
    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue =
            new LinkedBlockingDeque<ByteBuffer>();
    private final Object mSocketWriteEnqueueLock = new Object();
    private final Object mSocketWriteDrainLock = new Object();

    private volatile BlockingQueue<Object[]> mSerializerQueue =
            new LinkedBlockingDeque<Object[]>();

    private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
    private AtomicInteger mCountRawOrDng = new AtomicInteger();
    private AtomicInteger mCountRaw10 = new AtomicInteger();
    private AtomicInteger mCountRaw12 = new AtomicInteger();
    private AtomicInteger mCountJpg = new AtomicInteger();
    private AtomicInteger mCountYuv = new AtomicInteger();
    private AtomicInteger mCountCapRes = new AtomicInteger();
    private boolean mCaptureRawIsDng;
    private boolean mCaptureRawIsStats;
    private int mCaptureStatsGridWidth;
    private int mCaptureStatsGridHeight;
    private CaptureResult mCaptureResults[] = null;

    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;
    private volatile boolean mLockedAE = false;
    private volatile boolean mLockedAWB = false;
    private volatile boolean mNeedsLockedAE = false;
    private volatile boolean mNeedsLockedAWB = false;

    class MySensorEvent {
        public Sensor sensor;
        public int accuracy;
        public long timestamp;
        public float values[];
    }

    // For capturing motion sensor traces.
    private SensorManager mSensorManager = null;
    private Sensor mAccelSensor = null;
    private Sensor mMagSensor = null;
    private Sensor mGyroSensor = null;
    private volatile LinkedList<MySensorEvent> mEvents = null;
    private volatile Object mEventLock = new Object();
    private volatile boolean mEventsEnabled = false;
    private HandlerThread mSensorThread = null;
    private Handler mSensorHandler = null;

    private static final int SERIALIZER_SURFACES_ID = 2;
    private static final int SERIALIZER_PHYSICAL_METADATA_ID = 3;

    public interface CaptureCallback {
        void onCaptureAvailable(Image capture, String physicalCameraId);
    }

    public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {}

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        try {
            mThreadExitFlag = false;

            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
            mCameraListener = new BlockingStateCallback();

            // Register for motion events.
            mEvents = new LinkedList<MySensorEvent>();
            mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
            mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
            mSensorThread = new HandlerThread("SensorThread");
            mSensorThread.start();
            mSensorHandler = new Handler(mSensorThread.getLooper());
            mSensorManager.registerListener(this, mAccelSensor,
                    SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler);
            mSensorManager.registerListener(this, mMagSensor,
                    SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler);
            mSensorManager.registerListener(this, mGyroSensor,
                    /*200hz*/5000, mSensorHandler);

            // Get a handle to the system vibrator.
            mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE);

            // Create threads to receive images and save them.
            for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
                mSaveThreads[i] = new HandlerThread("SaveThread" + i);
                mSaveThreads[i].start();
                mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper());
            }

            // Create a thread to handle object serialization.
            (new Thread(new SerializerRunnable())).start();

            // Create a thread to receive capture results and process them.
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread for the camera device.
            mCameraThread = new HandlerThread("ItsCameraThread");
            mCameraThread.start();
            mCameraHandler = new Handler(mCameraThread.getLooper());

            // Create a thread to process commands, listening on a TCP socket.
            mSocketRunnableObj = new SocketRunnable();
            (new Thread(mSocketRunnableObj)).start();
        } catch (ItsException e) {
            Logt.e(TAG, "Service failed to start: ", e);
        }

        NotificationManager notificationManager =
                (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
        mChannel = new NotificationChannel(
                "ItsServiceChannel", "ItsService", NotificationManager.IMPORTANCE_LOW);
        // Configure the notification channel.
        mChannel.setDescription("ItsServiceChannel");
        mChannel.enableVibration(false);
        notificationManager.createNotificationChannel(mChannel);
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        try {
            // Just log a message indicating that the service is running and is able to accept
            // socket connections.
            while (!mThreadExitFlag && mSocket == null) {
                Thread.sleep(1);
            }
            if (!mThreadExitFlag) {
                Logt.i(TAG, "ItsService ready");
            } else {
                Logt.e(TAG, "Starting ItsService in bad state");
            }

            Notification notification = new Notification.Builder(this, mChannel.getId())
                    .setContentTitle("CameraITS Service")
                    .setContentText("CameraITS Service is running")
                    .setSmallIcon(R.drawable.icon)
                    .setOngoing(true).build();
            startForeground(SERVICE_NOTIFICATION_ID, notification);
        } catch (java.lang.InterruptedException e) {
            Logt.e(TAG, "Error starting ItsService (interrupted)", e);
        }
        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        mThreadExitFlag = true;
        for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
            if (mSaveThreads[i] != null) {
                mSaveThreads[i].quit();
                mSaveThreads[i] = null;
            }
        }
        if (mSensorThread != null) {
            mSensorThread.quitSafely();
            mSensorThread = null;
        }
        if (mResultThread != null) {
            mResultThread.quitSafely();
            mResultThread = null;
        }
        if (mCameraThread != null) {
            mCameraThread.quitSafely();
            mCameraThread = null;
        }
    }

    public void openCameraDevice(int cameraId) throws ItsException {
        Logt.i(TAG, String.format("Opening camera %d", cameraId));

        String[] devices;
        try {
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
            if (mMemoryQuota == -1) {
                // Initialize memory quota on this device
                for (String camId : devices) {
                    CameraCharacteristics chars = mCameraManager.getCameraCharacteristics(camId);
                    Size maxYuvSize = ItsUtils.getYuvOutputSizes(chars)[0];
                    // 4 bytes per pixel for RGBA8888 Bitmap and at least 3 Bitmaps per CDD
                    int quota = maxYuvSize.getWidth() * maxYuvSize.getHeight() * 4 * 3;
                    if (quota > mMemoryQuota) {
                        mMemoryQuota = quota;
                    }
                }
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            mCamera = mBlockingCameraManager.openCamera(devices[cameraId],
                    mCameraListener, mCameraHandler);
            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(
                    devices[cameraId]);
            mSocketQueueQuota = new Semaphore(mMemoryQuota, true);
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to open camera", e);
        } catch (BlockingOpenException e) {
            throw new ItsException("Failed to open camera (after blocking)", e);
        }
        mSocketRunnableObj.sendResponse("cameraOpened", "");
    }

    public void closeCameraDevice() throws ItsException {
        try {
            if (mCamera != null) {
                Logt.i(TAG, "Closing camera");
                mCamera.close();
                mCamera = null;
            }
        } catch (Exception e) {
            throw new ItsException("Failed to close device", e);
        }
        mSocketRunnableObj.sendResponse("cameraClosed", "");
    }

    class SerializerRunnable implements Runnable {
        // Use a separate thread to perform JSON serialization (since this can be slow due to
        // the reflection).
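        // Each queued entry is an Object[] whose single String element is the response tag;
        // e.g. sendResponse(CameraCharacteristics) queues {"cameraProperties", props}, and
        // sendResponseCaptureResult() queues a 4-element array with JSONArrays at indices
        // SERIALIZER_SURFACES_ID and SERIALIZER_PHYSICAL_METADATA_ID.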
        @Override
        public void run() {
            Logt.i(TAG, "Serializer thread starting");
            while (!mThreadExitFlag) {
                try {
                    Object objs[] = mSerializerQueue.take();
                    JSONObject jsonObj = new JSONObject();
                    String tag = null;
                    for (int i = 0; i < objs.length; i++) {
                        Object obj = objs[i];
                        if (obj instanceof String) {
                            if (tag != null) {
                                throw new ItsException("Multiple tags for socket response");
                            }
                            tag = (String)obj;
                        } else if (obj instanceof CameraCharacteristics) {
                            jsonObj.put("cameraProperties", ItsSerializer.serialize(
                                    (CameraCharacteristics)obj));
                        } else if (obj instanceof CaptureRequest) {
                            jsonObj.put("captureRequest", ItsSerializer.serialize(
                                    (CaptureRequest)obj));
                        } else if (obj instanceof CaptureResult) {
                            jsonObj.put("captureResult", ItsSerializer.serialize(
                                    (CaptureResult)obj));
                        } else if (obj instanceof JSONArray) {
                            // Compare by value, not reference ("==" on Strings is a bug).
                            if ("captureResults".equals(tag)) {
                                if (i == SERIALIZER_SURFACES_ID) {
                                    jsonObj.put("outputs", (JSONArray)obj);
                                } else if (i == SERIALIZER_PHYSICAL_METADATA_ID) {
                                    jsonObj.put("physicalResults", (JSONArray)obj);
                                } else {
                                    throw new ItsException(
                                            "Unsupported JSONArray for captureResults");
                                }
                            } else {
                                jsonObj.put("outputs", (JSONArray)obj);
                            }
                        } else {
                            throw new ItsException("Invalid object received for serialization");
                        }
                    }
                    if (tag == null) {
                        throw new ItsException("No tag provided for socket response");
                    }
                    mSocketRunnableObj.sendResponse(tag, null, jsonObj, null);
                    Logt.i(TAG, String.format("Serialized %s", tag));
                } catch (org.json.JSONException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (ItsException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error serializing object (interrupted)", e);
                    break;
                }
            }
            Logt.i(TAG, "Serializer thread terminated");
        }
    }

    class SocketWriteRunnable implements Runnable {

        // Use a separate thread to service a queue of objects to be written to the socket,
        // writing each sequentially in order. This is needed since different handler functions
        // (called on different threads) will need to send data back to the host script.

        public Socket mOpenSocket = null;
        private Thread mThread = null;

        public SocketWriteRunnable(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        public void setOpenSocket(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        @Override
        public void run() {
            Logt.i(TAG, "Socket writer thread starting");
            while (true) {
                try {
                    ByteBuffer b = mSocketWriteQueue.take();
                    synchronized(mSocketWriteDrainLock) {
                        if (mOpenSocket == null) {
                            Logt.e(TAG, "No open socket connection!");
                            continue;
                        }
                        if (b.hasArray()) {
                            mOpenSocket.getOutputStream().write(b.array(), 0, b.capacity());
                        } else {
                            byte[] barray = new byte[b.capacity()];
                            b.get(barray);
                            mOpenSocket.getOutputStream().write(barray);
                        }
                        mOpenSocket.getOutputStream().flush();
                        Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity()));
                        Integer imgBufSize = mInflightImageSizes.peek();
                        if (imgBufSize != null && imgBufSize == b.capacity()) {
                            mInflightImageSizes.removeFirst();
                            if (mSocketQueueQuota != null) {
                                mSocketQueueQuota.release(imgBufSize);
                            }
                        }
                    }
                } catch (IOException e) {
                    Logt.e(TAG, "Error writing to socket", e);
                    mOpenSocket = null;
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error writing to socket (interrupted)", e);
                    mOpenSocket = null;
                    break;
                }
            }
            Logt.i(TAG, "Socket writer thread terminated");
        }

        public synchronized void checkAndStartThread() {
            if (mThread == null || mThread.getState() == Thread.State.TERMINATED) {
                mThread = new Thread(this);
            }
            if (mThread.getState() == Thread.State.NEW) {
                mThread.start();
            }
        }

    }

    class SocketRunnable implements Runnable {

        // Format of sent messages (over the socket):
        // * Serialized JSON object on a single line (newline-terminated)
        // * For byte buffers, the binary data then follows
        //
        // Format of received messages (from the socket):
        // * Serialized JSON object on a single line (newline-terminated)
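        //
        // Example exchange (illustrative values):
        //   received: {"cmdName":"getCameraIds"}
        //   sent:     {"tag":"cameraIds","objValue":{"cameraIdArray":["0","1"]}}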

        private Socket mOpenSocket = null;
        private SocketWriteRunnable mSocketWriteRunnable = null;

        @Override
        public void run() {
            Logt.i(TAG, "Socket thread starting");
            try {
                mSocket = new ServerSocket(SERVERPORT);
            } catch (IOException e) {
                Logt.e(TAG, "Failed to create socket", e);
            }

            // Create a new thread to handle writes to this socket.
            mSocketWriteRunnable = new SocketWriteRunnable(null);

            while (!mThreadExitFlag) {
                // Receive the socket-open request from the host.
                try {
                    Logt.i(TAG, "Waiting for client to connect to socket");
                    mOpenSocket = mSocket.accept();
                    if (mOpenSocket == null) {
                        Logt.e(TAG, "Socket connection error");
                        break;
                    }
                    mSocketWriteQueue.clear();
                    mInflightImageSizes.clear();
                    mSocketWriteRunnable.setOpenSocket(mOpenSocket);
                    mSocketWriteRunnable.checkAndStartThread();
                    Logt.i(TAG, "Socket connected");
                } catch (IOException e) {
                    Logt.e(TAG, "Socket open error: ", e);
                    break;
                }

                // Process commands over the open socket.
                while (!mThreadExitFlag) {
                    try {
                        BufferedReader input = new BufferedReader(
                                new InputStreamReader(mOpenSocket.getInputStream()));
                        if (input == null) {
                            Logt.e(TAG, "Failed to get socket input stream");
                            break;
                        }
                        String line = input.readLine();
                        if (line == null) {
                            Logt.i(TAG, "Socket readLine returned null (host disconnected)");
                            break;
                        }
                        processSocketCommand(line);
                    } catch (IOException e) {
                        Logt.e(TAG, "Socket read error: ", e);
                        break;
                    } catch (ItsException e) {
                        Logt.e(TAG, "Script error: ", e);
                        break;
                    }
                }

                // Close socket and go back to waiting for a new connection.
                try {
                    synchronized(mSocketWriteDrainLock) {
                        mSocketWriteQueue.clear();
                        mInflightImageSizes.clear();
                        mOpenSocket.close();
                        mOpenSocket = null;
                        mSocketWriteRunnable.setOpenSocket(null);
                        Logt.i(TAG, "Socket disconnected");
                    }
                } catch (java.io.IOException e) {
                    Logt.e(TAG, "Exception closing socket");
                }
            }

            // It's an overall error state if the code gets here; no recovery.
            // Try to do some cleanup, but the service probably needs to be restarted.
            Logt.i(TAG, "Socket server loop exited");
            mThreadExitFlag = true;
            try {
                synchronized(mSocketWriteDrainLock) {
                    if (mOpenSocket != null) {
                        mOpenSocket.close();
                        mOpenSocket = null;
                        mSocketWriteRunnable.setOpenSocket(null);
                    }
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
            try {
                if (mSocket != null) {
                    mSocket.close();
                    mSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
        }

        public void processSocketCommand(String cmd)
                throws ItsException {
            // Each command is a serialized JSON object.
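            // Example command (illustrative): {"cmdName":"open","cameraId":0}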
            try {
                JSONObject cmdObj = new JSONObject(cmd);
                Logt.i(TAG, "Start processing command " + cmdObj.getString("cmdName"));
                if ("open".equals(cmdObj.getString("cmdName"))) {
                    int cameraId = cmdObj.getInt("cameraId");
                    openCameraDevice(cameraId);
                } else if ("close".equals(cmdObj.getString("cmdName"))) {
                    closeCameraDevice();
                } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetProps();
                } else if ("getCameraPropertiesById".equals(cmdObj.getString("cmdName"))) {
                    doGetPropsById(cmdObj);
                } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doStartSensorEvents();
                } else if ("checkSensorExistence".equals(cmdObj.getString("cmdName"))) {
                    doCheckSensorExistence();
                } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doGetSensorEvents();
                } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
                    do3A(cmdObj);
                } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
                    doCapture(cmdObj);
                } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) {
                    doVibrate(cmdObj);
                } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) {
                    doGetCameraIds();
                } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) {
                    doReprocessCapture(cmdObj);
                } else {
                    throw new ItsException("Unknown command: " + cmd);
                }
                Logt.i(TAG, "Finish processing command " + cmdObj.getString("cmdName"));
            } catch (org.json.JSONException e) {
                Logt.e(TAG, "Invalid command: ", e);
            }
        }

        public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
                throws ItsException {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("tag", tag);
                if (str != null) {
                    jsonObj.put("strValue", str);
                }
                if (obj != null) {
                    jsonObj.put("objValue", obj);
                }
                if (bbuf != null) {
                    jsonObj.put("bufValueSize", bbuf.capacity());
                }
                ByteBuffer bstr = ByteBuffer.wrap(
                        (jsonObj.toString() + "\n").getBytes(Charset.defaultCharset()));
                synchronized(mSocketWriteEnqueueLock) {
                    if (bstr != null) {
                        mSocketWriteQueue.put(bstr);
                    }
                    if (bbuf != null) {
                        mInflightImageSizes.add(bbuf.capacity());
                        mSocketWriteQueue.put(bbuf);
                    }
                }
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (java.lang.InterruptedException e) {
                throw new ItsException("Socket error: ", e);
            }
        }

        public void sendResponse(String tag, String str)
                throws ItsException {
            sendResponse(tag, str, null, null);
        }

        public void sendResponse(String tag, JSONObject obj)
                throws ItsException {
            sendResponse(tag, null, obj, null);
        }

        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
                throws ItsException {
            sendResponse(tag, null, null, bbuf);
        }

        public void sendResponse(LinkedList<MySensorEvent> events)
                throws ItsException {
            Logt.i(TAG, "Sending " + events.size() + " sensor events");
            try {
                JSONArray accels = new JSONArray();
                JSONArray mags = new JSONArray();
                JSONArray gyros = new JSONArray();
                for (MySensorEvent event : events) {
                    JSONObject obj = new JSONObject();
                    obj.put("time", event.timestamp);
obj.put("x", event.values[0]); 738 obj.put("y", event.values[1]); 739 obj.put("z", event.values[2]); 740 if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 741 accels.put(obj); 742 } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { 743 mags.put(obj); 744 } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) { 745 gyros.put(obj); 746 } 747 } 748 JSONObject obj = new JSONObject(); 749 obj.put("accel", accels); 750 obj.put("mag", mags); 751 obj.put("gyro", gyros); 752 sendResponse("sensorEvents", null, obj, null); 753 } catch (org.json.JSONException e) { 754 throw new ItsException("JSON error: ", e); 755 } 756 Logt.i(TAG, "Sent sensor events"); 757 } 758 sendResponse(CameraCharacteristics props)759 public void sendResponse(CameraCharacteristics props) 760 throws ItsException { 761 try { 762 Object objs[] = new Object[2]; 763 objs[0] = "cameraProperties"; 764 objs[1] = props; 765 mSerializerQueue.put(objs); 766 } catch (InterruptedException e) { 767 throw new ItsException("Interrupted: ", e); 768 } 769 } 770 sendResponseCaptureResult(CameraCharacteristics props, CaptureRequest request, TotalCaptureResult result, ImageReader[] readers)771 public void sendResponseCaptureResult(CameraCharacteristics props, 772 CaptureRequest request, 773 TotalCaptureResult result, 774 ImageReader[] readers) 775 throws ItsException { 776 try { 777 JSONArray jsonSurfaces = new JSONArray(); 778 for (int i = 0; i < readers.length; i++) { 779 JSONObject jsonSurface = new JSONObject(); 780 jsonSurface.put("width", readers[i].getWidth()); 781 jsonSurface.put("height", readers[i].getHeight()); 782 int format = readers[i].getImageFormat(); 783 if (format == ImageFormat.RAW_SENSOR) { 784 if (mCaptureRawIsStats) { 785 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 786 .width(); 787 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 788 .height(); 789 jsonSurface.put("format", "rawStats"); 790 jsonSurface.put("width", aaw/mCaptureStatsGridWidth); 791 jsonSurface.put("height", aah/mCaptureStatsGridHeight); 792 } else if (mCaptureRawIsDng) { 793 jsonSurface.put("format", "dng"); 794 } else { 795 jsonSurface.put("format", "raw"); 796 } 797 } else if (format == ImageFormat.RAW10) { 798 jsonSurface.put("format", "raw10"); 799 } else if (format == ImageFormat.RAW12) { 800 jsonSurface.put("format", "raw12"); 801 } else if (format == ImageFormat.JPEG) { 802 jsonSurface.put("format", "jpeg"); 803 } else if (format == ImageFormat.YUV_420_888) { 804 jsonSurface.put("format", "yuv"); 805 } else { 806 throw new ItsException("Invalid format"); 807 } 808 jsonSurfaces.put(jsonSurface); 809 } 810 811 Map<String, CaptureResult> physicalMetadata = 812 result.getPhysicalCameraResults(); 813 JSONArray jsonPhysicalMetadata = new JSONArray(); 814 for (Map.Entry<String, CaptureResult> pair : physicalMetadata.entrySet()) { 815 JSONObject jsonOneMetadata = new JSONObject(); 816 jsonOneMetadata.put(pair.getKey(), ItsSerializer.serialize(pair.getValue())); 817 jsonPhysicalMetadata.put(jsonOneMetadata); 818 } 819 Object objs[] = new Object[4]; 820 objs[0] = "captureResults"; 821 objs[1] = result; 822 objs[SERIALIZER_SURFACES_ID] = jsonSurfaces; 823 objs[SERIALIZER_PHYSICAL_METADATA_ID] = jsonPhysicalMetadata; 824 mSerializerQueue.put(objs); 825 } catch (org.json.JSONException e) { 826 throw new ItsException("JSON error: ", e); 827 } catch (InterruptedException e) { 828 throw new ItsException("Interrupted: ", e); 829 } 830 } 831 } 832 833 public ImageReader.OnImageAvailableListener 
    public ImageReader.OnImageAvailableListener
            createAvailableListener(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    String physicalCameraId = "";
                    for (int idx = 0; idx < mOutputImageReaders.length; idx++) {
                        // Keep the empty-string default for streams with no physical camera id.
                        if (mOutputImageReaders[idx] == reader
                                && mPhysicalStreamMap.get(idx) != null) {
                            physicalCameraId = mPhysicalStreamMap.get(idx);
                        }
                    }
                    listener.onCaptureAvailable(i, physicalCameraId);
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper() {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                i.close();
            }
        };
    }

    private void doStartSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mEventsEnabled = true;
        }
        mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
    }

    private void doCheckSensorExistence() throws ItsException {
        try {
            JSONObject obj = new JSONObject();
            obj.put("accel", mAccelSensor != null);
            obj.put("mag", mMagSensor != null);
            obj.put("gyro", mGyroSensor != null);
            mSocketRunnableObj.sendResponse("sensorExistence", null, obj, null);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    private void doGetSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mSocketRunnableObj.sendResponse(mEvents);
            mEvents.clear();
            mEventsEnabled = false;
        }
    }

    private void doGetProps() throws ItsException {
        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
    }

    private void doGetPropsById(JSONObject params) throws ItsException {
        String[] devices;
        try {
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            String cameraId = params.getString("cameraId");
            if (Arrays.asList(devices).contains(cameraId)) {
                CameraCharacteristics characteristics =
                        mCameraManager.getCameraCharacteristics(cameraId);
                mSocketRunnableObj.sendResponse(characteristics);
            } else {
                Log.e(TAG, "Invalid camera ID: " + cameraId);
                throw new ItsException("Invalid cameraId: " + cameraId);
            }
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } catch (CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    private void doGetCameraIds() throws ItsException {
        String[] devices;
        try {
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            JSONObject obj = new JSONObject();
            JSONArray array = new JSONArray();
            for (String id : devices) {
                CameraCharacteristics characteristics =
                        mCameraManager.getCameraCharacteristics(id);
                // Only supply camera Id for non-legacy cameras, since legacy cameras do not
                // support ITS.
                if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) !=
                        CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
                    array.put(id);
                }
            }
            obj.put("cameraIdArray", array);
            mSocketRunnableObj.sendResponse("cameraIds", obj);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    private void prepareImageReaders(Size[] outputSizes, int[] outputFormats, Size inputSize,
            int inputFormat, int maxInputBuffers) {
        closeImageReaders();
        mOutputImageReaders = new ImageReader[outputSizes.length];
        for (int i = 0; i < outputSizes.length; i++) {
            // Check if the output image reader can be shared with the input image reader.
            if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) {
                mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
                        outputSizes[i].getHeight(), outputFormats[i],
                        MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers);
                mInputImageReader = mOutputImageReaders[i];
            } else {
                mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(),
                        outputSizes[i].getHeight(), outputFormats[i],
                        MAX_CONCURRENT_READER_BUFFERS);
            }
        }

        if (inputSize != null && mInputImageReader == null) {
            mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(),
                    inputFormat, maxInputBuffers);
        }
    }

    private void closeImageReaders() {
        if (mOutputImageReaders != null) {
            for (int i = 0; i < mOutputImageReaders.length; i++) {
                if (mOutputImageReaders[i] != null) {
                    mOutputImageReaders[i].close();
                    mOutputImageReaders[i] = null;
                }
            }
        }
        if (mInputImageReader != null) {
            mInputImageReader.close();
            mInputImageReader = null;
        }
    }

    private void do3A(JSONObject params) throws ItsException {
        try {
            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
            int outputFormats[] = new int[1];
            outputFormats[0] = ImageFormat.YUV_420_888;
            Size[] outputSizes = new Size[1];
            outputSizes[0] = sizes[0];
            int width = outputSizes[0].getWidth();
            int height = outputSizes[0].getHeight();

            prepareImageReaders(outputSizes, outputFormats, /*inputSize*/null, /*inputFormat*/0,
                    /*maxInputBuffers*/0);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mOutputImageReaders[0].getSurface());
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper();
            mOutputImageReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
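            // For example (illustrative numbers): [0.25, 0.25, 0.5, 0.5] with weight 1 on a
            // 4000x3000 array maps to [1000, 750, 3000, 2250, 1].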
            MeteringRectangle[] regionAE = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAF = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAWB = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    regionAE = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                }
                if (regions.has(REGION_AF_KEY)) {
                    regionAF = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                }
                if (regions.has(REGION_AWB_KEY)) {
                    regionAWB = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                }
            }

            // If AE or AWB lock is specified, then the 3A will converge first and then lock these
            // values, waiting until the HAL has reported that the lock was successful.
            mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
            mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

            // An EV compensation can be specified as part of AE convergence.
            int evComp = params.optInt(EVCOMP_KEY, 0);
            if (evComp != 0) {
                Logt.i(TAG, String.format(
                        "Running 3A with AE exposure compensation value: %d", evComp));
            }

            // By default, AE and AF both get triggered, but the user can optionally override this.
            // Also, AF won't get triggered if the lens is fixed-focus.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }
            Float minFocusDistance = mCameraCharacteristics.get(
                    CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            boolean isFixedFocusLens = minFocusDistance != null && minFocusDistance == 0.0;
            if (doAF && isFixedFocusLens) {
                // Send a dummy result back for the code that is waiting for this message to see
                // that AF has converged.
                Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
                mSocketRunnableObj.sendResponse("afResult", "0.0");
                doAF = false;
            }

            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            mLockedAE = false;
            mLockedAWB = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d",
                    doAE ? 1 : 0, doAF ? 1 : 0, mNeedsLockedAE ? 1 : 0, mNeedsLockedAWB ? 1 : 0));

            // Keep issuing capture requests until 3A has converged.
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
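                // (mInterlock3A starts open and is expected to be re-opened by
                // mCaptureResultListener once the result for the previous request arrives.)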
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
                            "AE convergence state: " + mConvergedAE + ", \n" +
                            "AF convergence state: " + mConvergedAF + ", \n" +
                            "AWB convergence state: " + mConvergedAWB + ".");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if ( (doAE && (!triggeredAE || !mConvergedAE))
                        || !mConvergedAWB
                        || (doAF && (!triggeredAF || !mConvergedAF))
                        || (doAE && mNeedsLockedAE && !mLockedAE)
                        || (mNeedsLockedAWB && !mLockedAWB)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);
                    // ITS only turns OIS on when it's explicitly requested
                    req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                            CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);

                    if (evComp != 0) {
                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
                    }

                    if (mConvergedAE && mNeedsLockedAE) {
                        req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                    }
                    if (mConvergedAWB && mNeedsLockedAWB) {
                        req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                    }

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Logt.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }
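
                    // Each trigger is set on only one request; later iterations re-issue plain
                    // requests and just poll the convergence/lock flags until 3A settles.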

                    // After AE has converged, trigger AF.
                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                        Logt.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                    }

                    req.addTarget(mOutputImageReaders[0].getSurface());

                    mIssuedRequest3A = true;
                    mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
                } else {
                    mSocketRunnableObj.sendResponse("3aConverged", "");
                    Logt.i(TAG, "3A converged");
                    break;
                }
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } finally {
            mSocketRunnableObj.sendResponse("3aDone", "");
        }
    }

    private void doVibrate(JSONObject params) throws ItsException {
        try {
            if (mVibrator == null) {
                throw new ItsException("Unable to start vibrator");
            }
            JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY);
            int len = patternArray.length();
            long pattern[] = new long[len];
            for (int i = 0; i < len; i++) {
                pattern[i] = patternArray.getLong(i);
            }
            Logt.i(TAG, String.format("Starting vibrator, pattern length %d", len));
            mVibrator.vibrate(pattern, -1);
            mSocketRunnableObj.sendResponse("vibrationStarted", "");
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    /**
     * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output
     * image readers for the parsed output surface sizes, output formats, and the given input
     * size and format.
     */
    private void prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, Size inputSize,
            int inputFormat, int maxInputBuffers, boolean backgroundRequest) throws ItsException {
        Size outputSizes[];
        int outputFormats[];
        int numSurfaces = 0;
        mPhysicalStreamMap.clear();
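
        // Each entry of jsonOutputSpecs may look like (illustrative values):
        //   {"format": "yuv", "width": 640, "height": 480, "physicalCamera": "2"}
        // or, for stats collection: {"format": "rawStats", "gridWidth": 16, "gridHeight": 16}.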
        if (jsonOutputSpecs != null) {
            try {
                numSurfaces = jsonOutputSpecs.length();
                if (backgroundRequest) {
                    numSurfaces += 1;
                }
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }

                outputSizes = new Size[numSurfaces];
                outputFormats = new int[numSurfaces];
                for (int i = 0; i < numSurfaces; i++) {
                    // Append optional background stream at the end
                    if (backgroundRequest && i == numSurfaces - 1) {
                        outputFormats[i] = ImageFormat.YUV_420_888;
                        outputSizes[i] = new Size(640, 480);
                        continue;
                    }
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        outputFormats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        outputFormats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        outputFormats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        outputFormats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics);
                    } else if ("raw12".equals(sformat)) {
                        outputFormats[i] = ImageFormat.RAW12;
                        sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        outputFormats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else if ("rawStats".equals(sformat)) {
                        outputFormats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics);
                        mCaptureRawIsStats = true;
                        mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth");
                        mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight");
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, then default to the largest allowed size for the
                    // format.
                    int width = surfaceObj.optInt("width");
                    int height = surfaceObj.optInt("height");
                    if (width <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String.format(
                                    "Zero stream configs available for requested format: %s",
                                    sformat));
                        }
                        width = ItsUtils.getMaxSize(sizes).getWidth();
                    }
                    if (height <= 0) {
                        height = ItsUtils.getMaxSize(sizes).getHeight();
                    }
                    // optString() returns an empty string (not null) when the key is absent,
                    // so an emptiness check is needed to avoid mapping every stream.
                    String physicalCameraId = surfaceObj.optString("physicalCamera");
                    if (physicalCameraId != null && !physicalCameraId.isEmpty()) {
                        mPhysicalStreamMap.put(i, physicalCameraId);
                    }

                    // The stats computation only applies to the active array region.
                    int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics).width();
                    int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics).height();
                    if (mCaptureStatsGridWidth <= 0 || mCaptureStatsGridWidth > aaw) {
                        mCaptureStatsGridWidth = aaw;
                    }
                    if (mCaptureStatsGridHeight <= 0 || mCaptureStatsGridHeight > aah) {
                        mCaptureStatsGridHeight = aah;
                    }

                    outputSizes[i] = new Size(width, height);
                }
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error", e);
            }
        } else {
            // No surface(s) specified at all.
            // Default: a single output surface which is full-res YUV.
            Size maxYuvSize = ItsUtils.getMaxOutputSize(
                    mCameraCharacteristics, ImageFormat.YUV_420_888);
            numSurfaces = backgroundRequest ? 2 : 1;

            outputSizes = new Size[numSurfaces];
            outputFormats = new int[numSurfaces];
            outputSizes[0] = maxYuvSize;
            outputFormats[0] = ImageFormat.YUV_420_888;
            if (backgroundRequest) {
                outputSizes[1] = new Size(640, 480);
                outputFormats[1] = ImageFormat.YUV_420_888;
            }
        }

        prepareImageReaders(outputSizes, outputFormats, inputSize, inputFormat, maxInputBuffers);
    }

    /**
     * Wait until mCountCallbacksRemaining reaches 0, failing if more than timeoutMs elapses
     * without any new callback arriving.
     */
    private void waitForCallbacks(long timeoutMs) throws ItsException {
        synchronized(mCountCallbacksRemaining) {
            int currentCount = mCountCallbacksRemaining.get();
            while (currentCount > 0) {
                try {
                    mCountCallbacksRemaining.wait(timeoutMs);
                } catch (InterruptedException e) {
                    throw new ItsException("Waiting for callbacks was interrupted.", e);
                }

                int newCount = mCountCallbacksRemaining.get();
                if (newCount == currentCount) {
                    throw new ItsException("No callback received within timeout " +
                            timeoutMs + "ms");
                }
                currentCount = newCount;
            }
        }
    }

    private void doCapture(JSONObject params) throws ItsException {
        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "captureRequests");

            // optional background preview requests
            List<CaptureRequest.Builder> backgroundRequests = ItsSerializer.deserializeRequestList(
                    mCamera, params, "repeatRequests");
            boolean backgroundRequest = backgroundRequests.size() > 0;

            int numSurfaces = 0;
            int numCaptureSurfaces = 0;
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            try {
                mCountRawOrDng.set(0);
                mCountJpg.set(0);
                mCountYuv.set(0);
                mCountRaw10.set(0);
                mCountRaw12.set(0);
                mCountCapRes.set(0);
                mCaptureRawIsDng = false;
                mCaptureRawIsStats = false;
                mCaptureResults = new CaptureResult[requests.size()];

                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);

                prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null,
                        /*inputFormat*/0, /*maxInputBuffers*/0, backgroundRequest);
                numSurfaces = mOutputImageReaders.length;
                numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0);

                List<OutputConfiguration> outputConfigs =
                        new ArrayList<OutputConfiguration>(numSurfaces);
                for (int i = 0; i < numSurfaces; i++) {
                    OutputConfiguration config = new OutputConfiguration(
                            mOutputImageReaders[i].getSurface());
                    if (mPhysicalStreamMap.get(i) != null) {
                        config.setPhysicalCameraId(mPhysicalStreamMap.get(i));
                    }
                    outputConfigs.add(config);
                }
                mCamera.createCaptureSessionByOutputConfigurations(outputConfigs,
                        sessionListener, mCameraHandler);
                mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

                for (int i = 0; i < numSurfaces; i++) {
                    ImageReader.OnImageAvailableListener readerListener;
                    if (backgroundRequest && i == numSurfaces - 1) {
                        readerListener = createAvailableListenerDropper();
                    } else {
                        readerListener = createAvailableListener(mCaptureCallback);
                    }
                    mOutputImageReaders[i].setOnImageAvailableListener(readerListener,
                            mSaveHandlers[i]);
                }

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests. There is one callback per image surface, and one
                // callback for the CaptureResult, for each capture.
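                // E.g. (illustrative): 3 requests to RAW+YUV surfaces expect 3 * (2 + 1) = 9.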
                int numCaptures = requests.size();
                mCountCallbacksRemaining.set(numCaptures * (numCaptureSurfaces + 1));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            }

            // Start background requests and let them warm up the pipeline.
            if (backgroundRequest) {
                List<CaptureRequest> bgRequestList =
                        new ArrayList<CaptureRequest>(backgroundRequests.size());
                for (int i = 0; i < backgroundRequests.size(); i++) {
                    CaptureRequest.Builder req = backgroundRequests.get(i);
                    req.addTarget(mOutputImageReaders[numCaptureSurfaces].getSurface());
                    bgRequestList.add(req.build());
                }
                mSession.setRepeatingBurst(bgRequestList, null, null);
                // warm up the pipeline
                Thread.sleep(PIPELINE_WARMUP_TIME_MS);
            }

            // Initiate the captures.
            long maxExpTimeNs = -1;
            List<CaptureRequest> requestList =
                    new ArrayList<>(requests.size());
            for (int i = 0; i < requests.size(); i++) {
                CaptureRequest.Builder req = requests.get(i);
                // For DNG captures, need the LSC map to be available.
                if (mCaptureRawIsDng) {
                    req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
                }
                Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
                if (expTimeNs != null && expTimeNs > maxExpTimeNs) {
                    maxExpTimeNs = expTimeNs;
                }

                for (int j = 0; j < numCaptureSurfaces; j++) {
                    req.addTarget(mOutputImageReaders[j].getSurface());
                }
                requestList.add(req.build());
            }
            mSession.captureBurst(requestList, mCaptureResultListener, mResultHandler);

            long timeout = TIMEOUT_CALLBACK * 1000;
            if (maxExpTimeNs > 0) {
                timeout += maxExpTimeNs / 1000000; // ns to ms
            }
            // Make sure all callbacks have been hit (wait until captures are done).
            // If no further callbacks arrive within the timeout, then fail.
            waitForCallbacks(timeout);

            // Close session and wait until session is fully closed
            mSession.close();
            sessionListener.getStateWaiter().waitForState(
                    BlockingSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE);

        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (InterruptedException e) {
            throw new ItsException("Unexpected InterruptedException: ", e);
        }
    }

    /**
     * Perform reprocess captures.
     *
     * It takes captureRequests in a JSON object and performs them in two steps: regular
     * capture requests to produce the reprocess inputs, then reprocess capture requests to
     * produce the reprocess outputs.
     *
     * Regular capture requests:
     * 1. For each capture request in the JSON object, create a full-size capture request with
     *    the settings in the JSON object.
     * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor
     *    from the regular capture requests. (Those settings will be used for reprocess requests.)
     * 3. Submit the regular capture requests.
     *
     * Reprocess capture requests:
     * 4. Wait for the regular capture results and use them to create reprocess capture requests.
     * 5. Wait for the regular capture output images and queue them to the image writer.
     * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2.
     * 7. Submit the reprocess capture requests.
     *
     * The output images and results for the regular capture requests won't be written to the
     * socket.
     * The output images and results for the reprocess capture requests will be written to the
     * socket.
     */
    private void doReprocessCapture(JSONObject params) throws ItsException {
        ImageWriter imageWriter = null;
        ArrayList<Integer> noiseReductionModes = new ArrayList<>();
        ArrayList<Integer> edgeModes = new ArrayList<>();
        ArrayList<Float> effectiveExposureFactors = new ArrayList<>();

        mCountRawOrDng.set(0);
        mCountJpg.set(0);
        mCountYuv.set(0);
        mCountRaw10.set(0);
        mCountRaw12.set(0);
        mCountCapRes.set(0);
        mCaptureRawIsDng = false;
        mCaptureRawIsStats = false;

        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> inputRequests =
                    ItsSerializer.deserializeRequestList(mCamera, params, "captureRequests");

            // Prepare the image readers for reprocess input and reprocess outputs.
            int inputFormat = getReprocessInputFormat(params);
            Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat);
            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat,
                    inputRequests.size(), /*backgroundRequest*/false);

            // Prepare a reprocessable session.
            int numOutputSurfaces = mOutputImageReaders.length;
            InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(),
                    inputSize.getHeight(), inputFormat);
            List<Surface> outputSurfaces = new ArrayList<Surface>();
            boolean addSurfaceForInput = true;
            for (int i = 0; i < numOutputSurfaces; i++) {
                outputSurfaces.add(mOutputImageReaders[i].getSurface());
                if (mOutputImageReaders[i] == mInputImageReader) {
                    // If input and one of the outputs share the same image reader, avoid
                    // adding the same surfaces twice.
                    addSurfaceForInput = false;
                }
            }

            if (addSurfaceForInput) {
                // Besides the output surfaces specified in JSON object, add an additional one
                // for reprocess input.
                outputSurfaces.add(mInputImageReader.getSurface());
            }

            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener,
                    mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            // Create an image writer for reprocess input.
            Surface inputSurface = mSession.getInputSurface();
            imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size());

            // Set up input reader listener and capture callback listener to get
            // reprocess input buffers and the results in order to create reprocess capture
            // requests.
            ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter();
            mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]);

            CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter();
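
            // Per steps 2-3 of the method doc: the regular captures run with the ZSL noise
            // reduction and edge modes, and the remembered settings are re-applied only on the
            // reprocess requests below.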
                noiseReductionModes.add(inputRequest.get(CaptureRequest.NOISE_REDUCTION_MODE));
                edgeModes.add(inputRequest.get(CaptureRequest.EDGE_MODE));
                effectiveExposureFactors.add(inputRequest.get(
                        CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR));

                inputRequest.set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG);
                inputRequest.set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG);
                inputRequest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null);
                inputRequest.addTarget(mInputImageReader.getSurface());
                mSession.capture(inputRequest.build(), captureCallbackWaiter, mResultHandler);
            }

            // Wait for the regular capture results and input images, and build the reprocess
            // requests from them.
            ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>();
            for (int i = 0; i < inputRequests.size(); i++) {
                TotalCaptureResult result =
                        captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000);
                reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result));
                imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000));
            }

            // Start performing the reprocess captures.

            mCaptureResults = new CaptureResult[inputRequests.size()];

            // Prepare the reprocess capture requests.
            for (int i = 0; i < numOutputSurfaces; i++) {
                ImageReader.OnImageAvailableListener outputReaderListener =
                        createAvailableListener(mCaptureCallback);
                mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener,
                        mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = reprocessOutputRequests.size();
            mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1));

            // Initiate the captures.
            for (int i = 0; i < reprocessOutputRequests.size(); i++) {
                CaptureRequest.Builder req = reprocessOutputRequests.get(i);
                for (ImageReader outputImageReader : mOutputImageReaders) {
                    req.addTarget(outputImageReader.getSurface());
                }

                req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i));
                req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i));
                req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
                        effectiveExposureFactors.get(i));

                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            // If callbacks are still outstanding after the timeout, fail.
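            // (Unlike the regular capture path in doCapture(), this timeout is not extended
            // by the maximum requested exposure time: the sensor exposure was already spent
            // on the regular captures, so reprocessing should finish within the base timeout.)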
            waitForCallbacks(TIMEOUT_CALLBACK * 1000);
        } catch (CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } finally {
            closeImageReaders();
            if (mSession != null) {
                mSession.close();
                mSession = null;
            }
            if (imageWriter != null) {
                imageWriter.close();
            }
        }
    }

    @Override
    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
        Logt.i(TAG, "Sensor " + sensor.getName() + " accuracy changed to " + accuracy);
    }

    @Override
    public final void onSensorChanged(SensorEvent event) {
        synchronized (mEventLock) {
            if (mEventsEnabled) {
                // Copy the event, since the framework may recycle SensorEvent objects.
                MySensorEvent ev2 = new MySensorEvent();
                ev2.sensor = event.sensor;
                ev2.accuracy = event.accuracy;
                ev2.timestamp = event.timestamp;
                ev2.values = new float[event.values.length];
                System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
                mEvents.add(ev2);
            }
        }
    }

    private final CaptureCallback mCaptureCallback = new CaptureCallback() {
        @Override
        public void onCaptureAvailable(Image capture, String physicalCameraId) {
            try {
                int format = capture.getFormat();
                if (format == ImageFormat.JPEG) {
                    Logt.i(TAG, "Received JPEG capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountJpg.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf);
                } else if (format == ImageFormat.YUV_420_888) {
                    Logt.i(TAG, "Received YUV capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mSocketRunnableObj.sendResponseCaptureBuffer(
                            "yuvImage" + physicalCameraId, buf);
                } else if (format == ImageFormat.RAW10) {
                    Logt.i(TAG, "Received RAW10 capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountRaw10.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer(
                            "raw10Image" + physicalCameraId, buf);
                } else if (format == ImageFormat.RAW12) {
                    Logt.i(TAG, "Received RAW12 capture");
                    byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    mCountRaw12.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer(
                            "raw12Image" + physicalCameraId, buf);
                } else if (format == ImageFormat.RAW_SENSOR) {
                    Logt.i(TAG, "Received RAW16 capture");
                    int count = mCountRawOrDng.getAndIncrement();
                    if (!mCaptureRawIsDng) {
                        byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota);
                        if (!mCaptureRawIsStats) {
                            ByteBuffer buf = ByteBuffer.wrap(img);
                            mSocketRunnableObj.sendResponseCaptureBuffer(
                                    "rawImage" + physicalCameraId, buf);
                        } else {
                            // Compute the requested stats on the raw frame, and return the
                            // results in a new "stats image".
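                            // (gw/gh below come from mCaptureStatsGridWidth/Height, which are
                            // presumably set when the rawStats capture was requested; the
                            // stats grid is aligned to the active-array crop rather than the
                            // full raw buffer.)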
                            long startTimeMs = SystemClock.elapsedRealtime();
                            int w = capture.getWidth();
                            int h = capture.getHeight();
                            android.graphics.Rect activeArray =
                                    ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics);
                            int aaw = activeArray.width();
                            int aah = activeArray.height();
                            int aax = activeArray.left;
                            int aay = activeArray.top;
                            int gw = mCaptureStatsGridWidth;
                            int gh = mCaptureStatsGridHeight;
                            float[] stats = StatsImage.computeStatsImage(
                                    img, w, h, aax, aay, aaw, aah, gw, gh);
                            long endTimeMs = SystemClock.elapsedRealtime();
                            Logt.i(TAG, "Raw stats computation took "
                                    + (endTimeMs - startTimeMs) + " ms");
                            int statsImgSize = stats.length * 4;
                            if (mSocketQueueQuota != null) {
                                mSocketQueueQuota.release(img.length);
                                mSocketQueueQuota.acquire(statsImgSize);
                            }
                            ByteBuffer bBuf = ByteBuffer.allocate(statsImgSize);
                            bBuf.order(ByteOrder.nativeOrder());
                            FloatBuffer fBuf = bBuf.asFloatBuffer();
                            fBuf.put(stats);
                            fBuf.position(0);
                            mSocketRunnableObj.sendResponseCaptureBuffer("rawStatsImage", bBuf);
                        }
                    } else {
                        // Wait until the corresponding capture result is ready, up to a
                        // timeout.
                        long t0 = SystemClock.elapsedRealtime();
                        while (!mThreadExitFlag
                                && SystemClock.elapsedRealtime() - t0 < TIMEOUT_CAP_RES) {
                            if (mCaptureResults[count] != null) {
                                Logt.i(TAG, "Writing capture as DNG");
                                DngCreator dngCreator = new DngCreator(
                                        mCameraCharacteristics, mCaptureResults[count]);
                                ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
                                dngCreator.writeImage(dngStream, capture);
                                byte[] dngArray = dngStream.toByteArray();
                                if (mSocketQueueQuota != null) {
                                    // Ideally we should acquire before allocating memory, but
                                    // here the DNG size is unknown before the toByteArray
                                    // call, so we have to register the size afterward. This
                                    // should still work most of the time, since all DNG
                                    // images are handled by the same handler thread, so we
                                    // are at most one buffer over the quota.
                                    mSocketQueueQuota.acquire(dngArray.length);
                                }
                                ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
                                mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
                                break;
                            } else {
                                Thread.sleep(1);
                            }
                        }
                    }
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }

                synchronized (mCountCallbacksRemaining) {
                    mCountCallbacksRemaining.decrementAndGet();
                    mCountCallbacksRemaining.notify();
                }
            } catch (IOException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (InterruptedException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }
    };

    private static float r2f(Rational r) {
        return (float) r.getNumerator() / (float) r.getDenominator();
    }

    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                // Currently result has all 0 values.
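                // Build and log a one-line summary of this result: 3A states, then (when the
                // READ_SENSOR_SETTINGS capability and the relevant result keys are available)
                // sensor settings, color correction gains/transform, and focus distance.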
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE)));
                int[] capabilities = mCameraCharacteristics.get(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
                if (capabilities == null) {
                    throw new ItsException("Failed to get capabilities");
                }
                boolean readSensorSettings = false;
                for (int capability : capabilities) {
                    if (capability == CameraCharacteristics
                            .REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS) {
                        readSensorSettings = true;
                        break;
                    }
                }
                if (readSensorSettings) {
                    logMsg.append(String.format(
                            "sens=%d, exp=%.1fms, dur=%.1fms, ",
                            result.get(CaptureResult.SENSOR_SENSITIVITY),
                            result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue()
                                    / 1000000.0f,
                            result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue()
                                    / 1000000.0f));
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 2))));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Logt.i(TAG, logMsg.toString());

                if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
                    mConvergedAE =
                            result.get(CaptureResult.CONTROL_AE_STATE) ==
                                    CaptureResult.CONTROL_AE_STATE_CONVERGED ||
                            result.get(CaptureResult.CONTROL_AE_STATE) ==
                                    CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
                            result.get(CaptureResult.CONTROL_AE_STATE) ==
                                    CaptureResult.CONTROL_AE_STATE_LOCKED;
                    mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                            CaptureResult.CONTROL_AE_STATE_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
                    mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                            CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
                    mConvergedAWB =
                            result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                    CaptureResult.CONTROL_AWB_STATE_CONVERGED ||
                            result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                    CaptureResult.CONTROL_AWB_STATE_LOCKED;
                    mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                            CaptureResult.CONTROL_AWB_STATE_LOCKED;
                }

                if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) {
                    if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
                            && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
                        // Use longValue() for the exposure time: it is reported in ns and can
                        // overflow an int for exposures longer than ~2.1 seconds.
                        mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
                                result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue()));
                    } else {
                        Logt.i(TAG, String.format(
                                "AE converged but NULL exposure values, sensitivity:%b, expTime:%b",
                                result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
                    }
                }

                if (mConvergedAF) {
                    if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
                        mSocketRunnableObj.sendResponse("afResult", String.format("%f",
                                result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                    } else {
                        Logt.i(TAG, "AF converged but NULL focus distance values");
                    }
                }

                if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) {
                    if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                            && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                        mSocketRunnableObj.sendResponse("awbResult", String.format(
                                "%f %f %f %f %f %f %f %f %f %f %f %f %f",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0, 2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1, 2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2, 2))));
                    } else {
                        Logt.i(TAG, String.format(
                                "AWB converged but NULL color correction values, gains:%b, ccm:%b",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
                                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null));
                    }
                }

                if (mIssuedRequest3A) {
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    int count = mCountCapRes.getAndIncrement();
                    mCaptureResults[count] = result;
                    mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics,
                            request, result, mOutputImageReaders);
                    synchronized (mCountCallbacksRemaining) {
                        mCountCallbacksRemaining.decrementAndGet();
                        mCountCallbacksRemaining.notify();
                    }
                }
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (Exception e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }
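
        // Note: a failed capture is only logged below; mCountCallbacksRemaining is not
        // decremented for it, so a genuine capture failure surfaces as a waitForCallbacks()
        // timeout rather than as an immediate error.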
        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            Logt.e(TAG, "Script error: capture failed");
        }
    };

    private class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue =
                new LinkedBlockingQueue<>();

        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                mResultQueue.put(result);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            Logt.e(TAG, "Script error: capture failed");
        }

        public TotalCaptureResult getResult(long timeoutMs) throws ItsException {
            TotalCaptureResult result;
            try {
                result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                throw new ItsException(e);
            }

            if (result == null) {
                throw new ItsException("Getting a capture result timed out after " + timeoutMs
                        + "ms");
            }

            return result;
        }
    }

    private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>();

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                mImageQueue.put(reader.acquireNextImage());
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        public Image getImage(long timeoutMs) throws ItsException {
            Image image;
            try {
                image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                throw new ItsException(e);
            }

            if (image == null) {
                throw new ItsException("Getting an image timed out after " + timeoutMs + "ms");
            }
            return image;
        }
    }

    private int getReprocessInputFormat(JSONObject params) throws ItsException {
        String reprocessFormat;
        try {
            reprocessFormat = params.getString("reprocessFormat");
        } catch (org.json.JSONException e) {
            throw new ItsException("Error parsing reprocess format: " + e);
        }

        if (reprocessFormat.equals("yuv")) {
            return ImageFormat.YUV_420_888;
        } else if (reprocessFormat.equals("private")) {
            return ImageFormat.PRIVATE;
        }

        throw new ItsException("Unknown reprocess format: " + reprocessFormat);
    }
}