/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.MultiResolutionImageReader;
import android.hardware.camera2.cts.helpers.CameraErrorCollector;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.cts.helpers.CameraUtils;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.MandatoryStreamCombination;
import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
import android.hardware.camera2.params.MultiResolutionStreamInfo;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.location.LocationManager;
import android.media.ExifInterface;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.media.Image.Plane;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.util.Range;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;

import junit.framework.Assert;

import org.mockito.Mockito;

import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.text.ParseException;
import java.text.SimpleDateFormat;

/**
 * A package private utility class for wrapping up the camera2 cts test common utility functions
 */
public class CameraTestUtils extends Assert {
    private static final String TAG = "CameraTestUtils";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
    public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
    public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
    public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
    // Only test the preview size that is no larger than 1080p.
    public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
    // Default timeouts for reaching various states
    public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
    public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
    public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
    public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
    public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
    public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
    public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
    public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;

    public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
    public static final int SESSION_READY_TIMEOUT_MS = 5000;
    public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;

    public static final int MAX_READER_IMAGES = 5;

    public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
    public static final String REPORT_LOG_NAME = "CtsCameraTestCases";

    private static final int EXIF_DATETIME_LENGTH = 19;
    private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
    private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
    private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
    private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
    private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;

    private static final float ZOOM_RATIO_THRESHOLD = 0.01f;

    private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);

    static {
        sTestLocation0.setTime(1199145600000L);
        sTestLocation0.setLatitude(37.736071);
        sTestLocation0.setLongitude(-122.441983);
        sTestLocation0.setAltitude(21.0);

        sTestLocation1.setTime(1199145601000L);
        sTestLocation1.setLatitude(0.736071);
        sTestLocation1.setLongitude(0.441983);
        sTestLocation1.setAltitude(1.0);

        sTestLocation2.setTime(1199145602000L);
        sTestLocation2.setLatitude(-89.736071);
        sTestLocation2.setLongitude(-179.441983);
        sTestLocation2.setAltitude(100000.0);
    }

    // Exif test data vectors.
    public static final ExifTestData[] EXIF_TEST_DATA = {
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation0,
                    /* orientation */90,
                    /* jpgQuality */(byte) 80,
                    /* thumbQuality */(byte) 75),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation1,
                    /* orientation */180,
                    /* jpgQuality */(byte) 90,
                    /* thumbQuality */(byte) 85),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation2,
                    /* orientation */270,
                    /* jpgQuality */(byte) 100,
                    /* thumbQuality */(byte) 100)
    };

    /**
     * Create an {@link android.media.ImageReader} object and get the surface.
     *
     * @param size The size of this ImageReader to be created.
     * @param format The format of this ImageReader to be created
     * @param maxNumImages The max number of images that can be acquired simultaneously.
     * @param listener The listener used by this ImageReader to notify callbacks.
     * @param handler The handler to use for any listener callbacks.
     */
    public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener, Handler handler) {
        ImageReader reader;
        reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
                maxNumImages);
        reader.setOnImageAvailableListener(listener, handler);
        if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
        return reader;
    }

    /**
     * Create an ImageWriter and hook up the ImageListener.
     *
     * @param inputSurface The input surface of the ImageWriter.
     * @param maxImages The max number of Images that can be dequeued simultaneously.
     * @param listener The listener used by this ImageWriter to notify callbacks
     * @param handler The handler to post listener callbacks.
     * @return ImageWriter object created.
     */
    public static ImageWriter makeImageWriter(
            Surface inputSurface, int maxImages,
            ImageWriter.OnImageReleasedListener listener, Handler handler) {
        ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
        writer.setOnImageReleasedListener(listener, handler);
        return writer;
    }
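    /**
     * Illustrative usage sketch: a typical pairing of {@link #makeImageReader} with an
     * {@link ImageDropperListener} for cases where only the stream configuration matters and
     * frame contents can be discarded. The size and handler arguments are placeholders
     * supplied by the calling test.
     */
    private static ImageReader exampleMakeDroppingYuvReader(Size size, Handler handler) {
        // Every frame is closed as soon as it arrives, so the producer never stalls.
        ImageDropperListener dropper = new ImageDropperListener();
        return makeImageReader(size, ImageFormat.YUV_420_888, MAX_READER_IMAGES, dropper, handler);
    }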
    /**
     * Utility class to store the targets for mandatory stream combination test.
     */
    public static class StreamCombinationTargets {
        public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
        public List<ImageReader> mJpegTargets = new ArrayList<>();
        public List<ImageReader> mYuvTargets = new ArrayList<>();
        public List<ImageReader> mY8Targets = new ArrayList<>();
        public List<ImageReader> mRawTargets = new ArrayList<>();
        public List<ImageReader> mHeicTargets = new ArrayList<>();
        public List<ImageReader> mDepth16Targets = new ArrayList<>();

        public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();

        public void close() {
            for (SurfaceTexture target : mPrivTargets) {
                target.release();
            }
            for (ImageReader target : mJpegTargets) {
                target.close();
            }
            for (ImageReader target : mYuvTargets) {
                target.close();
            }
            for (ImageReader target : mY8Targets) {
                target.close();
            }
            for (ImageReader target : mRawTargets) {
                target.close();
            }
            for (ImageReader target : mHeicTargets) {
                target.close();
            }
            for (ImageReader target : mDepth16Targets) {
                target.close();
            }

            for (MultiResolutionImageReader target : mPrivMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mJpegMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mYuvMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mRawMultiResTargets) {
                target.close();
            }
        }
    }

    private static void configureTarget(StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
            int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig,
            boolean createMultiResiStreamConfig, ImageDropperListener listener, Handler handler) {
        if (createMultiResiStreamConfig) {
            Collection<MultiResolutionStreamInfo> multiResolutionStreams =
                    multiResStreamConfig.getOutputInfo(format);
            MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
                    multiResolutionStreams, format, numBuffers);
            multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
            Collection<OutputConfiguration> configs =
                    OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
            outputConfigs.addAll(configs);
            outputSurfaces.add(multiResReader.getSurface());
            switch (format) {
                case ImageFormat.PRIVATE:
                    targets.mPrivMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.JPEG:
                    targets.mJpegMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.YUV_420_888:
                    targets.mYuvMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.RAW_SENSOR:
                    targets.mRawMultiResTargets.add(multiResReader);
                    break;
                default:
                    fail("Unknown/Unsupported output format " + format);
            }
        } else {
            if (format == ImageFormat.PRIVATE) {
                SurfaceTexture target = new SurfaceTexture(/*random int*/1);
                target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
                OutputConfiguration config = new OutputConfiguration(new Surface(target));
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());
                targets.mPrivTargets.add(target);
            } else {
                ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
                        targetSize.getHeight(), format, numBuffers);
                target.setOnImageAvailableListener(listener, handler);
                OutputConfiguration config = new OutputConfiguration(target.getSurface());
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());

                switch (format) {
                    case ImageFormat.JPEG:
                        targets.mJpegTargets.add(target);
                        break;
                    case ImageFormat.YUV_420_888:
                        targets.mYuvTargets.add(target);
                        break;
                    case ImageFormat.Y8:
                        targets.mY8Targets.add(target);
                        break;
                    case ImageFormat.RAW_SENSOR:
                        targets.mRawTargets.add(target);
                        break;
                    case ImageFormat.HEIC:
                        targets.mHeicTargets.add(target);
                        break;
                    case ImageFormat.DEPTH16:
                        targets.mDepth16Targets.add(target);
                        break;
                    default:
                        fail("Unknown/Unsupported output format " + format);
                }
            }
        }
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridenPhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        List<Surface> uhSurfaces = new ArrayList<Surface>();
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
                numBuffers, substituteY8, substituteHeic, overridenPhysicalCameraId,
                multiResStreamConfig, handler);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {

        ImageDropperListener imageDropperListener = new ImageDropperListener();
        List<Surface> chosenSurfaces;
        for (MandatoryStreamInformation streamInfo : streamsInfo) {
            if (streamInfo.isInput()) {
                continue;
            }
            chosenSurfaces = outputSurfaces;
            if (streamInfo.isUltraHighResolution()) {
                chosenSurfaces = uhSurfaces;
            }
            int format = streamInfo.getFormat();
            if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
                format = ImageFormat.Y8;
            } else if (substituteHeic && (format == ImageFormat.JPEG)) {
                format = ImageFormat.HEIC;
            }
            Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
            availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
            Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
            boolean createMultiResReader =
                    (multiResStreamConfig != null &&
                     !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
                     streamInfo.isMaximumSize());
            switch (format) {
                case ImageFormat.PRIVATE:
                case ImageFormat.JPEG:
                case ImageFormat.YUV_420_888:
                case ImageFormat.Y8:
                case ImageFormat.HEIC:
                case ImageFormat.DEPTH16:
                {
                    configureTarget(targets, outputConfigs, chosenSurfaces, format,
                            targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
                            createMultiResReader, imageDropperListener, handler);
                    break;
                }
                case ImageFormat.RAW_SENSOR: {
                    // targetSize could be null in the logical camera case where only
                    // physical camera supports RAW stream.
                    if (targetSize != null) {
                        configureTarget(targets, outputConfigs, chosenSurfaces, format,
                                targetSize, numBuffers, overridePhysicalCameraId,
                                multiResStreamConfig, createMultiResReader, imageDropperListener,
                                handler);
                    }
                    break;
                }
                default:
                    fail("Unknown output format " + format);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageReader} object.
     * @param reader an {@link android.media.ImageReader} to close.
     */
    public static void closeImageReader(ImageReader reader) {
        if (reader != null) {
            reader.close();
        }
    }

    /**
     * Close the pending images and then close the currently active {@link ImageReader} objects.
     */
    public static void closeImageReaders(ImageReader[] readers) {
        if ((readers != null) && (readers.length > 0)) {
            for (ImageReader reader : readers) {
                CameraTestUtils.closeImageReader(reader);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageWriter} object.
     * @param writer an {@link android.media.ImageWriter} to close.
     */
    public static void closeImageWriter(ImageWriter writer) {
        if (writer != null) {
            writer.close();
        }
    }
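    /**
     * Illustrative usage sketch: the typical call pattern for
     * {@link #setupConfigurationTargets} during a mandatory-stream-combination test.
     * The streamsInfo list and handler are assumed to be provided by the surrounding test;
     * the physical camera id override and multi-resolution map are left unset here.
     */
    private static void exampleSetupCombinationTargets(
            List<MandatoryStreamInformation> streamsInfo, Handler handler) {
        StreamCombinationTargets targets = new StreamCombinationTargets();
        List<OutputConfiguration> outputConfigs = new ArrayList<>();
        List<Surface> outputSurfaces = new ArrayList<>();
        try {
            setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces,
                    MAX_READER_IMAGES, /*substituteY8*/ false, /*substituteHeic*/ false,
                    /*overridenPhysicalCameraId*/ null, /*multiResStreamConfig*/ null, handler);
            // ... configure a session with outputConfigs and issue captures here ...
        } finally {
            // Release every SurfaceTexture/ImageReader the helper created.
            targets.close();
        }
    }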
    /**
     * Dummy listener that releases the image immediately once it is available.
     *
     * <p>
     * It can be used for the case where we don't care about the image data at all.
     * </p>
     */
    public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    image.close();
                    mImagesDropped++;
                }
            }
        }

        public synchronized int getImageCount() {
            return mImagesDropped;
        }

        public synchronized void resetImageCount() {
            mImagesDropped = 0;
        }

        private int mImagesDropped = 0;
    }

    /**
     * Image listener that releases the image immediately after validating it.
     */
    public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
        private Size mSize;
        private int mFormat;

        public ImageVerifierListener(Size sz, int format) {
            mSize = sz;
            mFormat = format;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    // Should only do some quick validity checks in callback, as the ImageReader
                    // could be closed asynchronously, which will close all images acquired from
                    // this ImageReader.
                    checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
                    checkAndroidImageFormat(image);
                    image.close();
                }
            }
        }
    }
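    /**
     * Illustrative usage sketch: attaches an {@link ImageVerifierListener} so that every frame
     * from the reader is sanity-checked and then released. The size, format and handler are
     * placeholders chosen by the calling test.
     */
    private static ImageReader exampleMakeVerifyingReader(Size size, int format, Handler handler) {
        // Each arriving image is checked against the expected size/format, then closed.
        return makeImageReader(size, format, MAX_READER_IMAGES,
                new ImageVerifierListener(size, format), handler);
    }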
    public static class SimpleImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();
        // Indicate whether this listener will drop images or not,
        // when the queued images reach the reader's maxImages.
        private final boolean mAsyncMode;
        // maxImages held by the queue in async mode.
        private final int mMaxImages;

        /**
         * Create a synchronous SimpleImageReaderListener that queues the images
         * automatically when they are available, no image will be dropped. If
         * the caller doesn't call getImage(), the producer will eventually run
         * into buffer starvation.
         */
        public SimpleImageReaderListener() {
            mAsyncMode = false;
            mMaxImages = 0;
        }

        /**
         * Create a synchronous/asynchronous SimpleImageReaderListener that
         * queues the images automatically when they are available. For an
         * asynchronous listener, an image will be dropped once the number of
         * queued images reaches maxImages. If the caller doesn't call getImage(),
         * the producer will not be blocked. For a synchronous listener, no image
         * will be dropped. If the caller doesn't call getImage(), the producer
         * will eventually run into buffer starvation.
         *
         * @param asyncMode If the listener is operating at asynchronous mode.
         * @param maxImages The max number of images held by this listener.
         */
        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
            mAsyncMode = asyncMode;
            mMaxImages = maxImages;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                Image image = reader.acquireNextImage();
                if (image == null) {
                    return;
                }
                mQueue.put(image);
                if (mAsyncMode && mQueue.size() >= mMaxImages) {
                    Image img = mQueue.poll();
                    img.close();
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }

        /**
         * Drain the pending images held by this listener currently.
         */
        public void drain() {
            while (!mQueue.isEmpty()) {
                Image image = mQueue.poll();
                assertNotNull("Unable to get an image", image);
                image.close();
            }
        }
    }

    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
        private final Semaphore mImageReleasedSema = new Semaphore(0);
        private final ImageWriter mWriter;

        @Override
        public void onImageReleased(ImageWriter writer) {
            if (writer != mWriter) {
                return;
            }

            if (VERBOSE) {
                Log.v(TAG, "Input image is released");
            }
            mImageReleasedSema.release();
        }

        public SimpleImageWriterListener(ImageWriter writer) {
            if (writer == null) {
                throw new IllegalArgumentException("writer cannot be null");
            }
            mWriter = writer;
        }

        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
                fail("wait for image available timed out after " + timeoutMs + "ms");
            }
        }
    }

    public static class ImageAndMultiResStreamInfo {
        public final Image image;
        public final MultiResolutionStreamInfo streamInfo;

        public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
            this.image = image;
            this.streamInfo = streamInfo;
        }
    }
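    /**
     * Illustrative usage sketch: drains a burst of frames through a synchronous
     * {@link SimpleImageReaderListener}. The listener is assumed to already be attached to the
     * reader that receives the burst; CAPTURE_IMAGE_TIMEOUT_MS is the per-frame wait.
     */
    private static void exampleDrainBurst(SimpleImageReaderListener listener, int burstSize)
            throws InterruptedException {
        for (int i = 0; i < burstSize; i++) {
            // Blocks until the next frame is queued by the listener (or asserts on timeout).
            Image image = listener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
            // A real test would validate the image contents here before closing it.
            image.close();
        }
    }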
    public static class SimpleMultiResolutionImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
                int maxBuffers, boolean acquireLatest) {
            mOwner = owner;
            mMaxBuffers = maxBuffers;
            mAcquireLatest = acquireLatest;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (VERBOSE) Log.v(TAG, "new image available");

            if (mAcquireLatest) {
                mLastReader = reader;
                mImageAvailable.open();
            } else {
                if (mQueue.size() < mMaxBuffers) {
                    Image image = reader.acquireNextImage();
                    MultiResolutionStreamInfo multiResStreamInfo =
                            mOwner.getStreamInfoForImageReader(reader);
                    mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
                }
            }
        }

        public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
                throws Exception {
            if (mAcquireLatest) {
                Image image = null;
                if (mImageAvailable.block(timeoutMs)) {
                    if (mLastReader != null) {
                        image = mLastReader.acquireLatestImage();
                        if (VERBOSE) Log.v(TAG, "acquireLatestImage");
                    } else {
                        fail("invalid image reader");
                    }
                    mImageAvailable.close();
                } else {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return new ImageAndMultiResStreamInfo(image,
                        mOwner.getStreamInfoForImageReader(mLastReader));
            } else {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
                        java.util.concurrent.TimeUnit.MILLISECONDS);
                if (imageAndInfo == null) {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return imageAndInfo;
            }
        }

        public void reset() {
            while (!mQueue.isEmpty()) {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
                assertNotNull("Acquired image is not valid", imageAndInfo.image);
                imageAndInfo.image.close();
            }
            mImageAvailable.close();
            mLastReader = null;
        }

        private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
                new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
        private final MultiResolutionImageReader mOwner;
        private final int mMaxBuffers;
        private final boolean mAcquireLatest;
        private ConditionVariable mImageAvailable = new ConditionVariable();
        private ImageReader mLastReader = null;
    }

    public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
                new LinkedBlockingQueue<TotalCaptureResult>();
        private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
                new LinkedBlockingQueue<>();
        // (Surface, framenumber) pair for lost buffers
        private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
                new LinkedBlockingQueue<>();
        private final LinkedBlockingQueue<Integer> mAbortQueue =
                new LinkedBlockingQueue<>();
        // Pair<CaptureRequest, Long> is a pair of capture request and timestamp.
        private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
                new LinkedBlockingQueue<>();
        // Pair<Int, Long> is a pair of sequence id and frame number
        private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
                new LinkedBlockingQueue<>();

        private AtomicLong mNumFramesArrived = new AtomicLong(0);

        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            try {
                mCaptureStartQueue.put(new Pair(request, timestamp));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureStarted");
            }
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                mNumFramesArrived.incrementAndGet();
                mQueue.put(result);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            try {
                mFailureQueue.put(failure);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureFailed");
            }
        }

        @Override
        public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
            try {
                mAbortQueue.put(sequenceId);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureAborted");
            }
        }

        @Override
        public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
                long frameNumber) {
            try {
                mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceCompleted");
            }
        }

        @Override
        public void onCaptureBufferLost(CameraCaptureSession session,
                CaptureRequest request, Surface target, long frameNumber) {
            try {
                mBufferLostQueue.put(new Pair<>(target, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureBufferLost");
            }
        }

        public long getTotalNumFrames() {
            return mNumFramesArrived.get();
        }

        public CaptureResult getCaptureResult(long timeout) {
            return getTotalCaptureResult(timeout);
        }

        public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
            try {
                long currentTs = -1L;
                TotalCaptureResult result;
                while (true) {
                    result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                    if (result == null) {
                        throw new RuntimeException(
                                "Wait for a capture result timed out in " + timeout + "ms");
                    }
                    currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
                    if (currentTs == timestamp) {
                        return result;
                    }
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public TotalCaptureResult getTotalCaptureResult(long timeout) {
            try {
                TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        /**
         * Get the {@link #CaptureResult capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #CaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            return getTotalCaptureResultForRequest(myRequest, numResultsWait);
        }

        /**
         * Get the {@link #TotalCaptureResult total capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #TotalCaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
            captureRequests.add(myRequest);
            return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
        }

        /**
         * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
         * {@link #CaptureRequest capture requests}. This can be used when the order of results
         * may not be the same as the order of requests.
         *
         * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
         *            corresponding {@link #TotalCaptureResult capture results} are
         *            being waited for.
         * @param numResultsWait Number of frames to wait for the capture results
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before all the results matching captureRequests arrive.
         */
        public TotalCaptureResult[] getTotalCaptureResultsForRequests(
                List<CaptureRequest> captureRequests, int numResultsWait) {
            if (numResultsWait < 0) {
                throw new IllegalArgumentException("numResultsWait must be no less than 0");
            }
            if (captureRequests == null || captureRequests.size() == 0) {
                throw new IllegalArgumentException("captureRequests must have at least 1 request.");
            }

            // Create a request -> a list of result indices map that it will wait for.
            HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
            for (int i = 0; i < captureRequests.size(); i++) {
                CaptureRequest request = captureRequests.get(i);
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices == null) {
                    indices = new ArrayList<>();
                    remainingResultIndicesMap.put(request, indices);
                }
                indices.add(i);
            }

            TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
            int i = 0;
            do {
                TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                CaptureRequest request = result.getRequest();
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices != null) {
                    results[indices.get(0)] = result;
                    indices.remove(0);

                    // Remove the entry if all results for this request have been fulfilled.
                    if (indices.isEmpty()) {
                        remainingResultIndicesMap.remove(request);
                    }
                }

                if (remainingResultIndicesMap.isEmpty()) {
                    return results;
                }
            } while (i++ < numResultsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture result after "
                    + "waiting for " + numResultsWait + " results");
        }

        /**
         * Get an array list of {@link #CaptureFailure capture failures} with at most
         * maxNumFailures entries. If it times out before maxNumFailures failures are received,
         * return the failures received so far.
         *
         * @param maxNumFailures The maximal number of failures to return. If it times out before
         *            the maximal number of failures are received, return the received
         *            failures so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failure.
         */
        public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
            ArrayList<CaptureFailure> failures = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumFailures; i++) {
                    CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (failure == null) {
                        // If waiting on a failure times out, return the failures so far.
                        break;
                    }
                    failures.add(failure);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return failures;
        }

        /**
         * Get an array list of lost buffers with at most maxNumLost entries.
         * If it times out before maxNumLost buffer lost callbacks are received, return the
         * lost callbacks received so far.
         *
         * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
         *            before the maximal number of failures are received, return the received
         *            buffer lost failures so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failure.
         */
        public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
            ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumLost; i++) {
                    Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (failure == null) {
                        // If waiting on a failure times out, return the failures so far.
                        break;
                    }
                    failures.add(failure);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return failures;
        }

        /**
         * Get an array list of aborted capture sequence ids with at most maxNumAborts entries.
         * If it times out before maxNumAborts are received, return the aborted sequences
         * received so far.
         *
         * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
         *            before the maximal number of aborts are received, return the received
         *            failed sequences so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failed
         *            sequences.
         */
        public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
            ArrayList<Integer> abortList = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumAborts; i++) {
                    Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (abortSequence == null) {
                        break;
                    }
                    abortList.add(abortSequence);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return abortList;
        }

        /**
         * Wait until the capture start of a request and expected timestamp arrives or it times
         * out after a number of capture starts.
         *
         * @param request The request for the capture start to wait for.
         * @param timestamp The timestamp for the capture start to wait for.
         * @param numCaptureStartsWait The number of capture start events to wait for before
         *            timing out.
         */
        public void waitForCaptureStart(CaptureRequest request, Long timestamp,
                int numCaptureStartsWait) throws Exception {
            Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);

            int i = 0;
            do {
                Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
                        CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);

                if (shutter == null) {
                    throw new TimeoutRuntimeException("Unable to get any more capture start " +
                            "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
                } else if (expectedShutter.equals(shutter)) {
                    return;
                }

            } while (i++ < numCaptureStartsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture start " +
                    "event after waiting for " + numCaptureStartsWait + " capture starts");
        }

        /**
         * Wait until it receives the capture sequence completed callback for a given sequence ID.
         *
         * @param sequenceId The sequence ID of the capture sequence completed callback to wait
         *            for.
         * @param timeoutMs Time to wait for each capture sequence complete callback before
         *            timing out.
         */
        public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
            try {
                while (true) {
                    Pair<Integer, Long> completedSequence =
                            mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
                    assertNotNull("Wait for a capture sequence completed timed out in " +
                            timeoutMs + "ms", completedSequence);

                    if (completedSequence.first.equals(sequenceId)) {
                        return completedSequence.second.longValue();
                    }
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public boolean hasMoreResults() {
            return !mQueue.isEmpty();
        }

        public boolean hasMoreFailures() {
            return !mFailureQueue.isEmpty();
        }

        public int getNumLostBuffers() {
            return mBufferLostQueue.size();
        }

        public boolean hasMoreAbortedSequences() {
            return !mAbortQueue.isEmpty();
        }

        public void drain() {
            mQueue.clear();
            mNumFramesArrived.getAndSet(0);
            mFailureQueue.clear();
            mBufferLostQueue.clear();
            mCaptureStartQueue.clear();
            mAbortQueue.clear();
        }
    }
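    /**
     * Illustrative usage sketch: the usual capture-and-wait loop built around
     * {@link SimpleCaptureCallback}. The session, request and handler are assumed to have been
     * set up by the calling test.
     */
    private static TotalCaptureResult exampleCaptureAndWait(CameraCaptureSession session,
            CaptureRequest request, Handler handler) throws CameraAccessException {
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
        int sequenceId = session.capture(request, resultListener, handler);
        // Block until the matching result arrives (or fail after CAPTURE_RESULT_TIMEOUT_MS).
        TotalCaptureResult result =
                resultListener.getTotalCaptureResultForRequest(request, /*numResultsWait*/ 1);
        // The sequence-completed callback carries the last frame number of this capture.
        long lastFrameNumber = resultListener.getCaptureSequenceLastFrameNumber(
                sequenceId, CAPTURE_RESULT_TIMEOUT_MS);
        if (VERBOSE) {
            Log.v(TAG, "Last frame number for sequence " + sequenceId + ": " + lastFrameNumber);
        }
        return result;
    }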
    public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
        int [] capabilities =
                characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        for (int c : capabilities) {
            if (c == capability) {
                return true;
            }
        }
        return false;
    }

    public static boolean isSystemCamera(CameraManager manager, String cameraId)
            throws CameraAccessException {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        return hasCapability(characteristics,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
    }

    public static String[] getCameraIdListForTesting(CameraManager manager,
            boolean getSystemCameras)
            throws CameraAccessException {
        String [] ids = manager.getCameraIdListNoLazy();
        List<String> idsForTesting = new ArrayList<String>();
        for (String id : ids) {
            boolean isSystemCamera = isSystemCamera(manager, id);
            if (getSystemCameras == isSystemCamera) {
                idsForTesting.add(id);
            }
        }
        return idsForTesting.toArray(new String[idsForTesting.size()]);
    }

    public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
            boolean getSystemCameras)
            throws CameraAccessException {
        Set<String> cameraIds = new HashSet<String>(
                Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
        Set<Set<String>> combinations = manager.getConcurrentCameraIds();
        Set<Set<String>> correctComb = new HashSet<Set<String>>();
        for (Set<String> comb : combinations) {
            Set<String> filteredIds = new HashSet<String>();
            for (String id : comb) {
                if (cameraIds.contains(id)) {
                    filteredIds.add(id);
                }
            }
            if (filteredIds.isEmpty()) {
                continue;
            }
            correctComb.add(filteredIds);
        }
        return correctComb;
    }
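    /**
     * Illustrative usage sketch: filters the testable (non-system) camera ids down to those
     * advertising a particular capability, using the id-list and capability helpers above.
     * The manager is assumed to come from the calling test's context.
     */
    private static List<String> exampleIdsWithCapability(CameraManager manager, int capability)
            throws CameraAccessException {
        List<String> matchingIds = new ArrayList<>();
        for (String id : getCameraIdListForTesting(manager, /*getSystemCameras*/ false)) {
            if (hasCapability(manager.getCameraCharacteristics(id), capability)) {
                matchingIds.add(id);
            }
        }
        return matchingIds;
    }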
    /**
     * Block until the camera is opened.
     *
     * <p>Don't use this to test #onDisconnected/#onError since this will throw
     * an AssertionError if it fails to open the camera device.</p>
     *
     * @return CameraDevice opened camera device
     *
     * @throws IllegalArgumentException
     *            If the handler is null, or if the handler's looper is current.
     * @throws CameraAccessException
     *            If open fails immediately.
     * @throws BlockingOpenException
     *            If open fails after blocking for some amount of time.
     * @throws TimeoutRuntimeException
     *            If opening times out. Typically unrecoverable.
     */
    public static CameraDevice openCamera(CameraManager manager, String cameraId,
            CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
            BlockingOpenException {

        /**
         * Although camera2 API allows 'null' Handler (it will just use the current
         * thread's Looper), this is not what we want for CTS.
         *
         * In CTS the default looper is used only to process events in between test runs,
         * so anything sent there would not be executed inside a test and the test would fail.
         *
         * In this case, BlockingCameraManager#openCamera performs the check for us.
         */
        return (new BlockingCameraManager(manager)).openCamera(cameraId, listener, handler);
    }

    /**
     * Block until the camera is opened.
     *
     * <p>Don't use this to test #onDisconnected/#onError since this will throw
     * an AssertionError if it fails to open the camera device.</p>
     *
     * @throws IllegalArgumentException
     *            If the handler is null, or if the handler's looper is current.
     * @throws CameraAccessException
     *            If open fails immediately.
     * @throws BlockingOpenException
     *            If open fails after blocking for some amount of time.
     * @throws TimeoutRuntimeException
     *            If opening times out. Typically unrecoverable.
     */
    public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
            throws CameraAccessException,
            BlockingOpenException {
        return openCamera(manager, cameraId, /*listener*/null, handler);
    }
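    /**
     * Illustrative usage sketch: opens a camera with the blocking helper above, lets the caller
     * run captures against it, and guarantees it is closed afterwards. The manager, cameraId and
     * handler are assumed to come from the calling test.
     */
    private static void exampleWithOpenCamera(CameraManager manager, String cameraId,
            Handler handler) throws CameraAccessException, BlockingOpenException {
        CameraDevice camera = openCamera(manager, cameraId, handler);
        try {
            // ... create a session and run captures against 'camera' here ...
        } finally {
            camera.close();
        }
    }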
    /**
     * Configure a new camera session with output surfaces and type.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces, boolean isHighSpeed,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        if (isHighSpeed) {
            camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
                    sessionListener, handler);
        } else {
            camera.createCaptureSession(outputSurfaces, sessionListener, handler);
        }
        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        String sessionType = isHighSpeed ? "High Speed" : "Normal";
        assertTrue("Capture session type must be " + sessionType,
                isHighSpeed ==
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
                        session.getClass()));

        return session;
    }

    /**
     * Build a new constrained camera session with output surfaces, type and recording session
     * parameters.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     * @param initialRequest Initial request settings to use as session parameters.
     */
    public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
            Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);

        List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
        for (Surface surface : outputSurfaces) {
            outConfigurations.add(new OutputConfiguration(surface));
        }
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
                new HandlerExecutor(handler), sessionListener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        assertTrue("Capture session type must be High Speed",
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
                        session.getClass()));

        return session;
    }

    /**
     * Configure a new camera session with output configurations.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputs The OutputConfiguration list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
            List<OutputConfiguration> outputs,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        return session;
    }

    /**
     * Try to configure a new camera session with output configurations.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputs The OutputConfiguration list that is used for camera output.
     * @param initialRequest The session parameters passed in during stream configuration
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
            List<OutputConfiguration> outputs, CaptureRequest initialRequest,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
                sessionListener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
                BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
        int state = sessionListener.getStateWaiter().waitForAnyOfStates(
                Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);

        CameraCaptureSession session = null;
        if (state == BlockingSessionCallback.SESSION_READY) {
            session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
            assertFalse("Camera session should not be a reprocessable session",
                    session.isReprocessable());
        }
        return session;
    }

    /**
     * Configure a new camera session with output surfaces and initial session parameters.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when session is available.
     * @param handler The handler used to notify callbacks.
     * @param initialRequest Initial request settings to use as session parameters.
     */
    public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
            List<Surface> outputSurfaces, BlockingSessionCallback listener,
            Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
        List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
        for (Surface surface : outputSurfaces) {
            outConfigurations.add(new OutputConfiguration(surface));
        }
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outConfigurations,
                new HandlerExecutor(handler), listener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        assertFalse("Capture session type must be regular",
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
                        session.getClass()));

        return session;
    }
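    /**
     * Illustrative usage sketch: brings up a simple repeating preview on a single output surface
     * using the session helpers above. The previewSurface and handler are placeholders owned by
     * the calling test.
     */
    private static CameraCaptureSession exampleStartPreview(CameraDevice camera,
            Surface previewSurface, Handler handler) throws CameraAccessException {
        List<Surface> outputs = new ArrayList<>();
        outputs.add(previewSurface);
        // Blocks until the session is configured (or asserts on timeout/failure).
        CameraCaptureSession session =
                configureCameraSession(camera, outputs, new BlockingSessionCallback(), handler);
        CaptureRequest.Builder previewBuilder =
                camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewBuilder.addTarget(previewSurface);
        // Results are collected by a SimpleCaptureCallback so the test can inspect them later.
        session.setRepeatingRequest(previewBuilder.build(), new SimpleCaptureCallback(), handler);
        return session;
    }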
    /**
     * Configure a new camera session with output surfaces.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {

        return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
                listener, handler);
    }

    public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
            InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        for (Surface surface : outputSurfaces) {
            outputConfigs.add(new OutputConfiguration(surface));
        }
        CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
                camera, inputConfiguration, outputConfigs, listener, handler);

        return session;
    }

    public static CameraCaptureSession configureReprocessableCameraSessionWithConfigurations(
            CameraDevice camera, InputConfiguration inputConfiguration,
            List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener,
            Handler handler) throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler),
                sessionListener);
        sessionConfig.setInputConfiguration(inputConfiguration);
        camera.createCaptureSession(sessionConfig);

        Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
                BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
        int state = sessionListener.getStateWaiter().waitForAnyOfStates(
                Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);

        assertTrue("Creating a reprocessable session failed.",
                state == BlockingSessionCallback.SESSION_READY);
        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertTrue("Camera session should be a reprocessable session", session.isReprocessable());

        return session;
    }
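    /**
     * Illustrative usage sketch: ties a reprocessable session to an ImageWriter. The input size,
     * input format, output surfaces and handler are placeholders chosen by the calling test, and
     * the writer/listener wiring shown here is only one possible arrangement.
     */
    private static ImageWriter exampleCreateReprocessWriter(CameraDevice camera,
            Size inputSize, int inputFormat, List<Surface> outputSurfaces, Handler handler)
            throws CameraAccessException {
        InputConfiguration inputConfig =
                new InputConfiguration(inputSize.getWidth(), inputSize.getHeight(), inputFormat);
        CameraCaptureSession session = configureReprocessableCameraSession(
                camera, inputConfig, outputSurfaces, new BlockingSessionCallback(), handler);
        // The session's input surface is where reprocess input images get queued.
        ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), MAX_READER_IMAGES);
        writer.setOnImageReleasedListener(new SimpleImageWriterListener(writer), handler);
        return writer;
    }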
BlockingSessionCallback(listener); 1401 SessionConfiguration sessionConfig = new SessionConfiguration( 1402 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler), 1403 sessionListener); 1404 sessionConfig.setInputConfiguration(inputConfiguration); 1405 camera.createCaptureSession(sessionConfig); 1406 1407 Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY, 1408 BlockingSessionCallback.SESSION_CONFIGURE_FAILED}; 1409 int state = sessionListener.getStateWaiter().waitForAnyOfStates( 1410 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS); 1411 1412 assertTrue("Creating a reprocessable session failed.", 1413 state == BlockingSessionCallback.SESSION_READY); 1414 CameraCaptureSession session = 1415 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS); 1416 assertTrue("Camera session should be a reprocessable session", session.isReprocessable()); 1417 1418 return session; 1419 } 1420 1421 /** 1422 * Create a reprocessable camera session with input and output configurations. 1423 * 1424 * @param camera The CameraDevice to be configured. 1425 * @param inputConfiguration The input configuration used to create this session. 1426 * @param outputs The output configurations used to create this session. 1427 * @param listener The callback CameraDevice will notify when capture results are available. 1428 * @param handler The handler used to notify callbacks. 1429 * @return The session ready to use. 1430 * @throws CameraAccessException 1431 */ configureReprocCameraSessionWithConfig(CameraDevice camera, InputConfiguration inputConfiguration, List<OutputConfiguration> outputs, CameraCaptureSession.StateCallback listener, Handler handler)1432 public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera, 1433 InputConfiguration inputConfiguration, List<OutputConfiguration> outputs, 1434 CameraCaptureSession.StateCallback listener, Handler handler) 1435 throws CameraAccessException { 1436 BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener); 1437 camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs, 1438 sessionListener, handler); 1439 1440 Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY, 1441 BlockingSessionCallback.SESSION_CONFIGURE_FAILED}; 1442 int state = sessionListener.getStateWaiter().waitForAnyOfStates( 1443 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS); 1444 1445 assertTrue("Creating a reprocessable session failed.", 1446 state == BlockingSessionCallback.SESSION_READY); 1447 1448 CameraCaptureSession session = 1449 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS); 1450 assertTrue("Camera session should be a reprocessable session", session.isReprocessable()); 1451 1452 return session; 1453 } 1454 assertArrayNotEmpty(T arr, String message)1455 public static <T> void assertArrayNotEmpty(T arr, String message) { 1456 assertTrue(message, arr != null && Array.getLength(arr) > 0); 1457 } 1458 1459 /** 1460 * Check if the format is a legal YUV format camera supported. 1461 */ checkYuvFormat(int format)1462 public static void checkYuvFormat(int format) { 1463 if ((format != ImageFormat.YUV_420_888) && 1464 (format != ImageFormat.NV21) && 1465 (format != ImageFormat.YV12)) { 1466 fail("Wrong formats: " + format); 1467 } 1468 } 1469 1470 /** 1471 * Check if image size and format match given size and format. 
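     *
     * <p>For example, a test that captures 640x480 YUV output would typically call
     * {@code checkImage(image, 640, 480, ImageFormat.YUV_420_888)} on each acquired
     * image (the size here is illustrative only).</p>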
1472 */ checkImage(Image image, int width, int height, int format)1473 public static void checkImage(Image image, int width, int height, int format) { 1474 // Image reader will wrap YV12/NV21 image by YUV_420_888 1475 if (format == ImageFormat.NV21 || format == ImageFormat.YV12) { 1476 format = ImageFormat.YUV_420_888; 1477 } 1478 assertNotNull("Input image is invalid", image); 1479 assertEquals("Format doesn't match", format, image.getFormat()); 1480 assertEquals("Width doesn't match", width, image.getWidth()); 1481 assertEquals("Height doesn't match", height, image.getHeight()); 1482 } 1483 1484 /** 1485 * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked 1486 * 1-D linear byte array, such that it can be write into disk, or accessed by 1487 * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input 1488 * Image format.</p> 1489 * 1490 * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains 1491 * the Y plane data first, followed by U(Cb), V(Cr) planes if there is any 1492 * (xstride = width, ystride = height for chroma and luma components).</p> 1493 * 1494 * <p>For JPEG, it returns a 1-D byte array contains a complete JPEG image.</p> 1495 * 1496 * <p>For YUV P010, it returns a byte array that contains Y plane first, followed 1497 * by the interleaved U(Cb)/V(Cr) plane.</p> 1498 */ getDataFromImage(Image image)1499 public static byte[] getDataFromImage(Image image) { 1500 assertNotNull("Invalid image:", image); 1501 int format = image.getFormat(); 1502 int width = image.getWidth(); 1503 int height = image.getHeight(); 1504 int rowStride, pixelStride; 1505 byte[] data = null; 1506 1507 // Read image data 1508 Plane[] planes = image.getPlanes(); 1509 assertTrue("Fail to get image planes", planes != null && planes.length > 0); 1510 1511 // Check image validity 1512 checkAndroidImageFormat(image); 1513 1514 ByteBuffer buffer = null; 1515 // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer. 1516 // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, and HEIC 1517 if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD || 1518 format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG || 1519 format == ImageFormat.HEIC) { 1520 buffer = planes[0].getBuffer(); 1521 assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer); 1522 data = new byte[buffer.remaining()]; 1523 buffer.get(data); 1524 buffer.rewind(); 1525 return data; 1526 } else if (format == ImageFormat.YCBCR_P010) { 1527 // P010 samples are stored within 16 bit values 1528 int offset = 0; 1529 int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8; 1530 data = new byte[width * height * bytesPerPixelRounded]; 1531 assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length, 1532 planes.length == 3); 1533 for (int i = 0; i < 2; i++) { 1534 buffer = planes[i].getBuffer(); 1535 assertNotNull("Fail to get bytebuffer from plane", buffer); 1536 buffer.rewind(); 1537 rowStride = planes[i].getRowStride(); 1538 if (VERBOSE) { 1539 Log.v(TAG, "rowStride " + rowStride); 1540 Log.v(TAG, "width " + width); 1541 Log.v(TAG, "height " + height); 1542 } 1543 int h = (i == 0) ? 
height : height / 2; 1544 for (int row = 0; row < h; row++) { 1545 int length = rowStride; 1546 buffer.get(data, offset, length); 1547 offset += length; 1548 } 1549 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i); 1550 buffer.rewind(); 1551 } 1552 return data; 1553 } 1554 1555 int offset = 0; 1556 data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8]; 1557 int maxRowSize = planes[0].getRowStride(); 1558 for (int i = 0; i < planes.length; i++) { 1559 if (maxRowSize < planes[i].getRowStride()) { 1560 maxRowSize = planes[i].getRowStride(); 1561 } 1562 } 1563 byte[] rowData = new byte[maxRowSize]; 1564 if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes"); 1565 for (int i = 0; i < planes.length; i++) { 1566 buffer = planes[i].getBuffer(); 1567 assertNotNull("Fail to get bytebuffer from plane", buffer); 1568 buffer.rewind(); 1569 rowStride = planes[i].getRowStride(); 1570 pixelStride = planes[i].getPixelStride(); 1571 assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0); 1572 if (VERBOSE) { 1573 Log.v(TAG, "pixelStride " + pixelStride); 1574 Log.v(TAG, "rowStride " + rowStride); 1575 Log.v(TAG, "width " + width); 1576 Log.v(TAG, "height " + height); 1577 } 1578 // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling. 1579 int w = (i == 0) ? width : width / 2; 1580 int h = (i == 0) ? height : height / 2; 1581 assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w); 1582 for (int row = 0; row < h; row++) { 1583 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8; 1584 int length; 1585 if (pixelStride == bytesPerPixel) { 1586 // Special case: optimized read of the entire row 1587 length = w * bytesPerPixel; 1588 buffer.get(data, offset, length); 1589 offset += length; 1590 } else { 1591 // Generic case: should work for any pixelStride but slower. 
1592 // Use intermediate buffer to avoid read byte-by-byte from 1593 // DirectByteBuffer, which is very bad for performance 1594 length = (w - 1) * pixelStride + bytesPerPixel; 1595 buffer.get(rowData, 0, length); 1596 for (int col = 0; col < w; col++) { 1597 data[offset++] = rowData[col * pixelStride]; 1598 } 1599 } 1600 // Advance buffer the remainder of the row stride 1601 if (row < h - 1) { 1602 buffer.position(buffer.position() + rowStride - length); 1603 } 1604 } 1605 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i); 1606 buffer.rewind(); 1607 } 1608 return data; 1609 } 1610 1611 /** 1612 * <p>Check android image format validity for an image, only support below formats:</p> 1613 * 1614 * <p>YUV_420_888/NV21/YV12, can add more for future</p> 1615 */ checkAndroidImageFormat(Image image)1616 public static void checkAndroidImageFormat(Image image) { 1617 int format = image.getFormat(); 1618 Plane[] planes = image.getPlanes(); 1619 switch (format) { 1620 case ImageFormat.YUV_420_888: 1621 case ImageFormat.NV21: 1622 case ImageFormat.YV12: 1623 case ImageFormat.YCBCR_P010: 1624 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length); 1625 break; 1626 case ImageFormat.JPEG: 1627 case ImageFormat.RAW_SENSOR: 1628 case ImageFormat.RAW_PRIVATE: 1629 case ImageFormat.DEPTH16: 1630 case ImageFormat.DEPTH_POINT_CLOUD: 1631 case ImageFormat.DEPTH_JPEG: 1632 case ImageFormat.Y8: 1633 case ImageFormat.HEIC: 1634 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length); 1635 break; 1636 default: 1637 fail("Unsupported Image Format: " + format); 1638 } 1639 } 1640 dumpFile(String fileName, Bitmap data)1641 public static void dumpFile(String fileName, Bitmap data) { 1642 FileOutputStream outStream; 1643 try { 1644 Log.v(TAG, "output will be saved as " + fileName); 1645 outStream = new FileOutputStream(fileName); 1646 } catch (IOException ioe) { 1647 throw new RuntimeException("Unable to create debug output file " + fileName, ioe); 1648 } 1649 1650 try { 1651 data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream); 1652 outStream.close(); 1653 } catch (IOException ioe) { 1654 throw new RuntimeException("failed writing data to file " + fileName, ioe); 1655 } 1656 } 1657 dumpFile(String fileName, byte[] data)1658 public static void dumpFile(String fileName, byte[] data) { 1659 FileOutputStream outStream; 1660 try { 1661 Log.v(TAG, "output will be saved as " + fileName); 1662 outStream = new FileOutputStream(fileName); 1663 } catch (IOException ioe) { 1664 throw new RuntimeException("Unable to create debug output file " + fileName, ioe); 1665 } 1666 1667 try { 1668 outStream.write(data); 1669 outStream.close(); 1670 } catch (IOException ioe) { 1671 throw new RuntimeException("failed writing data to file " + fileName, ioe); 1672 } 1673 } 1674 1675 /** 1676 * Get the available output sizes for the user-defined {@code format}. 
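     *
     * <p>For instance, the JPEG output sizes of a camera can be queried as in the sketch
     * below (the camera id and manager are assumed to come from the test fixture):</p>
     * <pre>{@code
     * Size[] jpegSizes = getSupportedSizeForFormat(ImageFormat.JPEG, cameraId, cameraManager);
     * }</pre>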
1677 * 1678 * <p>Note that implementation-defined/hidden formats are not supported.</p> 1679 */ getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager)1680 public static Size[] getSupportedSizeForFormat(int format, String cameraId, 1681 CameraManager cameraManager) throws CameraAccessException { 1682 CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId); 1683 assertNotNull("Can't get camera characteristics!", properties); 1684 if (VERBOSE) { 1685 Log.v(TAG, "get camera characteristics for camera: " + cameraId); 1686 } 1687 StreamConfigurationMap configMap = 1688 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 1689 Size[] availableSizes = configMap.getOutputSizes(format); 1690 assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " 1691 + format); 1692 Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format); 1693 if (highResAvailableSizes != null && highResAvailableSizes.length > 0) { 1694 Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length]; 1695 System.arraycopy(availableSizes, 0, allSizes, 0, 1696 availableSizes.length); 1697 System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, 1698 highResAvailableSizes.length); 1699 availableSizes = allSizes; 1700 } 1701 if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes)); 1702 return availableSizes; 1703 } 1704 1705 /** 1706 * Get the available output sizes for the given class. 1707 * 1708 */ getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager)1709 public static Size[] getSupportedSizeForClass(Class klass, String cameraId, 1710 CameraManager cameraManager) throws CameraAccessException { 1711 CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId); 1712 assertNotNull("Can't get camera characteristics!", properties); 1713 if (VERBOSE) { 1714 Log.v(TAG, "get camera characteristics for camera: " + cameraId); 1715 } 1716 StreamConfigurationMap configMap = 1717 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 1718 Size[] availableSizes = configMap.getOutputSizes(klass); 1719 assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: " 1720 + klass); 1721 Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE); 1722 if (highResAvailableSizes != null && highResAvailableSizes.length > 0) { 1723 Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length]; 1724 System.arraycopy(availableSizes, 0, allSizes, 0, 1725 availableSizes.length); 1726 System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, 1727 highResAvailableSizes.length); 1728 availableSizes = allSizes; 1729 } 1730 if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes)); 1731 return availableSizes; 1732 } 1733 1734 /** 1735 * Size comparator that compares the number of pixels it covers. 1736 * 1737 * <p>If two the areas of two sizes are same, compare the widths.</p> 1738 */ 1739 public static class SizeComparator implements Comparator<Size> { 1740 @Override compare(Size lhs, Size rhs)1741 public int compare(Size lhs, Size rhs) { 1742 return CameraUtils 1743 .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight()); 1744 } 1745 } 1746 1747 /** 1748 * Get sorted size list in descending order. Remove the sizes larger than 1749 * the bound. 
If the bound is null, don't do the size bound filtering. 1750 */ getSupportedPreviewSizes(String cameraId, CameraManager cameraManager, Size bound)1751 static public List<Size> getSupportedPreviewSizes(String cameraId, 1752 CameraManager cameraManager, Size bound) throws CameraAccessException { 1753 1754 Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId, 1755 cameraManager); 1756 assertArrayNotEmpty(rawSizes, 1757 "Available sizes for SurfaceHolder class should not be empty"); 1758 if (VERBOSE) { 1759 Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes)); 1760 } 1761 1762 if (bound == null) { 1763 return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false); 1764 } 1765 1766 List<Size> sizes = new ArrayList<Size>(); 1767 for (Size sz: rawSizes) { 1768 if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) { 1769 sizes.add(sz); 1770 } 1771 } 1772 return getAscendingOrderSizes(sizes, /*ascending*/false); 1773 } 1774 1775 /** 1776 * Get a sorted list of sizes from a given size list. 1777 * 1778 * <p> 1779 * The size is compare by area it covers, if the areas are same, then 1780 * compare the widths. 1781 * </p> 1782 * 1783 * @param sizeList The input size list to be sorted 1784 * @param ascending True if the order is ascending, otherwise descending order 1785 * @return The ordered list of sizes 1786 */ getAscendingOrderSizes(final List<Size> sizeList, boolean ascending)1787 static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) { 1788 if (sizeList == null) { 1789 throw new IllegalArgumentException("sizeList shouldn't be null"); 1790 } 1791 1792 Comparator<Size> comparator = new SizeComparator(); 1793 List<Size> sortedSizes = new ArrayList<Size>(); 1794 sortedSizes.addAll(sizeList); 1795 Collections.sort(sortedSizes, comparator); 1796 if (!ascending) { 1797 Collections.reverse(sortedSizes); 1798 } 1799 1800 return sortedSizes; 1801 } 1802 1803 /** 1804 * Get sorted (descending order) size list for given format. Remove the sizes larger than 1805 * the bound. If the bound is null, don't do the size bound filtering. 1806 */ getSortedSizesForFormat(String cameraId, CameraManager cameraManager, int format, Size bound)1807 static public List<Size> getSortedSizesForFormat(String cameraId, 1808 CameraManager cameraManager, int format, Size bound) throws CameraAccessException { 1809 Comparator<Size> comparator = new SizeComparator(); 1810 Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager); 1811 List<Size> sortedSizes = null; 1812 if (bound != null) { 1813 sortedSizes = new ArrayList<Size>(/*capacity*/1); 1814 for (Size sz : sizes) { 1815 if (comparator.compare(sz, bound) <= 0) { 1816 sortedSizes.add(sz); 1817 } 1818 } 1819 } else { 1820 sortedSizes = Arrays.asList(sizes); 1821 } 1822 assertTrue("Supported size list should have at least one element", 1823 sortedSizes.size() > 0); 1824 1825 Collections.sort(sortedSizes, comparator); 1826 // Make it in descending order. 1827 Collections.reverse(sortedSizes); 1828 return sortedSizes; 1829 } 1830 1831 /** 1832 * Get supported video size list for a given camera device. 1833 * 1834 * <p> 1835 * Filter out the sizes that are larger than the bound. If the bound is 1836 * null, don't do the size bound filtering. 
1837 * </p> 1838 */ getSupportedVideoSizes(String cameraId, CameraManager cameraManager, Size bound)1839 static public List<Size> getSupportedVideoSizes(String cameraId, 1840 CameraManager cameraManager, Size bound) throws CameraAccessException { 1841 1842 Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class, 1843 cameraId, cameraManager); 1844 assertArrayNotEmpty(rawSizes, 1845 "Available sizes for MediaRecorder class should not be empty"); 1846 if (VERBOSE) { 1847 Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes)); 1848 } 1849 1850 if (bound == null) { 1851 return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false); 1852 } 1853 1854 List<Size> sizes = new ArrayList<Size>(); 1855 for (Size sz: rawSizes) { 1856 if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) { 1857 sizes.add(sz); 1858 } 1859 } 1860 return getAscendingOrderSizes(sizes, /*ascending*/false); 1861 } 1862 1863 /** 1864 * Get supported video size list (descending order) for a given camera device. 1865 * 1866 * <p> 1867 * Filter out the sizes that are larger than the bound. If the bound is 1868 * null, don't do the size bound filtering. 1869 * </p> 1870 */ getSupportedStillSizes(String cameraId, CameraManager cameraManager, Size bound)1871 static public List<Size> getSupportedStillSizes(String cameraId, 1872 CameraManager cameraManager, Size bound) throws CameraAccessException { 1873 return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound); 1874 } 1875 getSupportedHeicSizes(String cameraId, CameraManager cameraManager, Size bound)1876 static public List<Size> getSupportedHeicSizes(String cameraId, 1877 CameraManager cameraManager, Size bound) throws CameraAccessException { 1878 return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound); 1879 } 1880 getMinPreviewSize(String cameraId, CameraManager cameraManager)1881 static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager) 1882 throws CameraAccessException { 1883 List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null); 1884 return sizes.get(sizes.size() - 1); 1885 } 1886 1887 /** 1888 * Get max supported preview size for a camera device. 1889 */ getMaxPreviewSize(String cameraId, CameraManager cameraManager)1890 static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager) 1891 throws CameraAccessException { 1892 return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null); 1893 } 1894 1895 /** 1896 * Get max preview size for a camera device in the supported sizes that are no larger 1897 * than the bound. 1898 */ getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)1899 static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound) 1900 throws CameraAccessException { 1901 List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound); 1902 return sizes.get(0); 1903 } 1904 1905 /** 1906 * Get max depth size for a camera device. 1907 */ getMaxDepthSize(String cameraId, CameraManager cameraManager)1908 static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager) 1909 throws CameraAccessException { 1910 List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16, 1911 /*bound*/ null); 1912 return sizes.get(0); 1913 } 1914 1915 /** 1916 * Get the largest size by area. 
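     * <p>For example, {@code getMaxSize(new Size(1920, 1080), new Size(3840, 2160))}
     * returns the 3840x2160 entry, since it covers the most pixels.</p>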
1917 * 1918 * @param sizes an array of sizes, must have at least 1 element 1919 * 1920 * @return Largest Size 1921 * 1922 * @throws IllegalArgumentException if sizes was null or had 0 elements 1923 */ getMaxSize(Size... sizes)1924 public static Size getMaxSize(Size... sizes) { 1925 if (sizes == null || sizes.length == 0) { 1926 throw new IllegalArgumentException("sizes was empty"); 1927 } 1928 1929 Size sz = sizes[0]; 1930 for (Size size : sizes) { 1931 if (size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) { 1932 sz = size; 1933 } 1934 } 1935 1936 return sz; 1937 } 1938 1939 /** 1940 * Get the largest size by area within (less than) bound 1941 * 1942 * @param sizes an array of sizes, must have at least 1 element 1943 * 1944 * @return Largest Size. Null if no such size exists within bound. 1945 * 1946 * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid. 1947 */ getMaxSizeWithBound(Size[] sizes, int bound)1948 public static Size getMaxSizeWithBound(Size[] sizes, int bound) { 1949 if (sizes == null || sizes.length == 0) { 1950 throw new IllegalArgumentException("sizes was empty"); 1951 } 1952 if (bound <= 0) { 1953 throw new IllegalArgumentException("bound is invalid"); 1954 } 1955 1956 Size sz = null; 1957 for (Size size : sizes) { 1958 if (size.getWidth() * size.getHeight() >= bound) { 1959 continue; 1960 } 1961 1962 if (sz == null || 1963 size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) { 1964 sz = size; 1965 } 1966 } 1967 1968 return sz; 1969 } 1970 1971 /** 1972 * Returns true if the given {@code array} contains the given element. 1973 * 1974 * @param array {@code array} to check for {@code elem} 1975 * @param elem {@code elem} to test for 1976 * @return {@code true} if the given element is contained 1977 */ contains(int[] array, int elem)1978 public static boolean contains(int[] array, int elem) { 1979 if (array == null) return false; 1980 for (int i = 0; i < array.length; i++) { 1981 if (elem == array[i]) return true; 1982 } 1983 return false; 1984 } 1985 1986 /** 1987 * Get object array from byte array. 1988 * 1989 * @param array Input byte array to be converted 1990 * @return Byte object array converted from input byte array 1991 */ toObject(byte[] array)1992 public static Byte[] toObject(byte[] array) { 1993 return convertPrimitiveArrayToObjectArray(array, Byte.class); 1994 } 1995 1996 /** 1997 * Get object array from int array. 1998 * 1999 * @param array Input int array to be converted 2000 * @return Integer object array converted from input int array 2001 */ toObject(int[] array)2002 public static Integer[] toObject(int[] array) { 2003 return convertPrimitiveArrayToObjectArray(array, Integer.class); 2004 } 2005 2006 /** 2007 * Get object array from float array. 2008 * 2009 * @param array Input float array to be converted 2010 * @return Float object array converted from input float array 2011 */ toObject(float[] array)2012 public static Float[] toObject(float[] array) { 2013 return convertPrimitiveArrayToObjectArray(array, Float.class); 2014 } 2015 2016 /** 2017 * Get object array from double array. 2018 * 2019 * @param array Input double array to be converted 2020 * @return Double object array converted from input double array 2021 */ toObject(double[] array)2022 public static Double[] toObject(double[] array) { 2023 return convertPrimitiveArrayToObjectArray(array, Double.class); 2024 } 2025 2026 /** 2027 * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]). 
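     * <p>For example, {@code toObject(new int[] {1, 2, 3})} boxes the input into an
     * {@code Integer[]} containing 1, 2, 3 by reflectively copying each element.</p>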
2028 * 2029 * @param array Input array object 2030 * @param wrapperClass The boxed class it converts to 2031 * @return Boxed version of primitive array 2032 */ convertPrimitiveArrayToObjectArray(final Object array, final Class<T> wrapperClass)2033 private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array, 2034 final Class<T> wrapperClass) { 2035 // getLength does the null check and isArray check already. 2036 int arrayLength = Array.getLength(array); 2037 if (arrayLength == 0) { 2038 throw new IllegalArgumentException("Input array shouldn't be empty"); 2039 } 2040 2041 @SuppressWarnings("unchecked") 2042 final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength); 2043 for (int i = 0; i < arrayLength; i++) { 2044 Array.set(result, i, Array.get(array, i)); 2045 } 2046 return result; 2047 } 2048 2049 /** 2050 * Validate image based on format and size. 2051 * 2052 * @param image The image to be validated. 2053 * @param width The image width. 2054 * @param height The image height. 2055 * @param format The image format. 2056 * @param filePath The debug dump file path, null if don't want to dump to 2057 * file. 2058 * @throws UnsupportedOperationException if calling with an unknown format 2059 */ validateImage(Image image, int width, int height, int format, String filePath)2060 public static void validateImage(Image image, int width, int height, int format, 2061 String filePath) { 2062 checkImage(image, width, height, format); 2063 2064 /** 2065 * TODO: validate timestamp: 2066 * 1. capture result timestamp against the image timestamp (need 2067 * consider frame drops) 2068 * 2. timestamps should be monotonically increasing for different requests 2069 */ 2070 if(VERBOSE) Log.v(TAG, "validating Image"); 2071 byte[] data = getDataFromImage(image); 2072 assertTrue("Invalid image data", data != null && data.length > 0); 2073 2074 switch (format) { 2075 // Clients must be able to process and handle depth jpeg images like any other 2076 // regular jpeg. 
2077 case ImageFormat.DEPTH_JPEG: 2078 case ImageFormat.JPEG: 2079 validateJpegData(data, width, height, filePath); 2080 break; 2081 case ImageFormat.YCBCR_P010: 2082 validateP010Data(data, width, height, format, image.getTimestamp(), filePath); 2083 break; 2084 case ImageFormat.YUV_420_888: 2085 case ImageFormat.YV12: 2086 validateYuvData(data, width, height, format, image.getTimestamp(), filePath); 2087 break; 2088 case ImageFormat.RAW_SENSOR: 2089 validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath); 2090 break; 2091 case ImageFormat.DEPTH16: 2092 validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath); 2093 break; 2094 case ImageFormat.DEPTH_POINT_CLOUD: 2095 validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath); 2096 break; 2097 case ImageFormat.RAW_PRIVATE: 2098 validateRawPrivateData(data, width, height, image.getTimestamp(), filePath); 2099 break; 2100 case ImageFormat.Y8: 2101 validateY8Data(data, width, height, format, image.getTimestamp(), filePath); 2102 break; 2103 case ImageFormat.HEIC: 2104 validateHeicData(data, width, height, filePath); 2105 break; 2106 default: 2107 throw new UnsupportedOperationException("Unsupported format for validation: " 2108 + format); 2109 } 2110 } 2111 2112 public static class HandlerExecutor implements Executor { 2113 private final Handler mHandler; 2114 HandlerExecutor(Handler handler)2115 public HandlerExecutor(Handler handler) { 2116 assertNotNull("handler must be valid", handler); 2117 mHandler = handler; 2118 } 2119 2120 @Override execute(Runnable runCmd)2121 public void execute(Runnable runCmd) { 2122 mHandler.post(runCmd); 2123 } 2124 } 2125 2126 /** 2127 * Provide a mock for {@link CameraDevice.StateCallback}. 2128 * 2129 * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an 2130 * abstract class.</p> 2131 * 2132 * <p> 2133 * Use this instead of other classes when needing to verify interactions, since 2134 * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra 2135 * interactions which will cause false test failures. 2136 * </p> 2137 * 2138 */ 2139 public static class MockStateCallback extends CameraDevice.StateCallback { 2140 2141 @Override onOpened(CameraDevice camera)2142 public void onOpened(CameraDevice camera) { 2143 } 2144 2145 @Override onDisconnected(CameraDevice camera)2146 public void onDisconnected(CameraDevice camera) { 2147 } 2148 2149 @Override onError(CameraDevice camera, int error)2150 public void onError(CameraDevice camera, int error) { 2151 } 2152 MockStateCallback()2153 private MockStateCallback() {} 2154 2155 /** 2156 * Create a Mockito-ready mocked StateCallback. 2157 */ mock()2158 public static MockStateCallback mock() { 2159 return Mockito.spy(new MockStateCallback()); 2160 } 2161 } 2162 validateJpegData(byte[] jpegData, int width, int height, String filePath)2163 public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) { 2164 BitmapFactory.Options bmpOptions = new BitmapFactory.Options(); 2165 // DecodeBound mode: only parse the frame header to get width/height. 2166 // it doesn't decode the pixel. 2167 bmpOptions.inJustDecodeBounds = true; 2168 BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions); 2169 assertEquals(width, bmpOptions.outWidth); 2170 assertEquals(height, bmpOptions.outHeight); 2171 2172 // Pixel decoding mode: decode whole image. check if the image data 2173 // is decodable here. 
2174 assertNotNull("Decoding jpeg failed", 2175 BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length)); 2176 if (DEBUG && filePath != null) { 2177 String fileName = 2178 filePath + "/" + width + "x" + height + ".jpeg"; 2179 dumpFile(fileName, jpegData); 2180 } 2181 } 2182 validateYuvData(byte[] yuvData, int width, int height, int format, long ts, String filePath)2183 private static void validateYuvData(byte[] yuvData, int width, int height, int format, 2184 long ts, String filePath) { 2185 checkYuvFormat(format); 2186 if (VERBOSE) Log.v(TAG, "Validating YUV data"); 2187 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2188 assertEquals("Yuv data doesn't match", expectedSize, yuvData.length); 2189 2190 // TODO: Can add data validation for test pattern. 2191 2192 if (DEBUG && filePath != null) { 2193 String fileName = 2194 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv"; 2195 dumpFile(fileName, yuvData); 2196 } 2197 } 2198 validateP010Data(byte[] p010Data, int width, int height, int format, long ts, String filePath)2199 private static void validateP010Data(byte[] p010Data, int width, int height, int format, 2200 long ts, String filePath) { 2201 if (VERBOSE) Log.v(TAG, "Validating P010 data"); 2202 // The P010 10 bit samples are stored in two bytes so the size needs to be adjusted 2203 // accordingly. 2204 int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8; 2205 int expectedSize = width * height * bytesPerPixelRounded; 2206 assertEquals("P010 data doesn't match", expectedSize, p010Data.length); 2207 2208 if (DEBUG && filePath != null) { 2209 String fileName = 2210 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010"; 2211 dumpFile(fileName, p010Data); 2212 } 2213 } validateRaw16Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2214 private static void validateRaw16Data(byte[] rawData, int width, int height, int format, 2215 long ts, String filePath) { 2216 if (VERBOSE) Log.v(TAG, "Validating raw data"); 2217 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2218 assertEquals("Raw data doesn't match", expectedSize, rawData.length); 2219 2220 // TODO: Can add data validation for test pattern. 2221 2222 if (DEBUG && filePath != null) { 2223 String fileName = 2224 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16"; 2225 dumpFile(fileName, rawData); 2226 } 2227 2228 return; 2229 } 2230 validateY8Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2231 private static void validateY8Data(byte[] rawData, int width, int height, int format, 2232 long ts, String filePath) { 2233 if (VERBOSE) Log.v(TAG, "Validating Y8 data"); 2234 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2235 assertEquals("Y8 data doesn't match", expectedSize, rawData.length); 2236 2237 // TODO: Can add data validation for test pattern. 
2238 2239 if (DEBUG && filePath != null) { 2240 String fileName = 2241 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8"; 2242 dumpFile(fileName, rawData); 2243 } 2244 2245 return; 2246 } 2247 validateRawPrivateData(byte[] rawData, int width, int height, long ts, String filePath)2248 private static void validateRawPrivateData(byte[] rawData, int width, int height, 2249 long ts, String filePath) { 2250 if (VERBOSE) Log.v(TAG, "Validating private raw data"); 2251 // Expect each RAW pixel should occupy at least one byte and no more than 30 bytes 2252 int expectedSizeMin = width * height; 2253 int expectedSizeMax = width * height * 30; 2254 2255 assertTrue("Opaque RAW size " + rawData.length + "out of normal bound [" + 2256 expectedSizeMin + "," + expectedSizeMax + "]", 2257 expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax); 2258 2259 if (DEBUG && filePath != null) { 2260 String fileName = 2261 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv"; 2262 dumpFile(fileName, rawData); 2263 } 2264 2265 return; 2266 } 2267 validateDepth16Data(byte[] depthData, int width, int height, int format, long ts, String filePath)2268 private static void validateDepth16Data(byte[] depthData, int width, int height, int format, 2269 long ts, String filePath) { 2270 2271 if (VERBOSE) Log.v(TAG, "Validating depth16 data"); 2272 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2273 assertEquals("Depth data doesn't match", expectedSize, depthData.length); 2274 2275 2276 if (DEBUG && filePath != null) { 2277 String fileName = 2278 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16"; 2279 dumpFile(fileName, depthData); 2280 } 2281 2282 return; 2283 2284 } 2285 validateDepthPointCloudData(byte[] depthData, int width, int height, int format, long ts, String filePath)2286 private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format, 2287 long ts, String filePath) { 2288 2289 if (VERBOSE) Log.v(TAG, "Validating depth point cloud data"); 2290 2291 // Can't validate size since it is variable 2292 2293 if (DEBUG && filePath != null) { 2294 String fileName = 2295 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud"; 2296 dumpFile(fileName, depthData); 2297 } 2298 2299 return; 2300 2301 } 2302 validateHeicData(byte[] heicData, int width, int height, String filePath)2303 private static void validateHeicData(byte[] heicData, int width, int height, String filePath) { 2304 BitmapFactory.Options bmpOptions = new BitmapFactory.Options(); 2305 // DecodeBound mode: only parse the frame header to get width/height. 2306 // it doesn't decode the pixel. 2307 bmpOptions.inJustDecodeBounds = true; 2308 BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions); 2309 assertEquals(width, bmpOptions.outWidth); 2310 assertEquals(height, bmpOptions.outHeight); 2311 2312 // Pixel decoding mode: decode whole image. check if the image data 2313 // is decodable here. 
2314 assertNotNull("Decoding heic failed", 2315 BitmapFactory.decodeByteArray(heicData, 0, heicData.length)); 2316 if (DEBUG && filePath != null) { 2317 String fileName = 2318 filePath + "/" + width + "x" + height + ".heic"; 2319 dumpFile(fileName, heicData); 2320 } 2321 } 2322 getValueNotNull(CaptureResult result, CaptureResult.Key<T> key)2323 public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) { 2324 if (result == null) { 2325 throw new IllegalArgumentException("Result must not be null"); 2326 } 2327 2328 T value = result.get(key); 2329 assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value); 2330 return value; 2331 } 2332 getValueNotNull(CameraCharacteristics characteristics, CameraCharacteristics.Key<T> key)2333 public static <T> T getValueNotNull(CameraCharacteristics characteristics, 2334 CameraCharacteristics.Key<T> key) { 2335 if (characteristics == null) { 2336 throw new IllegalArgumentException("Camera characteristics must not be null"); 2337 } 2338 2339 T value = characteristics.get(key); 2340 assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value); 2341 return value; 2342 } 2343 2344 /** 2345 * Get a crop region for a given zoom factor and center position. 2346 * <p> 2347 * The center position is normalized position in range of [0, 1.0], where 2348 * (0, 0) represents top left corner, (1.0. 1.0) represents bottom right 2349 * corner. The center position could limit the effective minimal zoom 2350 * factor, for example, if the center position is (0.75, 0.75), the 2351 * effective minimal zoom position becomes 2.0. If the requested zoom factor 2352 * is smaller than 2.0, a crop region with 2.0 zoom factor will be returned. 2353 * </p> 2354 * <p> 2355 * The aspect ratio of the crop region is maintained the same as the aspect 2356 * ratio of active array. 2357 * </p> 2358 * 2359 * @param zoomFactor The zoom factor to generate the crop region, it must be 2360 * >= 1.0 2361 * @param center The normalized zoom center point that is in the range of [0, 1]. 2362 * @param maxZoom The max zoom factor supported by this device. 2363 * @param activeArray The active array size of this device. 2364 * @return crop region for the given normalized center and zoom factor. 
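     *
     * <p>Illustrative example for a hypothetical 4000x3000 active array: a 2x zoom around
     * the center, {@code getCropRegionForZoom(2.0f, new PointF(0.5f, 0.5f), 8.0f,
     * new Rect(0, 0, 4000, 3000))}, yields a 2000x1500 crop region centered at
     * (2000, 1500), i.e. {@code Rect(1000, 750, 3000, 2250)}.</p>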
2365 */ getCropRegionForZoom(float zoomFactor, final PointF center, final float maxZoom, final Rect activeArray)2366 public static Rect getCropRegionForZoom(float zoomFactor, final PointF center, 2367 final float maxZoom, final Rect activeArray) { 2368 if (zoomFactor < 1.0) { 2369 throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0"); 2370 } 2371 if (center.x > 1.0 || center.x < 0) { 2372 throw new IllegalArgumentException("center.x " + center.x 2373 + " should be in range of [0, 1.0]"); 2374 } 2375 if (center.y > 1.0 || center.y < 0) { 2376 throw new IllegalArgumentException("center.y " + center.y 2377 + " should be in range of [0, 1.0]"); 2378 } 2379 if (maxZoom < 1.0) { 2380 throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0"); 2381 } 2382 if (activeArray == null) { 2383 throw new IllegalArgumentException("activeArray must not be null"); 2384 } 2385 2386 float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x), 2387 Math.min(center.y, 1.0f - center.y)); 2388 float minEffectiveZoom = 0.5f / minCenterLength; 2389 if (minEffectiveZoom > maxZoom) { 2390 throw new IllegalArgumentException("Requested center " + center.toString() + 2391 " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max" 2392 + " zoom factor " + maxZoom); 2393 } 2394 2395 if (zoomFactor < minEffectiveZoom) { 2396 Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor " 2397 + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom); 2398 zoomFactor = minEffectiveZoom; 2399 } 2400 2401 int cropCenterX = (int)(activeArray.width() * center.x); 2402 int cropCenterY = (int)(activeArray.height() * center.y); 2403 int cropWidth = (int) (activeArray.width() / zoomFactor); 2404 int cropHeight = (int) (activeArray.height() / zoomFactor); 2405 2406 return new Rect( 2407 /*left*/cropCenterX - cropWidth / 2, 2408 /*top*/cropCenterY - cropHeight / 2, 2409 /*right*/ cropCenterX + cropWidth / 2, 2410 /*bottom*/cropCenterY + cropHeight / 2); 2411 } 2412 2413 /** 2414 * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps 2415 * 2416 * @param staticInfo camera static metadata 2417 * @return AeAvailableTargetFpsRanges in descending order by max fps 2418 */ getDescendingTargetFpsRanges(StaticMetadata staticInfo)2419 public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) { 2420 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2421 Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() { 2422 public int compare(Range<Integer> r1, Range<Integer> r2) { 2423 return r2.getUpper() - r1.getUpper(); 2424 } 2425 }); 2426 return fpsRanges; 2427 } 2428 2429 /** 2430 * Get AeAvailableTargetFpsRanges with max fps not exceeding 30 2431 * 2432 * @param staticInfo camera static metadata 2433 * @return AeAvailableTargetFpsRanges with max fps not exceeding 30 2434 */ getTargetFpsRangesUpTo30(StaticMetadata staticInfo)2435 public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) { 2436 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2437 ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>(); 2438 for (Range<Integer> fpsRange : fpsRanges) { 2439 if (fpsRange.getUpper() <= 30) { 2440 fpsRangesUpTo30.add(fpsRange); 2441 } 2442 } 2443 return fpsRangesUpTo30; 2444 } 2445 2446 /** 2447 * Get AeAvailableTargetFpsRanges with max fps greater than 30 2448 * 2449 * @param 
staticInfo camera static metadata 2450 * @return AeAvailableTargetFpsRanges with max fps greater than 30 2451 */ getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo)2452 public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) { 2453 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2454 ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>(); 2455 for (Range<Integer> fpsRange : fpsRanges) { 2456 if (fpsRange.getUpper() > 30) { 2457 fpsRangesGreaterThan30.add(fpsRange); 2458 } 2459 } 2460 return fpsRangesGreaterThan30; 2461 } 2462 2463 /** 2464 * Calculate output 3A region from the intersection of input 3A region and cropped region. 2465 * 2466 * @param requestRegions The input 3A regions 2467 * @param cropRect The cropped region 2468 * @return expected 3A regions output in capture result 2469 */ getExpectedOutputRegion( MeteringRectangle[] requestRegions, Rect cropRect)2470 public static MeteringRectangle[] getExpectedOutputRegion( 2471 MeteringRectangle[] requestRegions, Rect cropRect){ 2472 MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length]; 2473 for (int i = 0; i < requestRegions.length; i++) { 2474 Rect requestRect = requestRegions[i].getRect(); 2475 Rect resultRect = new Rect(); 2476 boolean intersect = resultRect.setIntersect(requestRect, cropRect); 2477 resultRegions[i] = new MeteringRectangle( 2478 resultRect, 2479 intersect ? requestRegions[i].getMeteringWeight() : 0); 2480 } 2481 return resultRegions; 2482 } 2483 2484 /** 2485 * Copy source image data to destination image. 2486 * 2487 * @param src The source image to be copied from. 2488 * @param dst The destination image to be copied to. 2489 * @throws IllegalArgumentException If the source and destination images have 2490 * different format, size, or one of the images is not copyable. 2491 */ imageCopy(Image src, Image dst)2492 public static void imageCopy(Image src, Image dst) { 2493 if (src == null || dst == null) { 2494 throw new IllegalArgumentException("Images should be non-null"); 2495 } 2496 if (src.getFormat() != dst.getFormat()) { 2497 throw new IllegalArgumentException("Src and dst images should have the same format"); 2498 } 2499 if (src.getFormat() == ImageFormat.PRIVATE || 2500 dst.getFormat() == ImageFormat.PRIVATE) { 2501 throw new IllegalArgumentException("PRIVATE format images are not copyable"); 2502 } 2503 2504 Size srcSize = new Size(src.getWidth(), src.getHeight()); 2505 Size dstSize = new Size(dst.getWidth(), dst.getHeight()); 2506 if (!srcSize.equals(dstSize)) { 2507 throw new IllegalArgumentException("source image size " + srcSize + " is different" 2508 + " with " + "destination image size " + dstSize); 2509 } 2510 2511 // TODO: check the owner of the dst image, it must be from ImageWriter, other source may 2512 // not be writable. Maybe we should add an isWritable() method in image class. 
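        // Copy proceeds plane by plane below: when row strides match and the pixel stride
        // is 1, the whole plane buffer is copied with a single bulk put(); otherwise rows
        // are copied one at a time, and for differing pixel strides each pixel in the row
        // is copied individually. The last row of each plane is clamped to the bytes that
        // actually remain in the buffer to avoid over-reading interleaved planes.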
2513 2514 Plane[] srcPlanes = src.getPlanes(); 2515 Plane[] dstPlanes = dst.getPlanes(); 2516 ByteBuffer srcBuffer = null; 2517 ByteBuffer dstBuffer = null; 2518 for (int i = 0; i < srcPlanes.length; i++) { 2519 srcBuffer = srcPlanes[i].getBuffer(); 2520 dstBuffer = dstPlanes[i].getBuffer(); 2521 int srcPos = srcBuffer.position(); 2522 srcBuffer.rewind(); 2523 dstBuffer.rewind(); 2524 int srcRowStride = srcPlanes[i].getRowStride(); 2525 int dstRowStride = dstPlanes[i].getRowStride(); 2526 int srcPixStride = srcPlanes[i].getPixelStride(); 2527 int dstPixStride = dstPlanes[i].getPixelStride(); 2528 2529 if (srcPixStride > 2 || dstPixStride > 2) { 2530 throw new IllegalArgumentException("source pixel stride " + srcPixStride + 2531 " with destination pixel stride " + dstPixStride + 2532 " is not supported"); 2533 } 2534 2535 if (srcRowStride == dstRowStride && srcPixStride == dstPixStride && 2536 srcPixStride == 1) { 2537 // Fast path, just copy the content in the byteBuffer all together. 2538 dstBuffer.put(srcBuffer); 2539 } else { 2540 Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i); 2541 int srcRowByteCount = srcRowStride; 2542 int dstRowByteCount = dstRowStride; 2543 byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)]; 2544 2545 if (srcPixStride == dstPixStride && srcPixStride == 1) { 2546 // Row by row copy case 2547 for (int row = 0; row < effectivePlaneSize.getHeight(); row++) { 2548 if (row == effectivePlaneSize.getHeight() - 1) { 2549 // Special case for interleaved planes: need handle the last row 2550 // carefully to avoid memory corruption. Check if we have enough bytes 2551 // to copy. 2552 srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining()); 2553 dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining()); 2554 } 2555 srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount); 2556 dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount); 2557 } 2558 } else { 2559 // Row by row per pixel copy case 2560 byte[] dstDataRow = new byte[dstRowByteCount]; 2561 for (int row = 0; row < effectivePlaneSize.getHeight(); row++) { 2562 if (row == effectivePlaneSize.getHeight() - 1) { 2563 // Special case for interleaved planes: need handle the last row 2564 // carefully to avoid memory corruption. Check if we have enough bytes 2565 // to copy. 
                            int remainingBytes = srcBuffer.remaining();
                            if (srcRowByteCount > remainingBytes) {
                                srcRowByteCount = remainingBytes;
                            }
                            remainingBytes = dstBuffer.remaining();
                            if (dstRowByteCount > remainingBytes) {
                                dstRowByteCount = remainingBytes;
                            }
                        }
                        srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
                        int pos = dstBuffer.position();
                        dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount);
                        dstBuffer.position(pos);
                        for (int x = 0; x < effectivePlaneSize.getWidth(); x++) {
                            dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride];
                        }
                        dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount);
                    }
                }
            }
            srcBuffer.position(srcPos);
            dstBuffer.rewind();
        }
    }

    private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
        switch (image.getFormat()) {
            case ImageFormat.YUV_420_888:
                if (planeIdx == 0) {
                    return new Size(image.getWidth(), image.getHeight());
                } else {
                    return new Size(image.getWidth() / 2, image.getHeight() / 2);
                }
            case ImageFormat.JPEG:
            case ImageFormat.RAW_SENSOR:
            case ImageFormat.RAW10:
            case ImageFormat.RAW12:
            case ImageFormat.DEPTH16:
                return new Size(image.getWidth(), image.getHeight());
            case ImageFormat.PRIVATE:
                return new Size(0, 0);
            default:
                throw new UnsupportedOperationException(
                        String.format("Invalid image format %d", image.getFormat()));
        }
    }

    /**
     * <p>
     * Checks whether the two images are strongly equal.
     * </p>
     * <p>
     * Two images are strongly equal if and only if the data, formats, sizes,
     * and timestamps are the same. For {@link ImageFormat#PRIVATE PRIVATE} format
     * images, the image data is not accessible, thus the data comparison is
     * effectively skipped as the number of planes is zero.
     * </p>
     * <p>
     * Note that this method compares the pixel data even outside of the crop
     * region, which may not be necessary for general use cases.
     * </p>
     *
     * @param lhsImg First image to be compared with.
     * @param rhsImg Second image to be compared with.
     * @return true if the two images are equal, false otherwise.
     * @throws IllegalArgumentException If either image is null.
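     *
     * <p>A minimal usage sketch, assuming {@code img1} and {@code img2} were acquired from
     * readers configured with the same format and size:</p>
     * <pre>{@code
     * assertTrue("Round-tripped image should match the original",
     *         isImageStronglyEqual(img1, img2));
     * }</pre>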
2632 */ isImageStronglyEqual(Image lhsImg, Image rhsImg)2633 public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) { 2634 if (lhsImg == null || rhsImg == null) { 2635 throw new IllegalArgumentException("Images should be non-null"); 2636 } 2637 2638 if (lhsImg.getFormat() != rhsImg.getFormat()) { 2639 Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different with rhsImg format " 2640 + rhsImg.getFormat()); 2641 return false; 2642 } 2643 2644 if (lhsImg.getWidth() != rhsImg.getWidth()) { 2645 Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different with rhsImg width " 2646 + rhsImg.getWidth()); 2647 return false; 2648 } 2649 2650 if (lhsImg.getHeight() != rhsImg.getHeight()) { 2651 Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different with rhsImg height " 2652 + rhsImg.getHeight()); 2653 return false; 2654 } 2655 2656 if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) { 2657 Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp() 2658 + " is different with rhsImg timestamp " + rhsImg.getTimestamp()); 2659 return false; 2660 } 2661 2662 if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) { 2663 Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect() 2664 + " is different with rhsImg crop rect " + rhsImg.getCropRect()); 2665 return false; 2666 } 2667 2668 // Compare data inside of the image. 2669 Plane[] lhsPlanes = lhsImg.getPlanes(); 2670 Plane[] rhsPlanes = rhsImg.getPlanes(); 2671 ByteBuffer lhsBuffer = null; 2672 ByteBuffer rhsBuffer = null; 2673 for (int i = 0; i < lhsPlanes.length; i++) { 2674 lhsBuffer = lhsPlanes[i].getBuffer(); 2675 rhsBuffer = rhsPlanes[i].getBuffer(); 2676 lhsBuffer.rewind(); 2677 rhsBuffer.rewind(); 2678 // Special case for YUV420_888 buffer with different layout or 2679 // potentially differently interleaved U/V planes. 2680 if (lhsImg.getFormat() == ImageFormat.YUV_420_888 && 2681 (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() || 2682 lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() || 2683 (lhsPlanes[i].getPixelStride() != 1))) { 2684 int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth(); 2685 int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight(); 2686 int rowSizeL = lhsPlanes[i].getRowStride(); 2687 int rowSizeR = rhsPlanes[i].getRowStride(); 2688 byte[] lhsRow = new byte[rowSizeL]; 2689 byte[] rhsRow = new byte[rowSizeR]; 2690 int pixStrideL = lhsPlanes[i].getPixelStride(); 2691 int pixStrideR = rhsPlanes[i].getPixelStride(); 2692 for (int r = 0; r < height; r++) { 2693 if (r == height -1) { 2694 rowSizeL = lhsBuffer.remaining(); 2695 rowSizeR = rhsBuffer.remaining(); 2696 } 2697 lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL); 2698 rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR); 2699 for (int c = 0; c < width; c++) { 2700 if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) { 2701 Log.i(TAG, String.format( 2702 "byte buffers for plane %d row %d col %d don't match.", 2703 i, r, c)); 2704 return false; 2705 } 2706 } 2707 } 2708 } else { 2709 // Compare entire buffer directly 2710 if (!lhsBuffer.equals(rhsBuffer)) { 2711 Log.i(TAG, "byte buffers for plane " + i + " don't match."); 2712 return false; 2713 } 2714 } 2715 } 2716 2717 return true; 2718 } 2719 2720 /** 2721 * Set jpeg related keys in a capture request builder. 2722 * 2723 * @param builder The capture request builder to set the keys inl 2724 * @param exifData The exif data to set. 2725 * @param thumbnailSize The thumbnail size to set. 2726 * @param collector The camera error collector to collect errors. 
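     *
     * <p>A typical call from a JPEG capture test looks roughly like the sketch below;
     * the still-capture builder, EXIF test data, and error collector are assumed to be
     * created elsewhere in the test, and the thumbnail size is illustrative only:</p>
     * <pre>{@code
     * setJpegKeys(stillBuilder, exifTestData, new Size(320, 240), errorCollector);
     * }</pre>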
     */
    public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
            Size thumbnailSize, CameraErrorCollector collector) {
        builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
        builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
        builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
        builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
        builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
                exifData.thumbnailQuality);

        // Validate request set and get.
        collector.expectEquals("JPEG thumbnail size request set and get should match",
                thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
        collector.expectTrue("GPS locations request set and get should match.",
                areGpsFieldsEqual(exifData.gpsLocation,
                        builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
        collector.expectEquals("JPEG orientation request set and get should match",
                exifData.jpegOrientation,
                builder.get(CaptureRequest.JPEG_ORIENTATION));
        collector.expectEquals("JPEG quality request set and get should match",
                exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
        collector.expectEquals("JPEG thumbnail quality request set and get should match",
                exifData.thumbnailQuality,
                builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
    }

    /**
     * Simple validation of JPEG/HEIC image size and format.
     * <p>
     * Only validates basic correctness of the image object. It is fast, but doesn't
     * actually check the buffer data. Assert is used here as it makes no sense to
     * continue the test if the captured image has some serious failures.
     * </p>
     *
     * @param image The captured JPEG/HEIC image
     * @param expectedSize Expected captured JPEG/HEIC size
     * @param format JPEG/HEIC image format
     */
    public static void basicValidateBlobImage(Image image, Size expectedSize, int format) {
        Size imageSz = new Size(image.getWidth(), image.getHeight());
        assertTrue(
                String.format("Image size doesn't match (expected %s, actual %s) ",
                        expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
        assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"),
                format, image.getFormat());
        assertNotNull("Image plane shouldn't be null", image.getPlanes());
        assertEquals("Image plane number should be 1", 1, image.getPlanes().length);

        // Jpeg/Heic decoding validation was done in ImageReaderTest,
        // no need to duplicate the test here.
    }

    /**
     * Verify that the EXIF and JPEG related keys in a capture result are expected:
     * - Capture request get values are the same as were set.
     * - The capture result's EXIF data is the same as was set by
     *   the capture request.
     * - New tags in the result set by the camera service are
     *   present and semantically correct.
     *
     * @param image The output JPEG/HEIC image to verify.
     * @param captureResult The capture result to verify.
     * @param expectedSize The expected JPEG/HEIC size.
     * @param expectedThumbnailSize The expected thumbnail size.
     * @param expectedExifData The expected EXIF data
     * @param staticInfo The static metadata for the camera device.

    /**
     * Verify that the EXIF and JPEG related keys in a capture result are as expected.
     * - Capture request get values are same as were set.
     * - Capture result's exif data is the same as was set by
     *   the capture request.
     * - New tags in the result set by the camera service are
     *   present and semantically correct.
     *
     * @param image The output JPEG/HEIC image to verify.
     * @param captureResult The capture result to verify.
     * @param expectedSize The expected JPEG/HEIC size.
     * @param expectedThumbnailSize The expected thumbnail size.
     * @param expectedExifData The expected EXIF data.
     * @param staticInfo The static metadata for the camera device.
     * @param collector The camera error collector to collect errors.
     * @param debugFileNameBase The file name base used to dump the jpeg/heic to.
     * @param format JPEG/HEIC format
     */
    public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
            Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
            CameraErrorCollector collector, String debugFileNameBase, int format) throws Exception {

        basicValidateBlobImage(image, expectedSize, format);

        byte[] blobBuffer = getDataFromImage(image);
        // Have to dump into a file to be able to use ExifInterface
        String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg");
        String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix;
        dumpFile(blobFilename, blobBuffer);
        ExifInterface exif = new ExifInterface(blobFilename);

        if (expectedThumbnailSize.equals(new Size(0, 0))) {
            collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
                    !exif.hasThumbnail());
        } else {
            collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
                    expectedThumbnailSize, exif.hasThumbnail());
        }

        // Validate capture result vs. request
        Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
        int orientationTested = expectedExifData.jpegOrientation;
        // The legacy shim never rotates the thumbnail size
        if ((orientationTested == 90 || orientationTested == 270) &&
                staticInfo.isHardwareLevelAtLeastLimited()) {
            int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                    /*defaultValue*/-1);
            if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
                // Device physically rotated image+thumbnail data
                // Expect thumbnail size to be also rotated
                resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
                        resultThumbnailSize.getWidth());
            }
        }

        collector.expectEquals("JPEG thumbnail size result and request should match",
                expectedThumbnailSize, resultThumbnailSize);
        if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
                null) {
            collector.expectTrue("GPS location result and request should match.",
                    areGpsFieldsEqual(expectedExifData.gpsLocation,
                            captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
        }
        collector.expectEquals("JPEG orientation result and request should match",
                expectedExifData.jpegOrientation,
                captureResult.get(CaptureResult.JPEG_ORIENTATION));
        collector.expectEquals("JPEG quality result and request should match",
                expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
        collector.expectEquals("JPEG thumbnail quality result and request should match",
                expectedExifData.thumbnailQuality,
                captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));

        // Validate other exif tags for all non-legacy devices
        if (!staticInfo.isHardwareLevelLegacy()) {
            verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, collector,
                    expectedExifData);
        }
    }
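
    // Illustrative usage sketch: after a still capture, the JPEG Image, its capture result
    // and the expected EXIF data are verified together. "jpegImage", "stillResult", "jpegSize"
    // and "debugFileNameBase" are hypothetical locals of the calling test.
    //
    //     verifyJpegKeys(jpegImage, stillResult, jpegSize,
    //             /*expectedThumbnailSize*/new Size(320, 240), exifData, staticInfo,
    //             exifCollector, debugFileNameBase, ImageFormat.JPEG);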

    /**
     * Get the degree of an EXIF orientation.
     */
    private static int getExifOrientationInDegree(int exifOrientation,
            CameraErrorCollector collector) {
        switch (exifOrientation) {
            case ExifInterface.ORIENTATION_NORMAL:
                return 0;
            case ExifInterface.ORIENTATION_ROTATE_90:
                return 90;
            case ExifInterface.ORIENTATION_ROTATE_180:
                return 180;
            case ExifInterface.ORIENTATION_ROTATE_270:
                return 270;
            default:
                collector.addMessage("It is impossible to get non 0, 90, 180, 270 degrees exif" +
                        " info based on the request orientation range");
                return 0;
        }
    }

    /**
     * Validate and return the focal length.
     *
     * @param result Capture result to get the focal length
     * @return Focal length from capture result or -1 if focal length is not available.
     */
    private static float validateFocalLength(CaptureResult result, StaticMetadata staticInfo,
            CameraErrorCollector collector) {
        float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked();
        Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
        if (collector.expectTrue("Focal length is invalid",
                resultFocalLength != null && resultFocalLength > 0)) {
            List<Float> focalLengthList =
                    Arrays.asList(CameraTestUtils.toObject(focalLengths));
            collector.expectTrue("Focal length should be one of the available focal lengths",
                    focalLengthList.contains(resultFocalLength));
            return resultFocalLength;
        }
        return -1;
    }

    /**
     * Validate and return the aperture.
     *
     * @param result Capture result to get the aperture
     * @return Aperture from capture result or -1 if aperture is not available.
     */
    private static float validateAperture(CaptureResult result, StaticMetadata staticInfo,
            CameraErrorCollector collector) {
        float[] apertures = staticInfo.getAvailableAperturesChecked();
        Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
        if (collector.expectTrue("Capture result aperture is invalid",
                resultAperture != null && resultAperture > 0)) {
            List<Float> apertureList =
                    Arrays.asList(CameraTestUtils.toObject(apertures));
            collector.expectTrue("Aperture should be one of the available apertures",
                    apertureList.contains(resultAperture));
            return resultAperture;
        }
        return -1;
    }

    /**
     * Return the closest value in an array of floats.
     */
    private static float getClosestValueInArray(float[] values, float target) {
        int minIdx = 0;
        float minDistance = Math.abs(values[0] - target);
        for (int i = 0; i < values.length; i++) {
            float distance = Math.abs(values[i] - target);
            if (minDistance > distance) {
                minDistance = distance;
                minIdx = i;
            }
        }

        return values[minIdx];
    }
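
    // Worked example for the helper above (illustrative values, not data from any device):
    // with values = {1.5f, 2.8f, 4.0f} and target = 2.5f the distances are 1.0, 0.3 and 1.5,
    // so getClosestValueInArray(values, 2.5f) returns 2.8f. The EXIF checks below use this to
    // map a reported focal length or aperture onto the closest advertised static value.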

    /**
     * Return whether two Locations' GPS fields are the same.
     */
    private static boolean areGpsFieldsEqual(Location a, Location b) {
        if (a == null || b == null) {
            return false;
        }

        // Compare providers by value; a reference comparison (==) would only work when the
        // exact same String instance is set on both locations.
        return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
                a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
                java.util.Objects.equals(a.getProvider(), b.getProvider());
    }

    /**
     * Verify extra tags in JPEG EXIF
     */
    private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
            CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector,
            ExifTestData expectedExifData)
            throws ParseException {
        /**
         * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
         * Orientation and exif width/height need to be tested carefully; there are two cases:
         *
         * 1. The device rotates the image buffer physically, so the exif width/height may not
         * match the requested still capture size, and we need to swap them to check.
         *
         * 2. The device uses the exif tag to record the image orientation and doesn't rotate
         * the jpeg image buffer itself. In this case, the exif width/height should always match
         * the requested still capture size, and the exif orientation should always match the
         * requested orientation.
         */
        int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
        int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
        Size exifSize = new Size(exifWidth, exifHeight);
        // Orientation could be missing; default to -1 (invalid) so a missing tag is caught by
        // the range check below.
        int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
                /*defaultValue*/-1);
        // Get the requested orientation from the result, because they should be the same.
        if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
            int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
            final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
            final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
            boolean orientationValid = collector.expectTrue(String.format(
                    "Exif orientation must be in range of [%d, %d]",
                    ORIENTATION_MIN, ORIENTATION_MAX),
                    exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
            if (orientationValid) {
                /**
                 * If the captured image doesn't carry the requested orientation in EXIF,
                 * the device rotated the image buffer physically. In that case we
                 * should swap the exif width/height accordingly before comparing.
                 */
                boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;

                if (deviceRotatedImage) {
                    // Case 1.
                    boolean needSwap = (requestedOrientation % 180 == 90);
                    if (needSwap) {
                        exifSize = new Size(exifHeight, exifWidth);
                    }
                } else {
                    // Case 2.
                    collector.expectEquals("Exif orientation should match requested orientation",
                            requestedOrientation, getExifOrientationInDegree(exifOrientation,
                            collector));
                }
            }
        }

        /**
         * Ideally, we would check exifSize == jpegSize == actual buffer size. But
         * jpegSize == jpeg decode bounds size (from the jpeg frame header, not exif)
         * was validated in ImageReaderTest, so there is no need to validate it again here.
         */
        collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
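
        // Illustrative example of the two cases above (hypothetical numbers): for a requested
        // 4032x3024 capture with JPEG_ORIENTATION = 90, a device that rotates the buffer
        // physically (case 1) writes a 3024x4032 image with EXIF orientation UNDEFINED, so the
        // exif size is swapped back before the comparison; a device that only records the tag
        // (case 2) keeps 4032x3024 and reports ORIENTATION_ROTATE_90, which maps to 90 degrees.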

        // TAG_DATETIME, it should be local time
        long currentTimeInMs = System.currentTimeMillis();
        long currentTimeInSecond = currentTimeInMs / 1000;
        Date date = new Date(currentTimeInMs);
        String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
        String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
        if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
            collector.expectTrue("Exif TAG_DATETIME is wrong",
                    dateTime.length() == EXIF_DATETIME_LENGTH);
            long exifTimeInSecond =
                    new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
            long delta = currentTimeInSecond - exifTimeInSecond;
            collector.expectTrue("Capture time deviates too much from the current time",
                    Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
            // It should be local time.
            collector.expectTrue("Exif date time should be local time",
                    dateTime.startsWith(localDatetime));
        }

        boolean isExternalCamera = staticInfo.isExternalCamera();
        if (!isExternalCamera) {
            // TAG_FOCAL_LENGTH.
            float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked();
            float exifFocalLength = (float) exif.getAttributeDouble(
                    ExifInterface.TAG_FOCAL_LENGTH, -1);
            collector.expectEquals("Focal length should match",
                    getClosestValueInArray(focalLengths, exifFocalLength),
                    exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
            // More checks for focal length.
            collector.expectEquals("Exif focal length should match capture result",
                    validateFocalLength(result, staticInfo, collector),
                    exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);

            // TAG_EXPOSURE_TIME
            // ExifInterface API gives the exposure time value as a float instead of a rational
            String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
            collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
            if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
                if (exposureTime != null) {
                    double exposureTimeValue = Double.parseDouble(exposureTime);
                    long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    double expected = expTimeResult / 1e9;
                    double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
                    tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
                    collector.expectEquals("Exif exposure time doesn't match", expected,
                            exposureTimeValue, tolerance);
                }
            }
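
            // Worked example for the tolerance above (illustrative numbers): for a reported
            // SENSOR_EXPOSURE_TIME of 33333333 ns, expected is ~0.0333 s and 5% of that is
            // ~0.00167 s, so the tolerance is clamped up to the 0.002 s minimum margin; for a
            // 0.5 s exposure the 5% ratio (0.025 s) dominates instead.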

            // TAG_APERTURE
            // ExifInterface API gives the aperture value as a float instead of a rational
            String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
            collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
            if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
                float[] apertures = staticInfo.getAvailableAperturesChecked();
                if (exifAperture != null) {
                    float apertureValue = Float.parseFloat(exifAperture);
                    collector.expectEquals("Aperture value should match",
                            getClosestValueInArray(apertures, apertureValue),
                            apertureValue, EXIF_APERTURE_ERROR_MARGIN);
                    // More checks for aperture.
                    collector.expectEquals("Exif aperture should match capture result",
                            validateAperture(result, staticInfo, collector),
                            apertureValue, EXIF_APERTURE_ERROR_MARGIN);
                }
            }

            // TAG_MAKE
            String make = exif.getAttribute(ExifInterface.TAG_MAKE);
            collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);

            // TAG_MODEL
            String model = exif.getAttribute(ExifInterface.TAG_MODEL);
            collector.expectEquals("Exif TAG_MODEL is incorrect", Build.MODEL, model);

            // TAG_ISO
            int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
            if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) ||
                    staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
                int expectedIso = 100;
                if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
                    expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
                }
                if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
                    expectedIso = expectedIso *
                            result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
                } else {
                    expectedIso *= 100;
                }
                collector.expectInRange("Exif TAG_ISO is incorrect", iso,
                        expectedIso / 100, (expectedIso + 50) / 100);
            }
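
            // Worked example for the ISO range above (illustrative numbers): with
            // SENSOR_SENSITIVITY = 400 and CONTROL_POST_RAW_SENSITIVITY_BOOST = 100 (1x),
            // expectedIso is 40000, so the accepted EXIF ISO range is [400, 400]; with a 2x
            // boost of 200 it becomes 80000 and the accepted range is [800, 800].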
        } else {
            // External camera specific checks
            // TAG_MAKE
            String make = exif.getAttribute(ExifInterface.TAG_MAKE);
            collector.expectNotNull("Exif TAG_MAKE is null", make);

            // TAG_MODEL
            String model = exif.getAttribute(ExifInterface.TAG_MODEL);
            collector.expectNotNull("Exif TAG_MODEL is null", model);
        }

        /**
         * TAG_FLASH. TODO: For full devices, can check a lot more info
         * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
         */
        String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
        collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);

        /**
         * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
         * should be able to cross-check android.sensor.referenceIlluminant.
         */
        String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
        collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);

        // TAG_DATETIME_DIGITIZED (a.k.a. creation time for digital cameras).
        String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
        collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
        if (digitizedTime != null) {
            String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
            collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
            if (expectedDateTime != null) {
                collector.expectEquals("dateTime should match digitizedTime",
                        expectedDateTime, digitizedTime);
            }
        }

        /**
         * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
         * most 9 digits in the ExifInterface implementation, use getAttributeInt to
         * sanitize it. When the default value -1 is returned, it means that
         * this exif tag either doesn't exist or is a non-numerical invalid
         * string. The same rule applies to the rest of the sub second tags.
         */
        int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
        collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0);

        // TAG_SUBSEC_TIME_ORIG
        int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
                /*defaultValue*/-1);
        collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
                subSecTimeOrig >= 0);

        // TAG_SUBSEC_TIME_DIG
        int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
                /*defaultValue*/-1);
        collector.expectTrue(
                "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0);

        /**
         * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP.
         * The GPS timestamp information should be in seconds UTC time.
         */
        String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP);
        collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp);
        String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP);
        collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp);

        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z");
        String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC";
        Date gpsDateTime = dateFormat.parse(gpsExifTimeString);
        Date expected = new Date(expectedExifData.gpsLocation.getTime());
        collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime);
    }

    /**
     * Immutable class wrapping the exif test data.
     */
    public static class ExifTestData {
        public final Location gpsLocation;
        public final int jpegOrientation;
        public final byte jpegQuality;
        public final byte thumbnailQuality;

        public ExifTestData(Location location, int orientation,
                byte jpgQuality, byte thumbQuality) {
            gpsLocation = location;
            jpegOrientation = orientation;
            jpegQuality = jpgQuality;
            thumbnailQuality = thumbQuality;
        }
    }

    public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
        Display display = windowManager.getDefaultDisplay();

        int width = display.getWidth();
        int height = display.getHeight();

        // Swap so that width always holds the longer display dimension.
        if (height > width) {
            height = width;
            width = display.getHeight();
        }

        if (bound.getWidth() <= width &&
                bound.getHeight() <= height) {
            return bound;
        } else {
            return new Size(width, height);
        }
    }
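
    // Illustrative usage sketch: building the EXIF test data handed to setJpegKeys() and
    // bounding a preview size by the display. "windowManager" is a hypothetical local obtained
    // by the calling test from its Context; sTestLocation0 is defined at the top of this class.
    //
    //     ExifTestData exifData = new ExifTestData(sTestLocation0, /*orientation*/270,
    //             /*jpgQuality*/(byte) 90, /*thumbQuality*/(byte) 85);
    //     Size previewBound = getPreviewSizeBound(windowManager, PREVIEW_SIZE_BOUND);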

    /**
     * Check if a particular stream configuration is supported by configuring it
     * to the device.
     */
    public static boolean isStreamConfigurationSupported(CameraDevice camera,
            List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler) {
        try {
            configureCameraSession(camera, outputSurfaces, listener, handler);
            return true;
        } catch (Exception e) {
            Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage());
            return false;
        }
    }

    public final static class SessionConfigSupport {
        public final boolean error;
        public final boolean callSupported;
        public final boolean configSupported;

        public SessionConfigSupport(boolean error,
                boolean callSupported, boolean configSupported) {
            this.error = error;
            this.callSupported = callSupported;
            this.configSupported = configSupported;
        }
    }

    /**
     * Assert that a particular stream combination is supported.
     */
    public static void checkSessionConfigurationWithSurfaces(CameraDevice camera,
            Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig,
            int operatingMode, boolean defaultSupport, String msg) {
        List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
        for (Surface surface : outputSurfaces) {
            outConfigurations.add(new OutputConfiguration(surface));
        }

        checkSessionConfigurationSupported(camera, handler, outConfigurations,
                inputConfig, operatingMode, defaultSupport, msg);
    }

    public static void checkSessionConfigurationSupported(CameraDevice camera,
            Handler handler, List<OutputConfiguration> outputConfigs,
            InputConfiguration inputConfig, int operatingMode, boolean defaultSupport,
            String msg) {
        SessionConfigSupport sessionConfigSupported =
                isSessionConfigSupported(camera, handler, outputConfigs, inputConfig,
                        operatingMode, defaultSupport);

        assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported);
    }
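
    // Illustrative usage sketch: asserting that a preview + JPEG combination is supported for
    // a regular session. "previewSurface" and "jpegReaderSurface" are hypothetical Surfaces the
    // calling test has already created.
    //
    //     checkSessionConfigurationWithSurfaces(camera, handler,
    //             Arrays.asList(previewSurface, jpegReaderSurface), /*inputConfig*/null,
    //             SessionConfiguration.SESSION_REGULAR, /*defaultSupport*/true,
    //             "Preview + JPEG stream combination must be supported");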

    /**
     * Query whether a particular stream combination is supported.
     */
    public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera,
            Handler handler, List<OutputConfiguration> outputConfigs,
            InputConfiguration inputConfig, int operatingMode, boolean defaultSupport) {
        boolean ret;
        BlockingSessionCallback sessionListener = new BlockingSessionCallback();

        SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
                new HandlerExecutor(handler), sessionListener);
        if (inputConfig != null) {
            sessionConfig.setInputConfiguration(inputConfig);
        }

        try {
            ret = camera.isSessionConfigurationSupported(sessionConfig);
        } catch (UnsupportedOperationException e) {
            // Camera doesn't support the session configuration query
            return new SessionConfigSupport(false/*error*/,
                    false/*callSupported*/, defaultSupport/*configSupported*/);
        } catch (IllegalArgumentException e) {
            return new SessionConfigSupport(true/*error*/,
                    false/*callSupported*/, false/*configSupported*/);
        } catch (android.hardware.camera2.CameraAccessException e) {
            return new SessionConfigSupport(true/*error*/,
                    false/*callSupported*/, false/*configSupported*/);
        }

        return new SessionConfigSupport(false/*error*/,
                true/*callSupported*/, ret/*configSupported*/);
    }

    /**
     * Wait for numResultsWait frames.
     *
     * @param resultListener The capture listener to get capture results back.
     * @param numResultsWait Number of frames to wait for.
     * @param timeout Wait timeout in ms.
     *
     * @return the last result, or {@code null} if there was none
     */
    public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
            int numResultsWait, int timeout) {
        if (numResultsWait < 0 || resultListener == null) {
            throw new IllegalArgumentException(
                    "numResultsWait must be non-negative and the listener must be non-null");
        }

        CaptureResult result = null;
        for (int i = 0; i < numResultsWait; i++) {
            result = resultListener.getCaptureResult(timeout);
        }

        return result;
    }
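
    // Illustrative usage sketch: draining a few preview results before checking a setting.
    // "resultListener" is a hypothetical SimpleCaptureCallback already registered with a
    // repeating request.
    //
    //     CaptureResult lastResult = waitForNumResults(resultListener,
    //             /*numResultsWait*/5, CAPTURE_RESULT_TIMEOUT_MS);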

    /**
     * Wait for any of the expected result key values to be available within a certain number
     * of results.
     *
     * <p>
     * Check the result immediately if numResultsWait is 0.
     * </p>
     *
     * @param listener The capture listener to get capture results from.
     * @param resultKey The capture result key associated with the result value.
     * @param expectedValues The list of result values to wait for;
     * returns immediately if the list is empty.
     * @param numResultsWait Number of results to wait for before timing out.
     * @param timeout Wait timeout for each result in ms.
     * @throws TimeoutRuntimeException If more than numResultsWait results are
     * seen before a matching result arrives, or an individual wait for a result
     * times out after 'timeout' ms.
     */
    public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
            CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait,
            int timeout) {
        if (numResultsWait < 0 || listener == null || expectedValues == null) {
            throw new IllegalArgumentException(
                    "numResultsWait must be non-negative and listener/expectedValues "
                            + "must be non-null");
        }

        int i = 0;
        CaptureResult result;
        do {
            result = listener.getCaptureResult(timeout);
            T value = result.get(resultKey);
            for (T expectedValue : expectedValues) {
                if (VERBOSE) {
                    Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
                            + value.toString());
                }
                if (value.equals(expectedValue)) {
                    return;
                }
            }
        } while (i++ < numResultsWait);

        throw new TimeoutRuntimeException(
                "Unable to get the expected result value " + expectedValues + " for key " +
                        resultKey.getName() + " after waiting for " + numResultsWait + " results");
    }

    /**
     * Wait for an expected result key value to be available within a certain number of results.
     *
     * <p>
     * Check the result immediately if numResultsWait is 0.
     * </p>
     *
     * @param listener The capture listener to get capture results from.
     * @param resultKey The capture result key associated with the result value.
     * @param expectedValue The result value to wait for.
     * @param numResultsWait Number of results to wait for before timing out.
     * @param timeout Wait timeout in ms.
     * @throws TimeoutRuntimeException If more than numResultsWait results are
     * seen before a result matching expectedValue arrives, or an individual wait
     * for a result times out after 'timeout' ms.
     */
    public static <T> void waitForResultValue(SimpleCaptureCallback listener,
            CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) {
        List<T> expectedValues = new ArrayList<T>();
        expectedValues.add(expectedValue);
        waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout);
    }
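
    // Illustrative usage sketch: waiting for the auto-focus state machine to lock after an
    // AF trigger. "resultListener" is again a hypothetical SimpleCaptureCallback attached to
    // the repeating request, and NUM_RESULTS_WAIT is a hypothetical per-test frame budget.
    //
    //     waitForResultValue(resultListener, CaptureResult.CONTROL_AF_STATE,
    //             CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED, NUM_RESULTS_WAIT,
    //             CAPTURE_RESULT_TIMEOUT_MS);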

    /**
     * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
     *
     * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
     * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
     * is unknown.</p>
     *
     * <p>This is a no-op for {@code LEGACY} devices since they don't report
     * the {@code aeState} result.</p>
     *
     * @param resultListener The capture listener to get capture results back.
     * @param numResultWaitForUnknownLatency Number of frames to wait if the camera device
     *                                       latency is unknown.
     * @param staticInfo corresponding camera device static metadata.
     * @param settingsTimeout wait timeout for settings application in ms.
     * @param numResultWait Number of results to wait for before timing out.
     */
    public static void waitForAeStable(SimpleCaptureCallback resultListener,
            int numResultWaitForUnknownLatency, StaticMetadata staticInfo,
            int settingsTimeout, int numResultWait) {
        waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo,
                settingsTimeout);

        if (!staticInfo.isHardwareLevelAtLeastLimited()) {
            // No-op for LEGACY devices, which don't report the aeState result.
            return;
        }
        List<Integer> expectedAeStates = new ArrayList<Integer>();
        expectedAeStates.add(CaptureResult.CONTROL_AE_STATE_CONVERGED);
        expectedAeStates.add(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED);
        waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
                numResultWait, settingsTimeout);
    }

    /**
     * Wait for enough results for settings to be applied.
     *
     * @param resultListener The capture listener to get capture results back.
     * @param numResultWaitForUnknownLatency Number of frames to wait if the camera device
     *                                       latency is unknown.
     * @param staticInfo corresponding camera device static metadata.
     * @param timeout wait timeout in ms.
     */
    public static void waitForSettingsApplied(SimpleCaptureCallback resultListener,
            int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) {
        int maxLatency = staticInfo.getSyncMaxLatency();
        if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
            maxLatency = numResultWaitForUnknownLatency;
        }
        // Wait for settings to take effect
        waitForNumResults(resultListener, maxLatency, timeout);
    }

    public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId,
            long frameDuration, StaticMetadata staticInfo) {
        // Add 0.05 here so an fps like 29.99 evaluates to 30
        int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f);
        boolean foundConstantMaxYUVRange = false;
        boolean foundYUVStreamingRange = false;
        boolean isExternalCamera = staticInfo.isExternalCamera();
        boolean isNIR = staticInfo.isNIRColorFilter();

        // Find a suitable target FPS range - as high as possible that covers the max YUV rate.
        // Also verify that there's a good preview rate as well.
        List<Range<Integer>> fpsRanges = Arrays.asList(
                staticInfo.getAeAvailableTargetFpsRangesChecked());
        Range<Integer> targetRange = null;
        for (Range<Integer> fpsRange : fpsRanges) {
            if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
                foundConstantMaxYUVRange = true;
                targetRange = fpsRange;
            } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) {
                targetRange = fpsRange;
            }
            if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
                foundYUVStreamingRange = true;
            }
        }

        if (!isExternalCamera) {
            assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported",
                    cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange);
        }

        if (!isNIR) {
            assertTrue(String.format(
                    "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
                    cameraId, minBurstFps), foundYUVStreamingRange);
        }
        return targetRange;
    }
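
    // Illustrative usage sketch: picking an AE target FPS range for a burst at a known frame
    // duration. A hypothetical 33333333 ns (~30 fps) minimum frame duration yields
    // minBurstFps = 30, so the helper looks for a fixed (30, 30) range and an (x <= 15, 30)
    // streaming range. "requestBuilder" is a hypothetical local of the caller.
    //
    //     Range<Integer> fpsRange = getSuitableFpsRangeForDuration(cameraId,
    //             /*frameDuration*/33333333L, staticInfo);
    //     requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);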

    /**
     * Get the candidate supported zoom ratios for testing.
     *
     * <p>
     * This function returns the boundary values of the supported zoom ratio range, in addition
     * to the 1.0x zoom ratio.
     * </p>
     */
    public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) {
        List<Float> zoomRatios = new ArrayList<Float>();
        Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked();
        zoomRatios.add(zoomRatioRange.getLower());
        if (zoomRatioRange.contains(1.0f) &&
                1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD &&
                zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) {
            zoomRatios.add(1.0f);
        }
        zoomRatios.add(zoomRatioRange.getUpper());

        return zoomRatios;
    }

    private static final int PERFORMANCE_CLASS_R = Build.VERSION_CODES.R;
    private static final int PERFORMANCE_CLASS_S = Build.VERSION_CODES.R + 1;

    /**
     * Check whether this mobile device is R performance class as defined in CDD.
     */
    public static boolean isRPerfClass() {
        return Build.VERSION.MEDIA_PERFORMANCE_CLASS == PERFORMANCE_CLASS_R;
    }

    /**
     * Check whether this mobile device is S performance class as defined in CDD.
     */
    public static boolean isSPerfClass() {
        return Build.VERSION.MEDIA_PERFORMANCE_CLASS == PERFORMANCE_CLASS_S;
    }

    /**
     * Check whether a camera id is a primary rear facing camera.
     */
    public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId)
            throws Exception {
        return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK);
    }

    /**
     * Check whether a camera id is a primary front facing camera.
     */
    public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)
            throws Exception {
        return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT);
    }

    private static boolean isPrimaryCamera(CameraManager manager, String cameraId,
            Integer lensFacing) throws Exception {
        CameraCharacteristics characteristics;
        Integer facing;

        // The primary camera for a given lens facing is the first camera with that facing in
        // the camera id list.
        String[] ids = manager.getCameraIdList();
        for (String id : ids) {
            characteristics = manager.getCameraCharacteristics(id);
            facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (lensFacing.equals(facing)) {
                if (cameraId.equals(id)) {
                    return true;
                } else {
                    return false;
                }
            }
        }
        return false;
    }
}