/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import androidx.annotation.NonNull;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureFailure;
import android.hardware.cam2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.MultiResolutionImageReader;
import android.hardware.camera2.cts.helpers.CameraErrorCollector;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.cts.helpers.CameraUtils;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.MandatoryStreamCombination;
import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
import android.hardware.camera2.params.MultiResolutionStreamInfo;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.location.LocationManager;
import android.media.ExifInterface;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.media.Image.Plane;
import android.os.Build;
import android.os.ConditionVariable;
import android.os.Handler;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.util.Range;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.exceptions.TimeoutRuntimeException;

import junit.framework.Assert;

import org.mockito.Mockito;

import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.text.ParseException;
import java.text.SimpleDateFormat;

/**
 * A package-private utility class wrapping up the common utility functions used by the
 * camera2 CTS tests.
 */
public class CameraTestUtils extends Assert {
    private static final String TAG = "CameraTestUtils";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
    public static final Size SIZE_BOUND_720P = new Size(1280, 720);
    public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
    public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
    public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
    public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
    // Only test the preview size that is no larger than 1080p.
    public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
    // Default timeouts for reaching various states
    public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
    public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
    public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
    public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
    public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
    public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
    public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
    public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;

    public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
    public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
    public static final int SESSION_READY_TIMEOUT_MS = 5000;
    public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;

    public static final int MAX_READER_IMAGES = 5;

    public static final int INDEX_ALGORITHM_AE = 0;
    public static final int INDEX_ALGORITHM_AWB = 1;
    public static final int INDEX_ALGORITHM_AF = 2;
    public static final int NUM_ALGORITHMS = 3; // AE, AWB and AF

    public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
    public static final String REPORT_LOG_NAME = "CtsCameraTestCases";

    private static final int EXIF_DATETIME_LENGTH = 19;
    private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
    private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
    private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
    private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
    private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;

    private static final float ZOOM_RATIO_THRESHOLD = 0.01f;

    private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
    private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);

    static {
        sTestLocation0.setTime(1199145600000L);
        sTestLocation0.setLatitude(37.736071);
        sTestLocation0.setLongitude(-122.441983);
        sTestLocation0.setAltitude(21.0);

        sTestLocation1.setTime(1199145601000L);
        sTestLocation1.setLatitude(0.736071);
        sTestLocation1.setLongitude(0.441983);
        sTestLocation1.setAltitude(1.0);

        sTestLocation2.setTime(1199145602000L);
        sTestLocation2.setLatitude(-89.736071);
        sTestLocation2.setLongitude(-179.441983);
        sTestLocation2.setAltitude(100000.0);
    }

    // Exif test data vectors.
    public static final ExifTestData[] EXIF_TEST_DATA = {
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation0,
                    /* orientation */90,
                    /* jpgQuality */(byte) 80,
                    /* thumbQuality */(byte) 75),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation1,
                    /* orientation */180,
                    /* jpgQuality */(byte) 90,
                    /* thumbQuality */(byte) 85),
            new ExifTestData(
                    /*gpsLocation*/ sTestLocation2,
                    /* orientation */270,
                    /* jpgQuality */(byte) 100,
                    /* thumbQuality */(byte) 100)
    };

    /**
     * Create an {@link android.media.ImageReader} object and get the surface.
     *
     * @param size The size of this ImageReader to be created.
     * @param format The format of this ImageReader to be created
     * @param maxNumImages The max number of images that can be acquired simultaneously.
     * @param listener The listener used by this ImageReader to notify callbacks.
     * @param handler The handler to use for any listener callbacks.
     */
    public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
            ImageReader.OnImageAvailableListener listener, Handler handler) {
        ImageReader reader;
        reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
                maxNumImages);
        reader.setOnImageAvailableListener(listener, handler);
        if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
        return reader;
    }

    /**
     * Create an ImageWriter and hook up the ImageListener.
     *
     * @param inputSurface The input surface of the ImageWriter.
     * @param maxImages The max number of Images that can be dequeued simultaneously.
     * @param listener The listener used by this ImageWriter to notify callbacks
     * @param handler The handler to post listener callbacks.
     * @return ImageWriter object created.
     */
    public static ImageWriter makeImageWriter(
            Surface inputSurface, int maxImages,
            ImageWriter.OnImageReleasedListener listener, Handler handler) {
        ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
        writer.setOnImageReleasedListener(listener, handler);
        return writer;
    }
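
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): a
    // typical pairing of makeImageReader() with the SimpleImageReaderListener defined later in
    // this class. The size, format and timeout values are arbitrary example choices.
    private static void exampleAcquireOneJpeg(Handler handler) throws InterruptedException {
        SimpleImageReaderListener readerListener = new SimpleImageReaderListener();
        ImageReader reader = makeImageReader(new Size(640, 480), ImageFormat.JPEG,
                MAX_READER_IMAGES, readerListener, handler);
        try {
            // A capture request targeting reader.getSurface() would be submitted here; the
            // listener then queues the resulting Image for the test to consume.
            Image image = readerListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);
            image.close();
        } finally {
            closeImageReader(reader);
        }
    }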

    /**
     * Utility class to store the targets for mandatory stream combination test.
     */
    public static class StreamCombinationTargets {
        public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
        public List<ImageReader> mJpegTargets = new ArrayList<>();
        public List<ImageReader> mYuvTargets = new ArrayList<>();
        public List<ImageReader> mY8Targets = new ArrayList<>();
        public List<ImageReader> mRawTargets = new ArrayList<>();
        public List<ImageReader> mHeicTargets = new ArrayList<>();
        public List<ImageReader> mDepth16Targets = new ArrayList<>();

        public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
        public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();

        public void close() {
            for (SurfaceTexture target : mPrivTargets) {
                target.release();
            }
            for (ImageReader target : mJpegTargets) {
                target.close();
            }
            for (ImageReader target : mYuvTargets) {
                target.close();
            }
            for (ImageReader target : mY8Targets) {
                target.close();
            }
            for (ImageReader target : mRawTargets) {
                target.close();
            }
            for (ImageReader target : mHeicTargets) {
                target.close();
            }
            for (ImageReader target : mDepth16Targets) {
                target.close();
            }

            for (MultiResolutionImageReader target : mPrivMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mJpegMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mYuvMultiResTargets) {
                target.close();
            }
            for (MultiResolutionImageReader target : mRawMultiResTargets) {
                target.close();
            }
        }
    }

    private static void configureTarget(StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
            int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig,
            boolean createMultiResiStreamConfig, ImageDropperListener listener, Handler handler) {
        if (createMultiResiStreamConfig) {
            Collection<MultiResolutionStreamInfo> multiResolutionStreams =
                    multiResStreamConfig.getOutputInfo(format);
            MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
                    multiResolutionStreams, format, numBuffers);
            multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
            Collection<OutputConfiguration> configs =
                    OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
            outputConfigs.addAll(configs);
            outputSurfaces.add(multiResReader.getSurface());
            switch (format) {
                case ImageFormat.PRIVATE:
                    targets.mPrivMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.JPEG:
                    targets.mJpegMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.YUV_420_888:
                    targets.mYuvMultiResTargets.add(multiResReader);
                    break;
                case ImageFormat.RAW_SENSOR:
                    targets.mRawMultiResTargets.add(multiResReader);
                    break;
                default:
                    fail("Unknown/Unsupported output format " + format);
            }
        } else {
            if (format == ImageFormat.PRIVATE) {
                SurfaceTexture target = new SurfaceTexture(/*random int*/1);
                target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
                OutputConfiguration config = new OutputConfiguration(new Surface(target));
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());
                targets.mPrivTargets.add(target);
            } else {
                ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
                        targetSize.getHeight(), format, numBuffers);
                target.setOnImageAvailableListener(listener, handler);
                OutputConfiguration config = new OutputConfiguration(target.getSurface());
                if (overridePhysicalCameraId != null) {
                    config.setPhysicalCameraId(overridePhysicalCameraId);
                }
                outputConfigs.add(config);
                outputSurfaces.add(config.getSurface());

                switch (format) {
                    case ImageFormat.JPEG:
                        targets.mJpegTargets.add(target);
                        break;
                    case ImageFormat.YUV_420_888:
                        targets.mYuvTargets.add(target);
                        break;
                    case ImageFormat.Y8:
                        targets.mY8Targets.add(target);
                        break;
                    case ImageFormat.RAW_SENSOR:
                        targets.mRawTargets.add(target);
                        break;
                    case ImageFormat.HEIC:
                        targets.mHeicTargets.add(target);
                        break;
                    case ImageFormat.DEPTH16:
                        targets.mDepth16Targets.add(target);
                        break;
                    default:
                        fail("Unknown/Unsupported output format " + format);
                }
            }
        }
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridenPhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
        List<Surface> uhSurfaces = new ArrayList<Surface>();
        setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
                numBuffers, substituteY8, substituteHeic, overridenPhysicalCameraId,
                multiResStreamConfig, handler);
    }

    public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
            StreamCombinationTargets targets,
            List<OutputConfiguration> outputConfigs,
            List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
            boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
            MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {

        ImageDropperListener imageDropperListener = new ImageDropperListener();
        List<Surface> chosenSurfaces;
        for (MandatoryStreamInformation streamInfo : streamsInfo) {
            if (streamInfo.isInput()) {
                continue;
            }
            chosenSurfaces = outputSurfaces;
            if (streamInfo.isUltraHighResolution()) {
                chosenSurfaces = uhSurfaces;
            }
            int format = streamInfo.getFormat();
            if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
                format = ImageFormat.Y8;
            } else if (substituteHeic && (format == ImageFormat.JPEG)) {
                format = ImageFormat.HEIC;
            }
            Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
            availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
            Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
            boolean createMultiResReader =
                    (multiResStreamConfig != null &&
                     !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
                     streamInfo.isMaximumSize());
            switch (format) {
                case ImageFormat.PRIVATE:
                case ImageFormat.JPEG:
                case ImageFormat.YUV_420_888:
                case ImageFormat.Y8:
                case ImageFormat.HEIC:
                case ImageFormat.DEPTH16:
                {
                    configureTarget(targets, outputConfigs, chosenSurfaces, format,
                            targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
                            createMultiResReader, imageDropperListener, handler);
                    break;
                }
                case ImageFormat.RAW_SENSOR: {
                    // targetSize could be null in the logical camera case where only
                    // physical camera supports RAW stream.
                    if (targetSize != null) {
                        configureTarget(targets, outputConfigs, chosenSurfaces, format,
                                targetSize, numBuffers, overridePhysicalCameraId,
                                multiResStreamConfig, createMultiResReader, imageDropperListener,
                                handler);
                    }
                    break;
                }
                default:
                    fail("Unknown output format " + format);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageReader} object.
     * @param reader an {@link android.media.ImageReader} to close.
     */
    public static void closeImageReader(ImageReader reader) {
        if (reader != null) {
            reader.close();
        }
    }

    /**
     * Close the pending images then close current active {@link ImageReader} objects.
     */
    public static void closeImageReaders(ImageReader[] readers) {
        if ((readers != null) && (readers.length > 0)) {
            for (ImageReader reader : readers) {
                CameraTestUtils.closeImageReader(reader);
            }
        }
    }

    /**
     * Close pending images and clean up an {@link android.media.ImageWriter} object.
     * @param writer an {@link android.media.ImageWriter} to close.
     */
    public static void closeImageWriter(ImageWriter writer) {
        if (writer != null) {
            writer.close();
        }
    }
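
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): driving
    // setupConfigurationTargets() from a device's first mandatory stream combination. The
    // characteristics and handler are assumed to be provided by the calling test.
    private static void exampleSetupMandatoryTargets(CameraCharacteristics characteristics,
            Handler handler) {
        MandatoryStreamCombination[] combinations = characteristics.get(
                CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS);
        if (combinations == null || combinations.length == 0) {
            return;
        }
        StreamCombinationTargets targets = new StreamCombinationTargets();
        List<OutputConfiguration> outputConfigs = new ArrayList<>();
        List<Surface> outputSurfaces = new ArrayList<>();
        setupConfigurationTargets(combinations[0].getStreamsInformation(), targets, outputConfigs,
                outputSurfaces, MAX_READER_IMAGES, /*substituteY8*/ false,
                /*substituteHeic*/ false, /*overridenPhysicalCameraId*/ null,
                /*multiResStreamConfig*/ null, handler);
        // A capture session would be configured from outputConfigs here, captures issued, and
        // finally the targets released.
        targets.close();
    }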

    /**
     * Dummy listener that releases the image immediately once it is available.
     *
     * <p>
     * It can be used for the case where we don't care about the image data at all.
     * </p>
     */
    public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    image.close();
                    mImagesDropped++;
                }
            }
        }

        public synchronized int getImageCount() {
            return mImagesDropped;
        }

        public synchronized void resetImageCount() {
            mImagesDropped = 0;
        }

        private int mImagesDropped = 0;
    }

    /**
     * Image listener that releases the image immediately after validating it.
     */
    public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
        private Size mSize;
        private int mFormat;
        // Whether the parent ImageReader is valid or not. If the parent ImageReader
        // is destroyed, the acquired Image may become invalid.
        private boolean mReaderIsValid;

        public ImageVerifierListener(Size sz, int format) {
            mSize = sz;
            mFormat = format;
            mReaderIsValid = true;
        }

        public synchronized void onReaderDestroyed() {
            mReaderIsValid = false;
        }

        @Override
        public synchronized void onImageAvailable(ImageReader reader) {
            Image image = null;
            try {
                image = reader.acquireNextImage();
            } finally {
                if (image != null) {
                    // Should only do some quick validity checks in callback, as the ImageReader
                    // could be closed asynchronously, which will close all images acquired from
                    // this ImageReader.
                    checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
                    // checkAndroidImageFormat calls into underlying Image object, which could
                    // become invalid if the ImageReader is destroyed.
                    if (mReaderIsValid) {
                        checkAndroidImageFormat(image);
                    }
                    image.close();
                }
            }
        }
    }

    public static class SimpleImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        private final LinkedBlockingQueue<Image> mQueue =
                new LinkedBlockingQueue<Image>();
        // Indicate whether this listener will drop images or not,
        // when the queued images reach the reader's maxImages.
        private final boolean mAsyncMode;
        // maxImages held by the queue in async mode.
        private final int mMaxImages;

        /**
         * Create a synchronous SimpleImageReaderListener that queues the images
         * automatically when they are available; no image will be dropped. If
         * the caller doesn't call getImage(), the producer will eventually run
         * into buffer starvation.
         */
        public SimpleImageReaderListener() {
            mAsyncMode = false;
            mMaxImages = 0;
        }

        /**
         * Create a synchronous/asynchronous SimpleImageReaderListener that
         * queues the images automatically when they are available. For an
         * asynchronous listener, an image will be dropped once the number of
         * queued images reaches maxImages; if the caller doesn't call getImage(),
         * the producer will not be blocked. For a synchronous listener, no image
         * will be dropped; if the caller doesn't call getImage(), the producer
         * will eventually run into buffer starvation.
         *
         * @param asyncMode Whether the listener operates in asynchronous mode.
         * @param maxImages The max number of images held by this listener.
         */
        public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
            mAsyncMode = asyncMode;
            mMaxImages = maxImages;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            try {
                Image image = reader.acquireNextImage();
                if (image == null) {
                    return;
                }
                mQueue.put(image);
                if (mAsyncMode && mQueue.size() >= mMaxImages) {
                    Image img = mQueue.poll();
                    img.close();
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onImageAvailable");
            }
        }

        /**
         * Get an image from the image reader.
         *
         * @param timeout Timeout value for the wait.
         * @return The image from the image reader.
         */
        public Image getImage(long timeout) throws InterruptedException {
            Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
            assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
            return image;
        }

        /**
         * Drain the pending images held by this listener currently.
         */
        public void drain() {
            while (!mQueue.isEmpty()) {
                Image image = mQueue.poll();
                assertNotNull("Unable to get an image", image);
                image.close();
            }
        }
    }

    public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
        private final Semaphore mImageReleasedSema = new Semaphore(0);
        private final ImageWriter mWriter;

        @Override
        public void onImageReleased(ImageWriter writer) {
            if (writer != mWriter) {
                return;
            }

            if (VERBOSE) {
                Log.v(TAG, "Input image is released");
            }
            mImageReleasedSema.release();
        }

        public SimpleImageWriterListener(ImageWriter writer) {
            if (writer == null) {
                throw new IllegalArgumentException("writer cannot be null");
            }
            mWriter = writer;
        }

        public void waitForImageReleased(long timeoutMs) throws InterruptedException {
            if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
                fail("wait for image available timed out after " + timeoutMs + "ms");
            }
        }
    }
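
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): queueing
    // a reprocess input image and blocking until the writer releases it, using the
    // SimpleImageWriterListener above. The caller is assumed to own the writer and input image.
    private static void exampleQueueInputAndWaitForRelease(ImageWriter writer, Image inputImage,
            Handler handler) throws InterruptedException {
        SimpleImageWriterListener writerListener = new SimpleImageWriterListener(writer);
        writer.setOnImageReleasedListener(writerListener, handler);
        writer.queueInputImage(inputImage);
        writerListener.waitForImageReleased(CAPTURE_IMAGE_TIMEOUT_MS);
    }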

    public static class ImageAndMultiResStreamInfo {
        public final Image image;
        public final MultiResolutionStreamInfo streamInfo;

        public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
            this.image = image;
            this.streamInfo = streamInfo;
        }
    }

    public static class SimpleMultiResolutionImageReaderListener
            implements ImageReader.OnImageAvailableListener {
        public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
                int maxBuffers, boolean acquireLatest) {
            mOwner = owner;
            mMaxBuffers = maxBuffers;
            mAcquireLatest = acquireLatest;
        }

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (VERBOSE) Log.v(TAG, "new image available");

            if (mAcquireLatest) {
                mLastReader = reader;
                mImageAvailable.open();
            } else {
                if (mQueue.size() < mMaxBuffers) {
                    Image image = reader.acquireNextImage();
                    MultiResolutionStreamInfo multiResStreamInfo =
                            mOwner.getStreamInfoForImageReader(reader);
                    mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
                }
            }
        }

        public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
                throws Exception {
            if (mAcquireLatest) {
                Image image = null;
                if (mImageAvailable.block(timeoutMs)) {
                    if (mLastReader != null) {
                        image = mLastReader.acquireLatestImage();
                        if (VERBOSE) Log.v(TAG, "acquireLatestImage");
                    } else {
                        fail("invalid image reader");
                    }
                    mImageAvailable.close();
                } else {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return new ImageAndMultiResStreamInfo(image,
                        mOwner.getStreamInfoForImageReader(mLastReader));
            } else {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
                        java.util.concurrent.TimeUnit.MILLISECONDS);
                if (imageAndInfo == null) {
                    fail("wait for image available timed out after " + timeoutMs + "ms");
                }
                return imageAndInfo;
            }
        }

        public void reset() {
            while (!mQueue.isEmpty()) {
                ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
                assertNotNull("Acquired image is not valid", imageAndInfo.image);
                imageAndInfo.image.close();
            }
            mImageAvailable.close();
            mLastReader = null;
        }

        private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
                new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
        private final MultiResolutionImageReader mOwner;
        private final int mMaxBuffers;
        private final boolean mAcquireLatest;
        private ConditionVariable mImageAvailable = new ConditionVariable();
        private ImageReader mLastReader = null;
    }

    public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
        private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
                new LinkedBlockingQueue<TotalCaptureResult>();
        private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
                new LinkedBlockingQueue<>();
        // (Surface, framenumber) pair for lost buffers
        private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
                new LinkedBlockingQueue<>();
        private final LinkedBlockingQueue<Integer> mAbortQueue =
                new LinkedBlockingQueue<>();
        // Pair<CaptureRequest, Long> is a pair of capture request and timestamp.
        private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
                new LinkedBlockingQueue<>();
        // Pair<Int, Long> is a pair of sequence id and frame number
        private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
                new LinkedBlockingQueue<>();

        private AtomicLong mNumFramesArrived = new AtomicLong(0);

        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
            try {
                mCaptureStartQueue.put(new Pair(request, timestamp));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureStarted");
            }
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                mNumFramesArrived.incrementAndGet();
                mQueue.put(result);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureCompleted");
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            try {
                mFailureQueue.put(failure);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureFailed");
            }
        }

        @Override
        public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
            try {
                mAbortQueue.put(sequenceId);
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureAborted");
            }
        }

        @Override
        public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
                long frameNumber) {
            try {
                mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureSequenceCompleted");
            }
        }

        @Override
        public void onCaptureBufferLost(CameraCaptureSession session,
                CaptureRequest request, Surface target, long frameNumber) {
            try {
                mBufferLostQueue.put(new Pair<>(target, frameNumber));
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException(
                        "Can't handle InterruptedException in onCaptureBufferLost");
            }
        }

        public long getTotalNumFrames() {
            return mNumFramesArrived.get();
        }

        public CaptureResult getCaptureResult(long timeout) {
            return getTotalCaptureResult(timeout);
        }

        public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
            try {
                long currentTs = -1L;
                TotalCaptureResult result;
                while (true) {
                    result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                    if (result == null) {
                        throw new RuntimeException(
                                "Wait for a capture result timed out in " + timeout + "ms");
                    }
                    currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
                    if (currentTs == timestamp) {
                        return result;
                    }
                }

            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public TotalCaptureResult getTotalCaptureResult(long timeout) {
            try {
                TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
                assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
                return result;
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        /**
         * Get the {@link #CaptureResult capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #CaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            return getTotalCaptureResultForRequest(myRequest, numResultsWait);
        }

        /**
         * Get the {@link #TotalCaptureResult total capture result} for a given
         * {@link #CaptureRequest capture request}.
         *
         * @param myRequest The {@link #CaptureRequest capture request} whose
         *            corresponding {@link #TotalCaptureResult capture result} was
         *            being waited for
         * @param numResultsWait Number of frames to wait for the capture result
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before the result matching myRequest arrives, or each
         *            individual wait for result times out after
         *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
         */
        public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
                int numResultsWait) {
            ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
            captureRequests.add(myRequest);
            return getTotalCaptureResultsForRequests(captureRequests, numResultsWait)[0];
        }

        /**
         * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
         * {@link #CaptureRequest capture requests}. This can be used when the order of results
         * may not be the same as the order of requests.
         *
         * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
         *            corresponding {@link #TotalCaptureResult capture results} are
         *            being waited for.
         * @param numResultsWait Number of frames to wait for the capture results
         *            before timeout.
         * @throws TimeoutRuntimeException If more than numResultsWait results are
         *            seen before all the results matching captureRequests arrive.
         */
        public TotalCaptureResult[] getTotalCaptureResultsForRequests(
                List<CaptureRequest> captureRequests, int numResultsWait) {
            if (numResultsWait < 0) {
                throw new IllegalArgumentException("numResultsWait must be no less than 0");
            }
            if (captureRequests == null || captureRequests.size() == 0) {
                throw new IllegalArgumentException("captureRequests must have at least 1 request.");
            }

            // Create a request -> a list of result indices map that it will wait for.
            HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
            for (int i = 0; i < captureRequests.size(); i++) {
                CaptureRequest request = captureRequests.get(i);
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices == null) {
                    indices = new ArrayList<>();
                    remainingResultIndicesMap.put(request, indices);
                }
                indices.add(i);
            }

            TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
            int i = 0;
            do {
                TotalCaptureResult result = getTotalCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                CaptureRequest request = result.getRequest();
                ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
                if (indices != null) {
                    results[indices.get(0)] = result;
                    indices.remove(0);

                    // Remove the entry if all results for this request have been fulfilled.
                    if (indices.isEmpty()) {
                        remainingResultIndicesMap.remove(request);
                    }
                }

                if (remainingResultIndicesMap.isEmpty()) {
                    return results;
                }
            } while (i++ < numResultsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture result after "
                    + "waiting for " + numResultsWait + " results");
        }

        /**
         * Get an array list of {@link #CaptureFailure capture failures} with maxNumFailures entries
         * at most. If it times out before maxNumFailures failures are received, return the failures
         * received so far.
         *
         * @param maxNumFailures The maximal number of failures to return. If it times out before
         *            the maximal number of failures are received, return the received
         *            failures so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failure.
         */
        public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
            ArrayList<CaptureFailure> failures = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumFailures; i++) {
                    CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (failure == null) {
                        // If waiting on a failure times out, return the failures so far.
                        break;
                    }
                    failures.add(failure);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return failures;
        }

        /**
         * Get an array list of lost buffers with maxNumLost entries at most.
         * If it times out before maxNumLost buffer lost callbacks are received, return the
         * lost callbacks received so far.
         *
         * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
         *            before the maximal number of failures are received, return the received
         *            buffer lost failures so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failure.
         */
        public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
            ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumLost; i++) {
                    Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (failure == null) {
                        // If waiting on a failure times out, return the failures so far.
                        break;
                    }
                    failures.add(failure);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return failures;
        }

        /**
         * Get an array list of aborted capture sequence ids with maxNumAborts entries
         * at most. If it times out before maxNumAborts are received, return the aborted sequences
         * received so far.
         *
         * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
         *            before the maximal number of aborts are received, return the received
         *            failed sequences so far.
         * @throws UnsupportedOperationException If an error happens while waiting on the failed
         *            sequences.
         */
        public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
            ArrayList<Integer> abortList = new ArrayList<>();
            try {
                for (int i = 0; i < maxNumAborts; i++) {
                    Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
                            TimeUnit.MILLISECONDS);
                    if (abortSequence == null) {
                        break;
                    }
                    abortList.add(abortSequence);
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }

            return abortList;
        }

        /**
         * Wait until the capture start of a request and expected timestamp arrives or it times
         * out after a number of capture starts.
         *
         * @param request The request for the capture start to wait for.
         * @param timestamp The timestamp for the capture start to wait for.
         * @param numCaptureStartsWait The number of capture start events to wait for before timing
         *            out.
         */
        public void waitForCaptureStart(CaptureRequest request, Long timestamp,
                int numCaptureStartsWait) throws Exception {
            Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);

            int i = 0;
            do {
                Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
                        CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);

                if (shutter == null) {
                    throw new TimeoutRuntimeException("Unable to get any more capture start " +
                            "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
                } else if (expectedShutter.equals(shutter)) {
                    return;
                }

            } while (i++ < numCaptureStartsWait);

            throw new TimeoutRuntimeException("Unable to get the expected capture start " +
                    "event after waiting for " + numCaptureStartsWait + " capture starts");
        }

        /**
         * Wait until it receives the capture sequence completed callback for a given sequence ID.
         *
         * @param sequenceId The sequence ID of the capture sequence completed callback to wait for.
         * @param timeoutMs Time to wait for each capture sequence complete callback before
         *            timing out.
         */
        public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
            try {
                while (true) {
                    Pair<Integer, Long> completedSequence =
                            mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
                    assertNotNull("Wait for a capture sequence completed timed out in " +
                            timeoutMs + "ms", completedSequence);

                    if (completedSequence.first.equals(sequenceId)) {
                        return completedSequence.second.longValue();
                    }
                }
            } catch (InterruptedException e) {
                throw new UnsupportedOperationException("Unhandled interrupted exception", e);
            }
        }

        public boolean hasMoreResults() {
            return !mQueue.isEmpty();
        }

        public boolean hasMoreFailures() {
            return !mFailureQueue.isEmpty();
        }

        public int getNumLostBuffers() {
            return mBufferLostQueue.size();
        }

        public boolean hasMoreAbortedSequences() {
            return !mAbortQueue.isEmpty();
        }

        public void drain() {
            mQueue.clear();
            mNumFramesArrived.getAndSet(0);
            mFailureQueue.clear();
            mBufferLostQueue.clear();
            mCaptureStartQueue.clear();
            mAbortQueue.clear();
        }
    }

    public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
        int[] capabilities =
                characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        for (int c : capabilities) {
            if (c == capability) {
                return true;
            }
        }
        return false;
    }

    public static boolean isSystemCamera(CameraManager manager, String cameraId)
            throws CameraAccessException {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        return hasCapability(characteristics,
                CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
    }

    public static String[] getCameraIdListForTesting(CameraManager manager,
            boolean getSystemCameras)
            throws CameraAccessException {
        String[] ids = manager.getCameraIdListNoLazy();
        List<String> idsForTesting = new ArrayList<String>();
        for (String id : ids) {
            boolean isSystemCamera = isSystemCamera(manager, id);
            if (getSystemCameras == isSystemCamera) {
                idsForTesting.add(id);
            }
        }
        return idsForTesting.toArray(new String[idsForTesting.size()]);
    }

    public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
            boolean getSystemCameras)
            throws CameraAccessException {
        Set<String> cameraIds = new HashSet<String>(
                Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
        Set<Set<String>> combinations = manager.getConcurrentCameraIds();
        Set<Set<String>> correctComb = new HashSet<Set<String>>();
        for (Set<String> comb : combinations) {
            Set<String> filteredIds = new HashSet<String>();
            for (String id : comb) {
                if (cameraIds.contains(id)) {
                    filteredIds.add(id);
                }
            }
            if (filteredIds.isEmpty()) {
                continue;
            }
            correctComb.add(filteredIds);
        }
        return correctComb;
    }
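
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): issuing
    // a single capture and matching its TotalCaptureResult using the SimpleCaptureCallback defined
    // above. The session, request and handler are assumed to be set up by the calling test.
    private static TotalCaptureResult exampleCaptureAndGetResult(CameraCaptureSession session,
            CaptureRequest request, Handler handler) throws CameraAccessException {
        SimpleCaptureCallback captureListener = new SimpleCaptureCallback();
        session.capture(request, captureListener, handler);
        return captureListener.getTotalCaptureResultForRequest(request, /*numResultsWait*/ 1);
    }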

    /**
     * Block until the camera is opened.
     *
     * <p>Don't use this to test #onDisconnected/#onError since this will throw
     * an AssertionError if it fails to open the camera device.</p>
     *
     * @return CameraDevice opened camera device
     *
     * @throws IllegalArgumentException
     *            If the handler is null, or if the handler's looper is current.
     * @throws CameraAccessException
     *            If open fails immediately.
     * @throws BlockingOpenException
     *            If open fails after blocking for some amount of time.
     * @throws TimeoutRuntimeException
     *            If opening times out. Typically unrecoverable.
     */
    public static CameraDevice openCamera(CameraManager manager, String cameraId,
            CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
            BlockingOpenException {

        /**
         * Although camera2 API allows 'null' Handler (it will just use the current
         * thread's Looper), this is not what we want for CTS.
         *
         * In CTS the default looper is used only to process events in between test runs,
         * so anything sent there would not be executed inside a test and the test would fail.
         *
         * In this case, BlockingCameraManager#openCamera performs the check for us.
         */
        return (new BlockingCameraManager(manager)).openCamera(cameraId, listener, handler);
    }

    /**
     * Block until the camera is opened.
     *
     * <p>Don't use this to test #onDisconnected/#onError since this will throw
     * an AssertionError if it fails to open the camera device.</p>
     *
     * @throws IllegalArgumentException
     *            If the handler is null, or if the handler's looper is current.
     * @throws CameraAccessException
     *            If open fails immediately.
     * @throws BlockingOpenException
     *            If open fails after blocking for some amount of time.
     * @throws TimeoutRuntimeException
     *            If opening times out. Typically unrecoverable.
     */
    public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
            throws CameraAccessException,
            BlockingOpenException {
        return openCamera(manager, cameraId, /*listener*/null, handler);
    }
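
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): the
    // expected open/close pattern around the blocking openCamera() helpers above.
    private static void exampleOpenUseAndClose(CameraManager manager, String cameraId,
            Handler handler) throws Exception {
        CameraDevice camera = openCamera(manager, cameraId, handler);
        try {
            // Sessions would be created and captures issued here.
        } finally {
            camera.close();
        }
    }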

    /**
     * Configure a new camera session with output surfaces and type.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces, boolean isHighSpeed,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        if (isHighSpeed) {
            camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
                    sessionListener, handler);
        } else {
            camera.createCaptureSession(outputSurfaces, sessionListener, handler);
        }
        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        String sessionType = isHighSpeed ? "High Speed" : "Normal";
        assertTrue("Capture session type must be " + sessionType,
                isHighSpeed ==
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(session.getClass()));

        return session;
    }

    /**
     * Build a new constrained camera session with output surfaces, type and recording session
     * parameters.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     * @param initialRequest Initial request settings to use as session parameters.
     */
    public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
            Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);

        List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
        for (Surface surface : outputSurfaces) {
            outConfigurations.add(new OutputConfiguration(surface));
        }
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
                new HandlerExecutor(handler), sessionListener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        assertTrue("Capture session type must be High Speed",
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
                        session.getClass()));

        return session;
    }

    /**
     * Configure a new camera session with output configurations.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputs The OutputConfiguration list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
            List<OutputConfiguration> outputs,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
        CameraCaptureSession session =
                sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        return session;
    }

    /**
     * Try configuring a new camera session with output configurations.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputs The OutputConfiguration list that is used for camera output.
     * @param initialRequest The session parameters passed in during stream configuration
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
            List<OutputConfiguration> outputs, CaptureRequest initialRequest,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
                sessionListener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
                BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
        int state = sessionListener.getStateWaiter().waitForAnyOfStates(
                Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);

        CameraCaptureSession session = null;
        if (state == BlockingSessionCallback.SESSION_READY) {
            session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
            assertFalse("Camera session should not be a reprocessable session",
                    session.isReprocessable());
        }
        return session;
    }
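
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): wrapping
    // plain Surfaces into OutputConfigurations before calling configureCameraSessionWithConfig().
    private static CameraCaptureSession exampleConfigureSessionFromSurfaces(CameraDevice camera,
            List<Surface> surfaces, CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        List<OutputConfiguration> outputs = new ArrayList<>(surfaces.size());
        for (Surface surface : surfaces) {
            outputs.add(new OutputConfiguration(surface));
        }
        return configureCameraSessionWithConfig(camera, outputs, listener, handler);
    }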

    /**
     * Configure a new camera session with output surfaces and initial session parameters.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when session is available.
     * @param handler The handler used to notify callbacks.
     * @param initialRequest Initial request settings to use as session parameters.
     */
    public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
            List<Surface> outputSurfaces, BlockingSessionCallback listener,
            Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
        List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
        for (Surface surface : outputSurfaces) {
            outConfigurations.add(new OutputConfiguration(surface));
        }
        SessionConfiguration sessionConfig = new SessionConfiguration(
                SessionConfiguration.SESSION_REGULAR, outConfigurations,
                new HandlerExecutor(handler), listener);
        sessionConfig.setSessionParameters(initialRequest);
        camera.createCaptureSession(sessionConfig);

        CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        assertFalse("Camera session should not be a reprocessable session",
                session.isReprocessable());
        assertFalse("Capture session type must be regular",
                CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
                        session.getClass()));

        return session;
    }

    /**
     * Configure a new camera session with output surfaces.
     *
     * @param camera The CameraDevice to be configured.
     * @param outputSurfaces The surface list that is used for camera output.
     * @param listener The callback CameraDevice will notify when capture results are available.
     */
    public static CameraCaptureSession configureCameraSession(CameraDevice camera,
            List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {

        return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
                listener, handler);
    }

    public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
            InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        for (Surface surface : outputSurfaces) {
            outputConfigs.add(new OutputConfiguration(surface));
        }
        CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
                camera, inputConfiguration, outputConfigs, listener, handler);

        return session;
    }
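
    // Illustrative sketch only (hypothetical helper, not part of the original utilities): building
    // a YUV reprocessable session via configureReprocessableCameraSession(). The input size and
    // output surfaces are assumed to be validated against the device's capabilities by the caller.
    private static CameraCaptureSession exampleYuvReprocessableSession(CameraDevice camera,
            Size inputSize, List<Surface> outputSurfaces,
            CameraCaptureSession.StateCallback listener, Handler handler)
            throws CameraAccessException {
        InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(),
                inputSize.getHeight(), ImageFormat.YUV_420_888);
        return configureReprocessableCameraSession(camera, inputConfig, outputSurfaces, listener,
                handler);
    }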
BlockingSessionCallback(listener); 1420 SessionConfiguration sessionConfig = new SessionConfiguration( 1421 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler), 1422 sessionListener); 1423 sessionConfig.setInputConfiguration(inputConfiguration); 1424 camera.createCaptureSession(sessionConfig); 1425 1426 Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY, 1427 BlockingSessionCallback.SESSION_CONFIGURE_FAILED}; 1428 int state = sessionListener.getStateWaiter().waitForAnyOfStates( 1429 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS); 1430 1431 assertTrue("Creating a reprocessable session failed.", 1432 state == BlockingSessionCallback.SESSION_READY); 1433 CameraCaptureSession session = 1434 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS); 1435 assertTrue("Camera session should be a reprocessable session", session.isReprocessable()); 1436 1437 return session; 1438 } 1439 1440 /** 1441 * Create a reprocessable camera session with input and output configurations. 1442 * 1443 * @param camera The CameraDevice to be configured. 1444 * @param inputConfiguration The input configuration used to create this session. 1445 * @param outputs The output configurations used to create this session. 1446 * @param listener The callback CameraDevice will notify when capture results are available. 1447 * @param handler The handler used to notify callbacks. 1448 * @return The session ready to use. 1449 * @throws CameraAccessException 1450 */ configureReprocCameraSessionWithConfig(CameraDevice camera, InputConfiguration inputConfiguration, List<OutputConfiguration> outputs, CameraCaptureSession.StateCallback listener, Handler handler)1451 public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera, 1452 InputConfiguration inputConfiguration, List<OutputConfiguration> outputs, 1453 CameraCaptureSession.StateCallback listener, Handler handler) 1454 throws CameraAccessException { 1455 BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener); 1456 camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs, 1457 sessionListener, handler); 1458 1459 Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY, 1460 BlockingSessionCallback.SESSION_CONFIGURE_FAILED}; 1461 int state = sessionListener.getStateWaiter().waitForAnyOfStates( 1462 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS); 1463 1464 assertTrue("Creating a reprocessable session failed.", 1465 state == BlockingSessionCallback.SESSION_READY); 1466 1467 CameraCaptureSession session = 1468 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS); 1469 assertTrue("Camera session should be a reprocessable session", session.isReprocessable()); 1470 1471 return session; 1472 } 1473 assertArrayNotEmpty(T arr, String message)1474 public static <T> void assertArrayNotEmpty(T arr, String message) { 1475 assertTrue(message, arr != null && Array.getLength(arr) > 0); 1476 } 1477 1478 /** 1479 * Check if the format is a legal YUV format camera supported. 1480 */ checkYuvFormat(int format)1481 public static void checkYuvFormat(int format) { 1482 if ((format != ImageFormat.YUV_420_888) && 1483 (format != ImageFormat.NV21) && 1484 (format != ImageFormat.YV12)) { 1485 fail("Wrong formats: " + format); 1486 } 1487 } 1488 1489 /** 1490 * Check if image size and format match given size and format. 
     */
    public static void checkImage(Image image, int width, int height, int format) {
        // Image reader will wrap YV12/NV21 image by YUV_420_888
        if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
            format = ImageFormat.YUV_420_888;
        }
        assertNotNull("Input image is invalid", image);
        assertEquals("Format doesn't match", format, image.getFormat());
        assertEquals("Width doesn't match", width, image.getWidth());
        assertEquals("Height doesn't match", height, image.getHeight());
    }

    /**
     * <p>Read data from all planes of an Image into a contiguous, unpadded, unpacked
     * 1-D linear byte array, such that it can be written to disk or accessed
     * conveniently by software. It supports YUV_420_888/NV21/YV12 and JPEG input
     * Image formats.</p>
     *
     * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
     * the Y plane data first, followed by the U(Cb) and V(Cr) planes if there are any
     * (xstride = width, ystride = height for chroma and luma components).</p>
     *
     * <p>For JPEG, it returns a 1-D byte array that contains a complete JPEG image.</p>
     *
     * <p>For YUV P010, it returns a byte array that contains the Y plane first, followed
     * by the interleaved U(Cb)/V(Cr) plane.</p>
     */
    public static byte[] getDataFromImage(Image image) {
        assertNotNull("Invalid image:", image);
        int format = image.getFormat();
        int width = image.getWidth();
        int height = image.getHeight();
        int rowStride, pixelStride;
        byte[] data = null;

        // Read image data
        Plane[] planes = image.getPlanes();
        assertTrue("Fail to get image planes", planes != null && planes.length > 0);

        // Check image validity
        checkAndroidImageFormat(image);

        ByteBuffer buffer = null;
        // JPEG doesn't have pixelStride and rowStride, treat it as a 1D buffer.
        // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, and HEIC
        if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
                format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG ||
                format == ImageFormat.HEIC) {
            buffer = planes[0].getBuffer();
            assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer);
            data = new byte[buffer.remaining()];
            buffer.get(data);
            buffer.rewind();
            return data;
        } else if (format == ImageFormat.YCBCR_P010) {
            // P010 samples are stored within 16 bit values
            int offset = 0;
            int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
            data = new byte[width * height * bytesPerPixelRounded];
            assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
                    planes.length == 3);
            for (int i = 0; i < 2; i++) {
                buffer = planes[i].getBuffer();
                assertNotNull("Fail to get bytebuffer from plane", buffer);
                buffer.rewind();
                rowStride = planes[i].getRowStride();
                if (VERBOSE) {
                    Log.v(TAG, "rowStride " + rowStride);
                    Log.v(TAG, "width " + width);
                    Log.v(TAG, "height " + height);
                }
                int h = (i == 0) ?
height : height / 2; 1563 for (int row = 0; row < h; row++) { 1564 // Each 10-bit pixel occupies 2 bytes 1565 int length = 2 * width; 1566 buffer.get(data, offset, length); 1567 offset += length; 1568 if (row < h - 1) { 1569 buffer.position(buffer.position() + rowStride - length); 1570 } 1571 } 1572 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i); 1573 buffer.rewind(); 1574 } 1575 return data; 1576 } 1577 1578 int offset = 0; 1579 data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8]; 1580 int maxRowSize = planes[0].getRowStride(); 1581 for (int i = 0; i < planes.length; i++) { 1582 if (maxRowSize < planes[i].getRowStride()) { 1583 maxRowSize = planes[i].getRowStride(); 1584 } 1585 } 1586 byte[] rowData = new byte[maxRowSize]; 1587 if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes"); 1588 for (int i = 0; i < planes.length; i++) { 1589 buffer = planes[i].getBuffer(); 1590 assertNotNull("Fail to get bytebuffer from plane", buffer); 1591 buffer.rewind(); 1592 rowStride = planes[i].getRowStride(); 1593 pixelStride = planes[i].getPixelStride(); 1594 assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0); 1595 if (VERBOSE) { 1596 Log.v(TAG, "pixelStride " + pixelStride); 1597 Log.v(TAG, "rowStride " + rowStride); 1598 Log.v(TAG, "width " + width); 1599 Log.v(TAG, "height " + height); 1600 } 1601 // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling. 1602 int w = (i == 0) ? width : width / 2; 1603 int h = (i == 0) ? height : height / 2; 1604 assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w); 1605 for (int row = 0; row < h; row++) { 1606 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8; 1607 int length; 1608 if (pixelStride == bytesPerPixel) { 1609 // Special case: optimized read of the entire row 1610 length = w * bytesPerPixel; 1611 buffer.get(data, offset, length); 1612 offset += length; 1613 } else { 1614 // Generic case: should work for any pixelStride but slower. 
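                        // This branch keeps one byte per pixel (data[offset++] = rowData[col * pixelStride]),
                        // which matches the 8-bit YUV planes that normally reach it. For example, an
                        // interleaved chroma plane with pixelStride == 2, w == 640 and bytesPerPixel == 1
                        // reads length = (640 - 1) * 2 + 1 = 1279 bytes and copies every second byte, so
                        // the read never overruns the row.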
1615 // Use intermediate buffer to avoid read byte-by-byte from 1616 // DirectByteBuffer, which is very bad for performance 1617 length = (w - 1) * pixelStride + bytesPerPixel; 1618 buffer.get(rowData, 0, length); 1619 for (int col = 0; col < w; col++) { 1620 data[offset++] = rowData[col * pixelStride]; 1621 } 1622 } 1623 // Advance buffer the remainder of the row stride 1624 if (row < h - 1) { 1625 buffer.position(buffer.position() + rowStride - length); 1626 } 1627 } 1628 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i); 1629 buffer.rewind(); 1630 } 1631 return data; 1632 } 1633 1634 /** 1635 * <p>Check android image format validity for an image, only support below formats:</p> 1636 * 1637 * <p>YUV_420_888/NV21/YV12, can add more for future</p> 1638 */ checkAndroidImageFormat(Image image)1639 public static void checkAndroidImageFormat(Image image) { 1640 int format = image.getFormat(); 1641 Plane[] planes = image.getPlanes(); 1642 switch (format) { 1643 case ImageFormat.YUV_420_888: 1644 case ImageFormat.NV21: 1645 case ImageFormat.YV12: 1646 case ImageFormat.YCBCR_P010: 1647 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length); 1648 break; 1649 case ImageFormat.JPEG: 1650 case ImageFormat.RAW_SENSOR: 1651 case ImageFormat.RAW_PRIVATE: 1652 case ImageFormat.DEPTH16: 1653 case ImageFormat.DEPTH_POINT_CLOUD: 1654 case ImageFormat.DEPTH_JPEG: 1655 case ImageFormat.Y8: 1656 case ImageFormat.HEIC: 1657 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length); 1658 break; 1659 default: 1660 fail("Unsupported Image Format: " + format); 1661 } 1662 } 1663 dumpFile(String fileName, Bitmap data)1664 public static void dumpFile(String fileName, Bitmap data) { 1665 FileOutputStream outStream; 1666 try { 1667 Log.v(TAG, "output will be saved as " + fileName); 1668 outStream = new FileOutputStream(fileName); 1669 } catch (IOException ioe) { 1670 throw new RuntimeException("Unable to create debug output file " + fileName, ioe); 1671 } 1672 1673 try { 1674 data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream); 1675 outStream.close(); 1676 } catch (IOException ioe) { 1677 throw new RuntimeException("failed writing data to file " + fileName, ioe); 1678 } 1679 } 1680 dumpFile(String fileName, byte[] data)1681 public static void dumpFile(String fileName, byte[] data) { 1682 FileOutputStream outStream; 1683 try { 1684 Log.v(TAG, "output will be saved as " + fileName); 1685 outStream = new FileOutputStream(fileName); 1686 } catch (IOException ioe) { 1687 throw new RuntimeException("Unable to create debug output file " + fileName, ioe); 1688 } 1689 1690 try { 1691 outStream.write(data); 1692 outStream.close(); 1693 } catch (IOException ioe) { 1694 throw new RuntimeException("failed writing data to file " + fileName, ioe); 1695 } 1696 } 1697 1698 /** 1699 * Get the available output sizes for the user-defined {@code format}. 
1700 * 1701 * <p>Note that implementation-defined/hidden formats are not supported.</p> 1702 */ getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager)1703 public static Size[] getSupportedSizeForFormat(int format, String cameraId, 1704 CameraManager cameraManager) throws CameraAccessException { 1705 return getSupportedSizeForFormat(format, cameraId, cameraManager, 1706 /*maxResolution*/false); 1707 } 1708 getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager, boolean maxResolution)1709 public static Size[] getSupportedSizeForFormat(int format, String cameraId, 1710 CameraManager cameraManager, boolean maxResolution) throws CameraAccessException { 1711 CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId); 1712 assertNotNull("Can't get camera characteristics!", properties); 1713 if (VERBOSE) { 1714 Log.v(TAG, "get camera characteristics for camera: " + cameraId); 1715 } 1716 CameraCharacteristics.Key<StreamConfigurationMap> configMapTag = maxResolution ? 1717 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION : 1718 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP; 1719 StreamConfigurationMap configMap = properties.get(configMapTag); 1720 if (configMap == null) { 1721 assertTrue("SCALER_STREAM_CONFIGURATION_MAP is null!", maxResolution); 1722 return null; 1723 } 1724 1725 Size[] availableSizes = configMap.getOutputSizes(format); 1726 if (!maxResolution) { 1727 assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: " 1728 + format); 1729 } 1730 Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format); 1731 if (highResAvailableSizes != null && highResAvailableSizes.length > 0) { 1732 Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length]; 1733 System.arraycopy(availableSizes, 0, allSizes, 0, 1734 availableSizes.length); 1735 System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, 1736 highResAvailableSizes.length); 1737 availableSizes = allSizes; 1738 } 1739 if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes)); 1740 return availableSizes; 1741 } 1742 1743 /** 1744 * Get the available output sizes for the given class. 
1745 * 1746 */ getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager)1747 public static Size[] getSupportedSizeForClass(Class klass, String cameraId, 1748 CameraManager cameraManager) throws CameraAccessException { 1749 CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId); 1750 assertNotNull("Can't get camera characteristics!", properties); 1751 if (VERBOSE) { 1752 Log.v(TAG, "get camera characteristics for camera: " + cameraId); 1753 } 1754 StreamConfigurationMap configMap = 1755 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); 1756 Size[] availableSizes = configMap.getOutputSizes(klass); 1757 assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: " 1758 + klass); 1759 Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE); 1760 if (highResAvailableSizes != null && highResAvailableSizes.length > 0) { 1761 Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length]; 1762 System.arraycopy(availableSizes, 0, allSizes, 0, 1763 availableSizes.length); 1764 System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length, 1765 highResAvailableSizes.length); 1766 availableSizes = allSizes; 1767 } 1768 if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes)); 1769 return availableSizes; 1770 } 1771 1772 /** 1773 * Size comparator that compares the number of pixels it covers. 1774 * 1775 * <p>If two the areas of two sizes are same, compare the widths.</p> 1776 */ 1777 public static class SizeComparator implements Comparator<Size> { 1778 @Override compare(Size lhs, Size rhs)1779 public int compare(Size lhs, Size rhs) { 1780 return CameraUtils 1781 .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight()); 1782 } 1783 } 1784 1785 /** 1786 * Get sorted size list in descending order. Remove the sizes larger than 1787 * the bound. If the bound is null, don't do the size bound filtering. 1788 */ getSupportedPreviewSizes(String cameraId, CameraManager cameraManager, Size bound)1789 static public List<Size> getSupportedPreviewSizes(String cameraId, 1790 CameraManager cameraManager, Size bound) throws CameraAccessException { 1791 1792 Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId, 1793 cameraManager); 1794 assertArrayNotEmpty(rawSizes, 1795 "Available sizes for SurfaceHolder class should not be empty"); 1796 if (VERBOSE) { 1797 Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes)); 1798 } 1799 1800 if (bound == null) { 1801 return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false); 1802 } 1803 1804 List<Size> sizes = new ArrayList<Size>(); 1805 for (Size sz: rawSizes) { 1806 if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) { 1807 sizes.add(sz); 1808 } 1809 } 1810 return getAscendingOrderSizes(sizes, /*ascending*/false); 1811 } 1812 1813 /** 1814 * Get a sorted list of sizes from a given size list. 1815 * 1816 * <p> 1817 * The size is compare by area it covers, if the areas are same, then 1818 * compare the widths. 
1819 * </p> 1820 * 1821 * @param sizeList The input size list to be sorted 1822 * @param ascending True if the order is ascending, otherwise descending order 1823 * @return The ordered list of sizes 1824 */ getAscendingOrderSizes(final List<Size> sizeList, boolean ascending)1825 static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) { 1826 if (sizeList == null) { 1827 throw new IllegalArgumentException("sizeList shouldn't be null"); 1828 } 1829 1830 Comparator<Size> comparator = new SizeComparator(); 1831 List<Size> sortedSizes = new ArrayList<Size>(); 1832 sortedSizes.addAll(sizeList); 1833 Collections.sort(sortedSizes, comparator); 1834 if (!ascending) { 1835 Collections.reverse(sortedSizes); 1836 } 1837 1838 return sortedSizes; 1839 } 1840 /** 1841 * Get sorted (descending order) size list for given format. Remove the sizes larger than 1842 * the bound. If the bound is null, don't do the size bound filtering. 1843 */ getSortedSizesForFormat(String cameraId, CameraManager cameraManager, int format, Size bound)1844 static public List<Size> getSortedSizesForFormat(String cameraId, 1845 CameraManager cameraManager, int format, Size bound) throws CameraAccessException { 1846 return getSortedSizesForFormat(cameraId, cameraManager, format, /*maxResolution*/false, 1847 bound); 1848 } 1849 1850 /** 1851 * Get sorted (descending order) size list for given format (with an option to get sizes from 1852 * the maximum resolution stream configuration map). Remove the sizes larger than 1853 * the bound. If the bound is null, don't do the size bound filtering. 1854 */ getSortedSizesForFormat(String cameraId, CameraManager cameraManager, int format, boolean maxResolution, Size bound)1855 static public List<Size> getSortedSizesForFormat(String cameraId, 1856 CameraManager cameraManager, int format, boolean maxResolution, Size bound) 1857 throws CameraAccessException { 1858 Comparator<Size> comparator = new SizeComparator(); 1859 Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager, maxResolution); 1860 List<Size> sortedSizes = null; 1861 if (bound != null) { 1862 sortedSizes = new ArrayList<Size>(/*capacity*/1); 1863 for (Size sz : sizes) { 1864 if (comparator.compare(sz, bound) <= 0) { 1865 sortedSizes.add(sz); 1866 } 1867 } 1868 } else { 1869 sortedSizes = Arrays.asList(sizes); 1870 } 1871 assertTrue("Supported size list should have at least one element", 1872 sortedSizes.size() > 0); 1873 1874 Collections.sort(sortedSizes, comparator); 1875 // Make it in descending order. 1876 Collections.reverse(sortedSizes); 1877 return sortedSizes; 1878 } 1879 1880 /** 1881 * Get supported video size list for a given camera device. 1882 * 1883 * <p> 1884 * Filter out the sizes that are larger than the bound. If the bound is 1885 * null, don't do the size bound filtering. 
     * </p>
     */
    static public List<Size> getSupportedVideoSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {

        Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
                cameraId, cameraManager);
        assertArrayNotEmpty(rawSizes,
                "Available sizes for MediaRecorder class should not be empty");
        if (VERBOSE) {
            Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
        }

        if (bound == null) {
            return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
        }

        List<Size> sizes = new ArrayList<Size>();
        for (Size sz: rawSizes) {
            if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
                sizes.add(sz);
            }
        }
        return getAscendingOrderSizes(sizes, /*ascending*/false);
    }

    /**
     * Get supported still capture (JPEG) size list (descending order) for a given camera device.
     *
     * <p>
     * Filter out the sizes that are larger than the bound. If the bound is
     * null, don't do the size bound filtering.
     * </p>
     */
    static public List<Size> getSupportedStillSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {
        return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
    }

    static public List<Size> getSupportedHeicSizes(String cameraId,
            CameraManager cameraManager, Size bound) throws CameraAccessException {
        return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound);
    }

    static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
        return sizes.get(sizes.size() - 1);
    }

    /**
     * Get max supported preview size for a camera device.
     */
    static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
            throws CameraAccessException {
        return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
    }

    /**
     * Get max preview size for a camera device among the supported sizes that are no larger
     * than the bound.
     */
    static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
            throws CameraAccessException {
        List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
        return sizes.get(0);
    }

    /**
     * Get max depth size for a camera device.
1956 */ getMaxDepthSize(String cameraId, CameraManager cameraManager)1957 static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager) 1958 throws CameraAccessException { 1959 List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16, 1960 /*bound*/ null); 1961 return sizes.get(0); 1962 } 1963 1964 /** 1965 * Return the lower size 1966 * @param a first size 1967 * 1968 * @param b second size 1969 * 1970 * @return Size the smaller size 1971 * 1972 * @throws IllegalArgumentException if either param was null. 1973 * 1974 */ getMinSize(Size a, Size b)1975 @NonNull public static Size getMinSize(Size a, Size b) { 1976 if (a == null || b == null) { 1977 throw new IllegalArgumentException("sizes was empty"); 1978 } 1979 if (a.getWidth() * a.getHeight() < b.getHeight() * b.getWidth()) { 1980 return a; 1981 } 1982 return b; 1983 } 1984 1985 /** 1986 * Get the largest size by area. 1987 * 1988 * @param sizes an array of sizes, must have at least 1 element 1989 * 1990 * @return Largest Size 1991 * 1992 * @throws IllegalArgumentException if sizes was null or had 0 elements 1993 */ getMaxSize(Size... sizes)1994 public static Size getMaxSize(Size... sizes) { 1995 if (sizes == null || sizes.length == 0) { 1996 throw new IllegalArgumentException("sizes was empty"); 1997 } 1998 1999 Size sz = sizes[0]; 2000 for (Size size : sizes) { 2001 if (size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) { 2002 sz = size; 2003 } 2004 } 2005 2006 return sz; 2007 } 2008 2009 /** 2010 * Get the largest size by area within (less than) bound 2011 * 2012 * @param sizes an array of sizes, must have at least 1 element 2013 * 2014 * @return Largest Size. Null if no such size exists within bound. 2015 * 2016 * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid. 2017 */ getMaxSizeWithBound(Size[] sizes, int bound)2018 public static Size getMaxSizeWithBound(Size[] sizes, int bound) { 2019 if (sizes == null || sizes.length == 0) { 2020 throw new IllegalArgumentException("sizes was empty"); 2021 } 2022 if (bound <= 0) { 2023 throw new IllegalArgumentException("bound is invalid"); 2024 } 2025 2026 Size sz = null; 2027 for (Size size : sizes) { 2028 if (size.getWidth() * size.getHeight() >= bound) { 2029 continue; 2030 } 2031 2032 if (sz == null || 2033 size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) { 2034 sz = size; 2035 } 2036 } 2037 2038 return sz; 2039 } 2040 2041 /** 2042 * Returns true if the given {@code array} contains the given element. 2043 * 2044 * @param array {@code array} to check for {@code elem} 2045 * @param elem {@code elem} to test for 2046 * @return {@code true} if the given element is contained 2047 */ contains(int[] array, int elem)2048 public static boolean contains(int[] array, int elem) { 2049 if (array == null) return false; 2050 for (int i = 0; i < array.length; i++) { 2051 if (elem == array[i]) return true; 2052 } 2053 return false; 2054 } 2055 2056 /** 2057 * Get object array from byte array. 2058 * 2059 * @param array Input byte array to be converted 2060 * @return Byte object array converted from input byte array 2061 */ toObject(byte[] array)2062 public static Byte[] toObject(byte[] array) { 2063 return convertPrimitiveArrayToObjectArray(array, Byte.class); 2064 } 2065 2066 /** 2067 * Get object array from int array. 
2068 * 2069 * @param array Input int array to be converted 2070 * @return Integer object array converted from input int array 2071 */ toObject(int[] array)2072 public static Integer[] toObject(int[] array) { 2073 return convertPrimitiveArrayToObjectArray(array, Integer.class); 2074 } 2075 2076 /** 2077 * Get object array from float array. 2078 * 2079 * @param array Input float array to be converted 2080 * @return Float object array converted from input float array 2081 */ toObject(float[] array)2082 public static Float[] toObject(float[] array) { 2083 return convertPrimitiveArrayToObjectArray(array, Float.class); 2084 } 2085 2086 /** 2087 * Get object array from double array. 2088 * 2089 * @param array Input double array to be converted 2090 * @return Double object array converted from input double array 2091 */ toObject(double[] array)2092 public static Double[] toObject(double[] array) { 2093 return convertPrimitiveArrayToObjectArray(array, Double.class); 2094 } 2095 2096 /** 2097 * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]). 2098 * 2099 * @param array Input array object 2100 * @param wrapperClass The boxed class it converts to 2101 * @return Boxed version of primitive array 2102 */ convertPrimitiveArrayToObjectArray(final Object array, final Class<T> wrapperClass)2103 private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array, 2104 final Class<T> wrapperClass) { 2105 // getLength does the null check and isArray check already. 2106 int arrayLength = Array.getLength(array); 2107 if (arrayLength == 0) { 2108 throw new IllegalArgumentException("Input array shouldn't be empty"); 2109 } 2110 2111 @SuppressWarnings("unchecked") 2112 final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength); 2113 for (int i = 0; i < arrayLength; i++) { 2114 Array.set(result, i, Array.get(array, i)); 2115 } 2116 return result; 2117 } 2118 2119 /** 2120 * Update one 3A region in capture request builder if that region is supported. Do nothing 2121 * if the specified 3A region is not supported by camera device. 2122 * @param requestBuilder The request to be updated 2123 * @param algoIdx The index to the algorithm. 
(AE: 0, AWB: 1, AF: 2) 2124 * @param regions The 3A regions to be set 2125 * @param staticInfo static metadata characteristics 2126 */ update3aRegion( CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions, StaticMetadata staticInfo)2127 public static void update3aRegion( 2128 CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions, 2129 StaticMetadata staticInfo) 2130 { 2131 int maxRegions; 2132 CaptureRequest.Key<MeteringRectangle[]> key; 2133 2134 if (regions == null || regions.length == 0 || staticInfo == null) { 2135 throw new IllegalArgumentException("Invalid input 3A region!"); 2136 } 2137 2138 switch (algoIdx) { 2139 case INDEX_ALGORITHM_AE: 2140 maxRegions = staticInfo.getAeMaxRegionsChecked(); 2141 key = CaptureRequest.CONTROL_AE_REGIONS; 2142 break; 2143 case INDEX_ALGORITHM_AWB: 2144 maxRegions = staticInfo.getAwbMaxRegionsChecked(); 2145 key = CaptureRequest.CONTROL_AWB_REGIONS; 2146 break; 2147 case INDEX_ALGORITHM_AF: 2148 maxRegions = staticInfo.getAfMaxRegionsChecked(); 2149 key = CaptureRequest.CONTROL_AF_REGIONS; 2150 break; 2151 default: 2152 throw new IllegalArgumentException("Unknown 3A Algorithm!"); 2153 } 2154 2155 if (maxRegions >= regions.length) { 2156 requestBuilder.set(key, regions); 2157 } 2158 } 2159 2160 /** 2161 * Validate one 3A region in capture result equals to expected region if that region is 2162 * supported. Do nothing if the specified 3A region is not supported by camera device. 2163 * @param result The capture result to be validated 2164 * @param partialResults The partial results to be validated 2165 * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2) 2166 * @param expectRegions The 3A regions expected in capture result 2167 * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio 2168 * @param staticInfo static metadata characteristics 2169 */ validate3aRegion( CaptureResult result, List<CaptureResult> partialResults, int algoIdx, MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)2170 public static void validate3aRegion( 2171 CaptureResult result, List<CaptureResult> partialResults, int algoIdx, 2172 MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo) 2173 { 2174 // There are multiple cases where result 3A region could be slightly different than the 2175 // request: 2176 // 1. Distortion correction, 2177 // 2. Adding smaller 3a region in the test exposes existing devices' offset is larger 2178 // than 1. 2179 // 3. Precision loss due to converting to HAL zoom ratio and back 2180 // 4. Error magnification due to active array scale-up when zoom ratio API is used. 2181 // 2182 // To handle all these scenarios, make the threshold larger, and scale the threshold based 2183 // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller 2184 // than 1x. 
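        // For example, with maxCoordOffset = 5 and a reported zoom ratio of 4.0, the allowed
        // per-coordinate offset below becomes ceil(5 * max(4.0 / 2, 1.0)) = 10 pixels when
        // scaleByZoomRatio is set.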
2185 final int maxCoordOffset = 5; 2186 int maxRegions; 2187 CaptureResult.Key<MeteringRectangle[]> key; 2188 MeteringRectangle[] actualRegion; 2189 2190 switch (algoIdx) { 2191 case INDEX_ALGORITHM_AE: 2192 maxRegions = staticInfo.getAeMaxRegionsChecked(); 2193 key = CaptureResult.CONTROL_AE_REGIONS; 2194 break; 2195 case INDEX_ALGORITHM_AWB: 2196 maxRegions = staticInfo.getAwbMaxRegionsChecked(); 2197 key = CaptureResult.CONTROL_AWB_REGIONS; 2198 break; 2199 case INDEX_ALGORITHM_AF: 2200 maxRegions = staticInfo.getAfMaxRegionsChecked(); 2201 key = CaptureResult.CONTROL_AF_REGIONS; 2202 break; 2203 default: 2204 throw new IllegalArgumentException("Unknown 3A Algorithm!"); 2205 } 2206 2207 int maxDist = maxCoordOffset; 2208 if (scaleByZoomRatio) { 2209 Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO); 2210 for (CaptureResult partialResult : partialResults) { 2211 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO); 2212 if (zoomRatioInPartial != null) { 2213 assertEquals("CONTROL_ZOOM_RATIO in partial result must match" 2214 + " that in final result", zoomRatio, zoomRatioInPartial); 2215 } 2216 } 2217 maxDist = (int)Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f)); 2218 } 2219 2220 if (maxRegions > 0) 2221 { 2222 actualRegion = getValueNotNull(result, key); 2223 for (CaptureResult partialResult : partialResults) { 2224 MeteringRectangle[] actualRegionInPartial = partialResult.get(key); 2225 if (actualRegionInPartial != null) { 2226 assertEquals("Key " + key.getName() + " in partial result must match" 2227 + " that in final result", actualRegionInPartial, actualRegion); 2228 } 2229 } 2230 2231 for (int i = 0; i < actualRegion.length; i++) { 2232 // If the expected region's metering weight is 0, allow the camera device 2233 // to override it. 2234 if (expectRegions[i].getMeteringWeight() == 0) { 2235 continue; 2236 } 2237 2238 Rect a = actualRegion[i].getRect(); 2239 Rect e = expectRegions[i].getRect(); 2240 2241 if (VERBOSE) { 2242 Log.v(TAG, "Actual region " + actualRegion[i].toString() + 2243 ", expected region " + expectRegions[i].toString() + 2244 ", maxDist " + maxDist); 2245 } 2246 assertTrue( 2247 "Expected 3A regions: " + Arrays.toString(expectRegions) + 2248 " are not close enough to the actual one: " + Arrays.toString(actualRegion), 2249 maxDist >= Math.abs(a.left - e.left)); 2250 2251 assertTrue( 2252 "Expected 3A regions: " + Arrays.toString(expectRegions) + 2253 " are not close enough to the actual one: " + Arrays.toString(actualRegion), 2254 maxDist >= Math.abs(a.right - e.right)); 2255 2256 assertTrue( 2257 "Expected 3A regions: " + Arrays.toString(expectRegions) + 2258 " are not close enough to the actual one: " + Arrays.toString(actualRegion), 2259 maxDist >= Math.abs(a.top - e.top)); 2260 assertTrue( 2261 "Expected 3A regions: " + Arrays.toString(expectRegions) + 2262 " are not close enough to the actual one: " + Arrays.toString(actualRegion), 2263 maxDist >= Math.abs(a.bottom - e.bottom)); 2264 } 2265 } 2266 } 2267 2268 2269 /** 2270 * Validate image based on format and size. 2271 * 2272 * @param image The image to be validated. 2273 * @param width The image width. 2274 * @param height The image height. 2275 * @param format The image format. 2276 * @param filePath The debug dump file path, null if don't want to dump to 2277 * file. 
2278 * @throws UnsupportedOperationException if calling with an unknown format 2279 */ validateImage(Image image, int width, int height, int format, String filePath)2280 public static void validateImage(Image image, int width, int height, int format, 2281 String filePath) { 2282 checkImage(image, width, height, format); 2283 2284 /** 2285 * TODO: validate timestamp: 2286 * 1. capture result timestamp against the image timestamp (need 2287 * consider frame drops) 2288 * 2. timestamps should be monotonically increasing for different requests 2289 */ 2290 if(VERBOSE) Log.v(TAG, "validating Image"); 2291 byte[] data = getDataFromImage(image); 2292 assertTrue("Invalid image data", data != null && data.length > 0); 2293 2294 switch (format) { 2295 // Clients must be able to process and handle depth jpeg images like any other 2296 // regular jpeg. 2297 case ImageFormat.DEPTH_JPEG: 2298 case ImageFormat.JPEG: 2299 validateJpegData(data, width, height, filePath); 2300 break; 2301 case ImageFormat.YCBCR_P010: 2302 validateP010Data(data, width, height, format, image.getTimestamp(), filePath); 2303 break; 2304 case ImageFormat.YUV_420_888: 2305 case ImageFormat.YV12: 2306 validateYuvData(data, width, height, format, image.getTimestamp(), filePath); 2307 break; 2308 case ImageFormat.RAW_SENSOR: 2309 validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath); 2310 break; 2311 case ImageFormat.DEPTH16: 2312 validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath); 2313 break; 2314 case ImageFormat.DEPTH_POINT_CLOUD: 2315 validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath); 2316 break; 2317 case ImageFormat.RAW_PRIVATE: 2318 validateRawPrivateData(data, width, height, image.getTimestamp(), filePath); 2319 break; 2320 case ImageFormat.Y8: 2321 validateY8Data(data, width, height, format, image.getTimestamp(), filePath); 2322 break; 2323 case ImageFormat.HEIC: 2324 validateHeicData(data, width, height, filePath); 2325 break; 2326 default: 2327 throw new UnsupportedOperationException("Unsupported format for validation: " 2328 + format); 2329 } 2330 } 2331 2332 public static class HandlerExecutor implements Executor { 2333 private final Handler mHandler; 2334 HandlerExecutor(Handler handler)2335 public HandlerExecutor(Handler handler) { 2336 assertNotNull("handler must be valid", handler); 2337 mHandler = handler; 2338 } 2339 2340 @Override execute(Runnable runCmd)2341 public void execute(Runnable runCmd) { 2342 mHandler.post(runCmd); 2343 } 2344 } 2345 2346 /** 2347 * Provide a mock for {@link CameraDevice.StateCallback}. 2348 * 2349 * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an 2350 * abstract class.</p> 2351 * 2352 * <p> 2353 * Use this instead of other classes when needing to verify interactions, since 2354 * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra 2355 * interactions which will cause false test failures. 
2356 * </p> 2357 * 2358 */ 2359 public static class MockStateCallback extends CameraDevice.StateCallback { 2360 2361 @Override onOpened(CameraDevice camera)2362 public void onOpened(CameraDevice camera) { 2363 } 2364 2365 @Override onDisconnected(CameraDevice camera)2366 public void onDisconnected(CameraDevice camera) { 2367 } 2368 2369 @Override onError(CameraDevice camera, int error)2370 public void onError(CameraDevice camera, int error) { 2371 } 2372 MockStateCallback()2373 private MockStateCallback() {} 2374 2375 /** 2376 * Create a Mockito-ready mocked StateCallback. 2377 */ mock()2378 public static MockStateCallback mock() { 2379 return Mockito.spy(new MockStateCallback()); 2380 } 2381 } 2382 validateJpegData(byte[] jpegData, int width, int height, String filePath)2383 public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) { 2384 BitmapFactory.Options bmpOptions = new BitmapFactory.Options(); 2385 // DecodeBound mode: only parse the frame header to get width/height. 2386 // it doesn't decode the pixel. 2387 bmpOptions.inJustDecodeBounds = true; 2388 BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions); 2389 assertEquals(width, bmpOptions.outWidth); 2390 assertEquals(height, bmpOptions.outHeight); 2391 2392 // Pixel decoding mode: decode whole image. check if the image data 2393 // is decodable here. 2394 assertNotNull("Decoding jpeg failed", 2395 BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length)); 2396 if (DEBUG && filePath != null) { 2397 String fileName = 2398 filePath + "/" + width + "x" + height + ".jpeg"; 2399 dumpFile(fileName, jpegData); 2400 } 2401 } 2402 validateYuvData(byte[] yuvData, int width, int height, int format, long ts, String filePath)2403 private static void validateYuvData(byte[] yuvData, int width, int height, int format, 2404 long ts, String filePath) { 2405 checkYuvFormat(format); 2406 if (VERBOSE) Log.v(TAG, "Validating YUV data"); 2407 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2408 assertEquals("Yuv data doesn't match", expectedSize, yuvData.length); 2409 2410 // TODO: Can add data validation for test pattern. 2411 2412 if (DEBUG && filePath != null) { 2413 String fileName = 2414 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv"; 2415 dumpFile(fileName, yuvData); 2416 } 2417 } 2418 validateP010Data(byte[] p010Data, int width, int height, int format, long ts, String filePath)2419 private static void validateP010Data(byte[] p010Data, int width, int height, int format, 2420 long ts, String filePath) { 2421 if (VERBOSE) Log.v(TAG, "Validating P010 data"); 2422 // The P010 10 bit samples are stored in two bytes so the size needs to be adjusted 2423 // accordingly. 
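        // For YCBCR_P010, ImageFormat.getBitsPerPixel() is expected to report 24 bits per pixel
        // (a 16-bit Y sample plus, on average, 8 bits of interleaved Cb/Cr per pixel), so the
        // expected size works out to width * height * 3 bytes.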
2424 int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8; 2425 int expectedSize = width * height * bytesPerPixelRounded; 2426 assertEquals("P010 data doesn't match", expectedSize, p010Data.length); 2427 2428 if (DEBUG && filePath != null) { 2429 String fileName = 2430 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010"; 2431 dumpFile(fileName, p010Data); 2432 } 2433 } validateRaw16Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2434 private static void validateRaw16Data(byte[] rawData, int width, int height, int format, 2435 long ts, String filePath) { 2436 if (VERBOSE) Log.v(TAG, "Validating raw data"); 2437 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2438 assertEquals("Raw data doesn't match", expectedSize, rawData.length); 2439 2440 // TODO: Can add data validation for test pattern. 2441 2442 if (DEBUG && filePath != null) { 2443 String fileName = 2444 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16"; 2445 dumpFile(fileName, rawData); 2446 } 2447 2448 return; 2449 } 2450 validateY8Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2451 private static void validateY8Data(byte[] rawData, int width, int height, int format, 2452 long ts, String filePath) { 2453 if (VERBOSE) Log.v(TAG, "Validating Y8 data"); 2454 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2455 assertEquals("Y8 data doesn't match", expectedSize, rawData.length); 2456 2457 // TODO: Can add data validation for test pattern. 2458 2459 if (DEBUG && filePath != null) { 2460 String fileName = 2461 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8"; 2462 dumpFile(fileName, rawData); 2463 } 2464 2465 return; 2466 } 2467 validateRawPrivateData(byte[] rawData, int width, int height, long ts, String filePath)2468 private static void validateRawPrivateData(byte[] rawData, int width, int height, 2469 long ts, String filePath) { 2470 if (VERBOSE) Log.v(TAG, "Validating private raw data"); 2471 // Expect each RAW pixel should occupy at least one byte and no more than 30 bytes 2472 int expectedSizeMin = width * height; 2473 int expectedSizeMax = width * height * 30; 2474 2475 assertTrue("Opaque RAW size " + rawData.length + "out of normal bound [" + 2476 expectedSizeMin + "," + expectedSizeMax + "]", 2477 expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax); 2478 2479 if (DEBUG && filePath != null) { 2480 String fileName = 2481 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv"; 2482 dumpFile(fileName, rawData); 2483 } 2484 2485 return; 2486 } 2487 validateDepth16Data(byte[] depthData, int width, int height, int format, long ts, String filePath)2488 private static void validateDepth16Data(byte[] depthData, int width, int height, int format, 2489 long ts, String filePath) { 2490 2491 if (VERBOSE) Log.v(TAG, "Validating depth16 data"); 2492 int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8; 2493 assertEquals("Depth data doesn't match", expectedSize, depthData.length); 2494 2495 2496 if (DEBUG && filePath != null) { 2497 String fileName = 2498 filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16"; 2499 dumpFile(fileName, depthData); 2500 } 2501 2502 return; 2503 2504 } 2505 validateDepthPointCloudData(byte[] depthData, int width, int height, int format, long ts, String filePath)2506 private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int 
            format,
            long ts, String filePath) {

        if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");

        // Can't validate size since it is variable

        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
            dumpFile(fileName, depthData);
        }

        return;

    }

    private static void validateHeicData(byte[] heicData, int width, int height, String filePath) {
        BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
        // DecodeBounds mode: only parse the frame header to get width/height.
        // It doesn't decode the pixels.
        bmpOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions);
        assertEquals(width, bmpOptions.outWidth);
        assertEquals(height, bmpOptions.outHeight);

        // Pixel decoding mode: decode the whole image and check that the image data
        // is decodable here.
        assertNotNull("Decoding heic failed",
                BitmapFactory.decodeByteArray(heicData, 0, heicData.length));
        if (DEBUG && filePath != null) {
            String fileName =
                    filePath + "/" + width + "x" + height + ".heic";
            dumpFile(fileName, heicData);
        }
    }

    public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
        if (result == null) {
            throw new IllegalArgumentException("Result must not be null");
        }

        T value = result.get(key);
        assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
        return value;
    }

    public static <T> T getValueNotNull(CameraCharacteristics characteristics,
            CameraCharacteristics.Key<T> key) {
        if (characteristics == null) {
            throw new IllegalArgumentException("Camera characteristics must not be null");
        }

        T value = characteristics.get(key);
        assertNotNull("Value of Key " + key.getName() + " shouldn't be null", value);
        return value;
    }

    /**
     * Get a crop region for a given zoom factor and center position.
     * <p>
     * The center position is a normalized position in the range [0, 1.0], where
     * (0, 0) represents the top left corner and (1.0, 1.0) represents the bottom right
     * corner. The center position can limit the effective minimal zoom
     * factor; for example, if the center position is (0.75, 0.75), the
     * effective minimal zoom factor becomes 2.0. If the requested zoom factor
     * is smaller than 2.0, a crop region with a 2.0 zoom factor will be returned.
     * </p>
     * <p>
     * The aspect ratio of the crop region is kept the same as the aspect
     * ratio of the active array.
     * </p>
     *
     * @param zoomFactor The zoom factor to generate the crop region, it must be
     *            >= 1.0
     * @param center The normalized zoom center point that is in the range of [0, 1].
     * @param maxZoom The max zoom factor supported by this device.
     * @param activeArray The active array size of this device.
     * @return crop region for the given normalized center and zoom factor.
2585 */ getCropRegionForZoom(float zoomFactor, final PointF center, final float maxZoom, final Rect activeArray)2586 public static Rect getCropRegionForZoom(float zoomFactor, final PointF center, 2587 final float maxZoom, final Rect activeArray) { 2588 if (zoomFactor < 1.0) { 2589 throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0"); 2590 } 2591 if (center.x > 1.0 || center.x < 0) { 2592 throw new IllegalArgumentException("center.x " + center.x 2593 + " should be in range of [0, 1.0]"); 2594 } 2595 if (center.y > 1.0 || center.y < 0) { 2596 throw new IllegalArgumentException("center.y " + center.y 2597 + " should be in range of [0, 1.0]"); 2598 } 2599 if (maxZoom < 1.0) { 2600 throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0"); 2601 } 2602 if (activeArray == null) { 2603 throw new IllegalArgumentException("activeArray must not be null"); 2604 } 2605 2606 float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x), 2607 Math.min(center.y, 1.0f - center.y)); 2608 float minEffectiveZoom = 0.5f / minCenterLength; 2609 if (minEffectiveZoom > maxZoom) { 2610 throw new IllegalArgumentException("Requested center " + center.toString() + 2611 " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max" 2612 + " zoom factor " + maxZoom); 2613 } 2614 2615 if (zoomFactor < minEffectiveZoom) { 2616 Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor " 2617 + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom); 2618 zoomFactor = minEffectiveZoom; 2619 } 2620 2621 int cropCenterX = (int)(activeArray.width() * center.x); 2622 int cropCenterY = (int)(activeArray.height() * center.y); 2623 int cropWidth = (int) (activeArray.width() / zoomFactor); 2624 int cropHeight = (int) (activeArray.height() / zoomFactor); 2625 2626 return new Rect( 2627 /*left*/cropCenterX - cropWidth / 2, 2628 /*top*/cropCenterY - cropHeight / 2, 2629 /*right*/ cropCenterX + cropWidth / 2, 2630 /*bottom*/cropCenterY + cropHeight / 2); 2631 } 2632 2633 /** 2634 * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps 2635 * 2636 * @param staticInfo camera static metadata 2637 * @return AeAvailableTargetFpsRanges in descending order by max fps 2638 */ getDescendingTargetFpsRanges(StaticMetadata staticInfo)2639 public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) { 2640 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2641 Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() { 2642 public int compare(Range<Integer> r1, Range<Integer> r2) { 2643 return r2.getUpper() - r1.getUpper(); 2644 } 2645 }); 2646 return fpsRanges; 2647 } 2648 2649 /** 2650 * Get AeAvailableTargetFpsRanges with max fps not exceeding 30 2651 * 2652 * @param staticInfo camera static metadata 2653 * @return AeAvailableTargetFpsRanges with max fps not exceeding 30 2654 */ getTargetFpsRangesUpTo30(StaticMetadata staticInfo)2655 public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) { 2656 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2657 ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>(); 2658 for (Range<Integer> fpsRange : fpsRanges) { 2659 if (fpsRange.getUpper() <= 30) { 2660 fpsRangesUpTo30.add(fpsRange); 2661 } 2662 } 2663 return fpsRangesUpTo30; 2664 } 2665 2666 /** 2667 * Get AeAvailableTargetFpsRanges with max fps greater than 30 2668 * 2669 * @param 
staticInfo camera static metadata 2670 * @return AeAvailableTargetFpsRanges with max fps greater than 30 2671 */ getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo)2672 public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) { 2673 Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked(); 2674 ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>(); 2675 for (Range<Integer> fpsRange : fpsRanges) { 2676 if (fpsRange.getUpper() > 30) { 2677 fpsRangesGreaterThan30.add(fpsRange); 2678 } 2679 } 2680 return fpsRangesGreaterThan30; 2681 } 2682 2683 /** 2684 * Calculate output 3A region from the intersection of input 3A region and cropped region. 2685 * 2686 * @param requestRegions The input 3A regions 2687 * @param cropRect The cropped region 2688 * @return expected 3A regions output in capture result 2689 */ getExpectedOutputRegion( MeteringRectangle[] requestRegions, Rect cropRect)2690 public static MeteringRectangle[] getExpectedOutputRegion( 2691 MeteringRectangle[] requestRegions, Rect cropRect){ 2692 MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length]; 2693 for (int i = 0; i < requestRegions.length; i++) { 2694 Rect requestRect = requestRegions[i].getRect(); 2695 Rect resultRect = new Rect(); 2696 boolean intersect = resultRect.setIntersect(requestRect, cropRect); 2697 resultRegions[i] = new MeteringRectangle( 2698 resultRect, 2699 intersect ? requestRegions[i].getMeteringWeight() : 0); 2700 } 2701 return resultRegions; 2702 } 2703 2704 /** 2705 * Copy source image data to destination image. 2706 * 2707 * @param src The source image to be copied from. 2708 * @param dst The destination image to be copied to. 2709 * @throws IllegalArgumentException If the source and destination images have 2710 * different format, size, or one of the images is not copyable. 2711 */ imageCopy(Image src, Image dst)2712 public static void imageCopy(Image src, Image dst) { 2713 if (src == null || dst == null) { 2714 throw new IllegalArgumentException("Images should be non-null"); 2715 } 2716 if (src.getFormat() != dst.getFormat()) { 2717 throw new IllegalArgumentException("Src and dst images should have the same format"); 2718 } 2719 if (src.getFormat() == ImageFormat.PRIVATE || 2720 dst.getFormat() == ImageFormat.PRIVATE) { 2721 throw new IllegalArgumentException("PRIVATE format images are not copyable"); 2722 } 2723 2724 Size srcSize = new Size(src.getWidth(), src.getHeight()); 2725 Size dstSize = new Size(dst.getWidth(), dst.getHeight()); 2726 if (!srcSize.equals(dstSize)) { 2727 throw new IllegalArgumentException("source image size " + srcSize + " is different" 2728 + " with " + "destination image size " + dstSize); 2729 } 2730 2731 // TODO: check the owner of the dst image, it must be from ImageWriter, other source may 2732 // not be writable. Maybe we should add an isWritable() method in image class. 
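        // The copy below proceeds plane by plane: when the row strides match and both pixel
        // strides are 1, the whole plane buffer is copied with a single bulk put(); when both
        // pixel strides are 1 but the row strides differ, rows are copied one at a time; when
        // either pixel stride is greater than 1, each pixel byte is placed individually. The
        // last row is clamped to the bytes actually remaining to cope with interleaved chroma
        // planes.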
2733 2734 Plane[] srcPlanes = src.getPlanes(); 2735 Plane[] dstPlanes = dst.getPlanes(); 2736 ByteBuffer srcBuffer = null; 2737 ByteBuffer dstBuffer = null; 2738 for (int i = 0; i < srcPlanes.length; i++) { 2739 srcBuffer = srcPlanes[i].getBuffer(); 2740 dstBuffer = dstPlanes[i].getBuffer(); 2741 int srcPos = srcBuffer.position(); 2742 srcBuffer.rewind(); 2743 dstBuffer.rewind(); 2744 int srcRowStride = srcPlanes[i].getRowStride(); 2745 int dstRowStride = dstPlanes[i].getRowStride(); 2746 int srcPixStride = srcPlanes[i].getPixelStride(); 2747 int dstPixStride = dstPlanes[i].getPixelStride(); 2748 2749 if (srcPixStride > 2 || dstPixStride > 2) { 2750 throw new IllegalArgumentException("source pixel stride " + srcPixStride + 2751 " with destination pixel stride " + dstPixStride + 2752 " is not supported"); 2753 } 2754 2755 if (srcRowStride == dstRowStride && srcPixStride == dstPixStride && 2756 srcPixStride == 1) { 2757 // Fast path, just copy the content in the byteBuffer all together. 2758 dstBuffer.put(srcBuffer); 2759 } else { 2760 Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i); 2761 int srcRowByteCount = srcRowStride; 2762 int dstRowByteCount = dstRowStride; 2763 byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)]; 2764 2765 if (srcPixStride == dstPixStride && srcPixStride == 1) { 2766 // Row by row copy case 2767 for (int row = 0; row < effectivePlaneSize.getHeight(); row++) { 2768 if (row == effectivePlaneSize.getHeight() - 1) { 2769 // Special case for interleaved planes: need handle the last row 2770 // carefully to avoid memory corruption. Check if we have enough bytes 2771 // to copy. 2772 srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining()); 2773 dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining()); 2774 } 2775 srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount); 2776 dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount); 2777 } 2778 } else { 2779 // Row by row per pixel copy case 2780 byte[] dstDataRow = new byte[dstRowByteCount]; 2781 for (int row = 0; row < effectivePlaneSize.getHeight(); row++) { 2782 if (row == effectivePlaneSize.getHeight() - 1) { 2783 // Special case for interleaved planes: need handle the last row 2784 // carefully to avoid memory corruption. Check if we have enough bytes 2785 // to copy. 
2786 int remainingBytes = srcBuffer.remaining(); 2787 if (srcRowByteCount > remainingBytes) { 2788 srcRowByteCount = remainingBytes; 2789 } 2790 remainingBytes = dstBuffer.remaining(); 2791 if (dstRowByteCount > remainingBytes) { 2792 dstRowByteCount = remainingBytes; 2793 } 2794 } 2795 srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount); 2796 int pos = dstBuffer.position(); 2797 dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount); 2798 dstBuffer.position(pos); 2799 for (int x = 0; x < effectivePlaneSize.getWidth(); x++) { 2800 dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride]; 2801 } 2802 dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount); 2803 } 2804 } 2805 } 2806 srcBuffer.position(srcPos); 2807 dstBuffer.rewind(); 2808 } 2809 } 2810 getEffectivePlaneSizeForImage(Image image, int planeIdx)2811 private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) { 2812 switch (image.getFormat()) { 2813 case ImageFormat.YUV_420_888: 2814 if (planeIdx == 0) { 2815 return new Size(image.getWidth(), image.getHeight()); 2816 } else { 2817 return new Size(image.getWidth() / 2, image.getHeight() / 2); 2818 } 2819 case ImageFormat.JPEG: 2820 case ImageFormat.RAW_SENSOR: 2821 case ImageFormat.RAW10: 2822 case ImageFormat.RAW12: 2823 case ImageFormat.DEPTH16: 2824 return new Size(image.getWidth(), image.getHeight()); 2825 case ImageFormat.PRIVATE: 2826 return new Size(0, 0); 2827 default: 2828 throw new UnsupportedOperationException( 2829 String.format("Invalid image format %d", image.getFormat())); 2830 } 2831 } 2832 2833 /** 2834 * <p> 2835 * Checks whether the two images are strongly equal. 2836 * </p> 2837 * <p> 2838 * Two images are strongly equal if and only if the data, formats, sizes, 2839 * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format 2840 * images, the image data is not not accessible thus the data comparison is 2841 * effectively skipped as the number of planes is zero. 2842 * </p> 2843 * <p> 2844 * Note that this method compares the pixel data even outside of the crop 2845 * region, which may not be necessary for general use case. 2846 * </p> 2847 * 2848 * @param lhsImg First image to be compared with. 2849 * @param rhsImg Second image to be compared with. 2850 * @return true if the two images are equal, false otherwise. 2851 * @throws IllegalArgumentException If either of image is null. 
2852 */ isImageStronglyEqual(Image lhsImg, Image rhsImg)2853 public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) { 2854 if (lhsImg == null || rhsImg == null) { 2855 throw new IllegalArgumentException("Images should be non-null"); 2856 } 2857 2858 if (lhsImg.getFormat() != rhsImg.getFormat()) { 2859 Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different with rhsImg format " 2860 + rhsImg.getFormat()); 2861 return false; 2862 } 2863 2864 if (lhsImg.getWidth() != rhsImg.getWidth()) { 2865 Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different with rhsImg width " 2866 + rhsImg.getWidth()); 2867 return false; 2868 } 2869 2870 if (lhsImg.getHeight() != rhsImg.getHeight()) { 2871 Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different with rhsImg height " 2872 + rhsImg.getHeight()); 2873 return false; 2874 } 2875 2876 if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) { 2877 Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp() 2878 + " is different with rhsImg timestamp " + rhsImg.getTimestamp()); 2879 return false; 2880 } 2881 2882 if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) { 2883 Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect() 2884 + " is different with rhsImg crop rect " + rhsImg.getCropRect()); 2885 return false; 2886 } 2887 2888 // Compare data inside of the image. 2889 Plane[] lhsPlanes = lhsImg.getPlanes(); 2890 Plane[] rhsPlanes = rhsImg.getPlanes(); 2891 ByteBuffer lhsBuffer = null; 2892 ByteBuffer rhsBuffer = null; 2893 for (int i = 0; i < lhsPlanes.length; i++) { 2894 lhsBuffer = lhsPlanes[i].getBuffer(); 2895 rhsBuffer = rhsPlanes[i].getBuffer(); 2896 lhsBuffer.rewind(); 2897 rhsBuffer.rewind(); 2898 // Special case for YUV420_888 buffer with different layout or 2899 // potentially differently interleaved U/V planes. 2900 if (lhsImg.getFormat() == ImageFormat.YUV_420_888 && 2901 (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() || 2902 lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() || 2903 (lhsPlanes[i].getPixelStride() != 1))) { 2904 int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth(); 2905 int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight(); 2906 int rowSizeL = lhsPlanes[i].getRowStride(); 2907 int rowSizeR = rhsPlanes[i].getRowStride(); 2908 byte[] lhsRow = new byte[rowSizeL]; 2909 byte[] rhsRow = new byte[rowSizeR]; 2910 int pixStrideL = lhsPlanes[i].getPixelStride(); 2911 int pixStrideR = rhsPlanes[i].getPixelStride(); 2912 for (int r = 0; r < height; r++) { 2913 if (r == height -1) { 2914 rowSizeL = lhsBuffer.remaining(); 2915 rowSizeR = rhsBuffer.remaining(); 2916 } 2917 lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL); 2918 rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR); 2919 for (int c = 0; c < width; c++) { 2920 if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) { 2921 Log.i(TAG, String.format( 2922 "byte buffers for plane %d row %d col %d don't match.", 2923 i, r, c)); 2924 return false; 2925 } 2926 } 2927 } 2928 } else { 2929 // Compare entire buffer directly 2930 if (!lhsBuffer.equals(rhsBuffer)) { 2931 Log.i(TAG, "byte buffers for plane " + i + " don't match."); 2932 return false; 2933 } 2934 } 2935 } 2936 2937 return true; 2938 } 2939 2940 /** 2941 * Set jpeg related keys in a capture request builder. 2942 * 2943 * @param builder The capture request builder to set the keys inl 2944 * @param exifData The exif data to set. 2945 * @param thumbnailSize The thumbnail size to set. 2946 * @param collector The camera error collector to collect errors. 
2947 */ setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData, Size thumbnailSize, CameraErrorCollector collector)2948 public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData, 2949 Size thumbnailSize, CameraErrorCollector collector) { 2950 builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize); 2951 builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation); 2952 builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation); 2953 builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality); 2954 builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY, 2955 exifData.thumbnailQuality); 2956 2957 // Validate request set and get. 2958 collector.expectEquals("JPEG thumbnail size request set and get should match", 2959 thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE)); 2960 collector.expectTrue("GPS locations request set and get should match.", 2961 areGpsFieldsEqual(exifData.gpsLocation, 2962 builder.get(CaptureRequest.JPEG_GPS_LOCATION))); 2963 collector.expectEquals("JPEG orientation request set and get should match", 2964 exifData.jpegOrientation, 2965 builder.get(CaptureRequest.JPEG_ORIENTATION)); 2966 collector.expectEquals("JPEG quality request set and get should match", 2967 exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY)); 2968 collector.expectEquals("JPEG thumbnail quality request set and get should match", 2969 exifData.thumbnailQuality, 2970 builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY)); 2971 } 2972 2973 /** 2974 * Simple validation of JPEG image size and format. 2975 * <p> 2976 * Only validate the image object basic correctness. It is fast, but doesn't actually 2977 * check the buffer data. Assert is used here as it make no sense to 2978 * continue the test if the jpeg image captured has some serious failures. 2979 * </p> 2980 * 2981 * @param image The captured JPEG/HEIC image 2982 * @param expectedSize Expected capture JEPG/HEIC size 2983 * @param format JPEG/HEIC image format 2984 */ basicValidateBlobImage(Image image, Size expectedSize, int format)2985 public static void basicValidateBlobImage(Image image, Size expectedSize, int format) { 2986 Size imageSz = new Size(image.getWidth(), image.getHeight()); 2987 assertTrue( 2988 String.format("Image size doesn't match (expected %s, actual %s) ", 2989 expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz)); 2990 assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"), 2991 format, image.getFormat()); 2992 assertNotNull("Image plane shouldn't be null", image.getPlanes()); 2993 assertEquals("Image plane number should be 1", 1, image.getPlanes().length); 2994 2995 // Jpeg/Heic decoding validate was done in ImageReaderTest, 2996 // no need to duplicate the test here. 2997 } 2998 2999 /** 3000 * Verify the EXIF and JPEG related keys in a capture result are expected. 3001 * - Capture request get values are same as were set. 3002 * - capture result's exif data is the same as was set by 3003 * the capture request. 3004 * - new tags in the result set by the camera service are 3005 * present and semantically correct. 3006 * 3007 * @param image The output JPEG/HEIC image to verify. 3008 * @param captureResult The capture result to verify. 3009 * @param expectedSize The expected JPEG/HEIC size. 3010 * @param expectedThumbnailSize The expected thumbnail size. 3011 * @param expectedExifData The expected EXIF data 3012 * @param staticInfo The static metadata for the camera device. 
3013 * @param blobFilename The filename to dump the jpeg/heic to. 3014 * @param collector The camera error collector to collect errors. 3015 * @param format JPEG/HEIC format 3016 */ verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize, Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo, CameraErrorCollector collector, String debugFileNameBase, int format)3017 public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize, 3018 Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo, 3019 CameraErrorCollector collector, String debugFileNameBase, int format) throws Exception { 3020 3021 basicValidateBlobImage(image, expectedSize, format); 3022 3023 byte[] blobBuffer = getDataFromImage(image); 3024 // Have to dump into a file to be able to use ExifInterface 3025 String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg"); 3026 String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix; 3027 dumpFile(blobFilename, blobBuffer); 3028 ExifInterface exif = new ExifInterface(blobFilename); 3029 3030 if (expectedThumbnailSize.equals(new Size(0,0))) { 3031 collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)", 3032 !exif.hasThumbnail()); 3033 } else { 3034 collector.expectTrue("Jpeg must have thumbnail for thumbnail size " + 3035 expectedThumbnailSize, exif.hasThumbnail()); 3036 } 3037 3038 // Validate capture result vs. request 3039 Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE); 3040 int orientationTested = expectedExifData.jpegOrientation; 3041 // Legacy shim always doesn't rotate thumbnail size 3042 if ((orientationTested == 90 || orientationTested == 270) && 3043 staticInfo.isHardwareLevelAtLeastLimited()) { 3044 int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 3045 /*defaultValue*/-1); 3046 if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) { 3047 // Device physically rotated image+thumbnail data 3048 // Expect thumbnail size to be also rotated 3049 resultThumbnailSize = new Size(resultThumbnailSize.getHeight(), 3050 resultThumbnailSize.getWidth()); 3051 } 3052 } 3053 3054 collector.expectEquals("JPEG thumbnail size result and request should match", 3055 expectedThumbnailSize, resultThumbnailSize); 3056 if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) != 3057 null) { 3058 collector.expectTrue("GPS location result and request should match.", 3059 areGpsFieldsEqual(expectedExifData.gpsLocation, 3060 captureResult.get(CaptureResult.JPEG_GPS_LOCATION))); 3061 } 3062 collector.expectEquals("JPEG orientation result and request should match", 3063 expectedExifData.jpegOrientation, 3064 captureResult.get(CaptureResult.JPEG_ORIENTATION)); 3065 collector.expectEquals("JPEG quality result and request should match", 3066 expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY)); 3067 collector.expectEquals("JPEG thumbnail quality result and request should match", 3068 expectedExifData.thumbnailQuality, 3069 captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY)); 3070 3071 // Validate other exif tags for all non-legacy devices 3072 if (!staticInfo.isHardwareLevelLegacy()) { 3073 verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, collector, 3074 expectedExifData); 3075 } 3076 } 3077 3078 /** 3079 * Get the degree of an EXIF orientation. 
3080 */ getExifOrientationInDegree(int exifOrientation, CameraErrorCollector collector)3081 private static int getExifOrientationInDegree(int exifOrientation, 3082 CameraErrorCollector collector) { 3083 switch (exifOrientation) { 3084 case ExifInterface.ORIENTATION_NORMAL: 3085 return 0; 3086 case ExifInterface.ORIENTATION_ROTATE_90: 3087 return 90; 3088 case ExifInterface.ORIENTATION_ROTATE_180: 3089 return 180; 3090 case ExifInterface.ORIENTATION_ROTATE_270: 3091 return 270; 3092 default: 3093 collector.addMessage("It is impossible to get non 0, 90, 180, 270 degress exif" + 3094 "info based on the request orientation range"); 3095 return 0; 3096 } 3097 } 3098 3099 /** 3100 * Validate and return the focal length. 3101 * 3102 * @param result Capture result to get the focal length 3103 * @return Focal length from capture result or -1 if focal length is not available. 3104 */ validateFocalLength(CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector)3105 private static float validateFocalLength(CaptureResult result, StaticMetadata staticInfo, 3106 CameraErrorCollector collector) { 3107 float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked(); 3108 Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH); 3109 if (collector.expectTrue("Focal length is invalid", 3110 resultFocalLength != null && resultFocalLength > 0)) { 3111 List<Float> focalLengthList = 3112 Arrays.asList(CameraTestUtils.toObject(focalLengths)); 3113 collector.expectTrue("Focal length should be one of the available focal length", 3114 focalLengthList.contains(resultFocalLength)); 3115 return resultFocalLength; 3116 } 3117 return -1; 3118 } 3119 3120 /** 3121 * Validate and return the aperture. 3122 * 3123 * @param result Capture result to get the aperture 3124 * @return Aperture from capture result or -1 if aperture is not available. 3125 */ validateAperture(CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector)3126 private static float validateAperture(CaptureResult result, StaticMetadata staticInfo, 3127 CameraErrorCollector collector) { 3128 float[] apertures = staticInfo.getAvailableAperturesChecked(); 3129 Float resultAperture = result.get(CaptureResult.LENS_APERTURE); 3130 if (collector.expectTrue("Capture result aperture is invalid", 3131 resultAperture != null && resultAperture > 0)) { 3132 List<Float> apertureList = 3133 Arrays.asList(CameraTestUtils.toObject(apertures)); 3134 collector.expectTrue("Aperture should be one of the available apertures", 3135 apertureList.contains(resultAperture)); 3136 return resultAperture; 3137 } 3138 return -1; 3139 } 3140 3141 /** 3142 * Return the closest value in an array of floats. 3143 */ getClosestValueInArray(float[] values, float target)3144 private static float getClosestValueInArray(float[] values, float target) { 3145 int minIdx = 0; 3146 float minDistance = Math.abs(values[0] - target); 3147 for(int i = 0; i < values.length; i++) { 3148 float distance = Math.abs(values[i] - target); 3149 if (minDistance > distance) { 3150 minDistance = distance; 3151 minIdx = i; 3152 } 3153 } 3154 3155 return values[minIdx]; 3156 } 3157 3158 /** 3159 * Return if two Location's GPS field are the same. 
3160 */ areGpsFieldsEqual(Location a, Location b)3161 private static boolean areGpsFieldsEqual(Location a, Location b) { 3162 if (a == null || b == null) { 3163 return false; 3164 } 3165 3166 return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() && 3167 a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() && 3168 a.getProvider() == b.getProvider(); 3169 } 3170 3171 /** 3172 * Verify extra tags in JPEG EXIF 3173 */ verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize, CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector, ExifTestData expectedExifData)3174 private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize, 3175 CaptureResult result, StaticMetadata staticInfo, CameraErrorCollector collector, 3176 ExifTestData expectedExifData) 3177 throws ParseException { 3178 /** 3179 * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION. 3180 * Orientation and exif width/height need to be tested carefully, two cases: 3181 * 3182 * 1. Device rotate the image buffer physically, then exif width/height may not match 3183 * the requested still capture size, we need swap them to check. 3184 * 3185 * 2. Device use the exif tag to record the image orientation, it doesn't rotate 3186 * the jpeg image buffer itself. In this case, the exif width/height should always match 3187 * the requested still capture size, and the exif orientation should always match the 3188 * requested orientation. 3189 * 3190 */ 3191 int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0); 3192 int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0); 3193 Size exifSize = new Size(exifWidth, exifHeight); 3194 // Orientation could be missing, which is ok, default to 0. 3195 int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 3196 /*defaultValue*/-1); 3197 // Get requested orientation from result, because they should be same. 3198 if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) { 3199 int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION); 3200 final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED; 3201 final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270; 3202 boolean orientationValid = collector.expectTrue(String.format( 3203 "Exif orientation must be in range of [%d, %d]", 3204 ORIENTATION_MIN, ORIENTATION_MAX), 3205 exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX); 3206 if (orientationValid) { 3207 /** 3208 * Device captured image doesn't respect the requested orientation, 3209 * which means it rotates the image buffer physically. Then we 3210 * should swap the exif width/height accordingly to compare. 3211 */ 3212 boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED; 3213 3214 if (deviceRotatedImage) { 3215 // Case 1. 3216 boolean needSwap = (requestedOrientation % 180 == 90); 3217 if (needSwap) { 3218 exifSize = new Size(exifHeight, exifWidth); 3219 } 3220 } else { 3221 // Case 2. 3222 collector.expectEquals("Exif orientaiton should match requested orientation", 3223 requestedOrientation, getExifOrientationInDegree(exifOrientation, 3224 collector)); 3225 } 3226 } 3227 } 3228 3229 /** 3230 * Ideally, need check exifSize == jpegSize == actual buffer size. But 3231 * jpegSize == jpeg decode bounds size(from jpeg jpeg frame 3232 * header, not exif) was validated in ImageReaderTest, no need to 3233 * validate again here. 
3234 */ 3235 collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize); 3236 3237 // TAG_DATETIME, it should be local time 3238 long currentTimeInMs = System.currentTimeMillis(); 3239 long currentTimeInSecond = currentTimeInMs / 1000; 3240 Date date = new Date(currentTimeInMs); 3241 String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date); 3242 String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME); 3243 if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) { 3244 collector.expectTrue("Exif TAG_DATETIME is wrong", 3245 dateTime.length() == EXIF_DATETIME_LENGTH); 3246 long exifTimeInSecond = 3247 new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000; 3248 long delta = currentTimeInSecond - exifTimeInSecond; 3249 collector.expectTrue("Capture time deviates too much from the current time", 3250 Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC); 3251 // It should be local time. 3252 collector.expectTrue("Exif date time should be local time", 3253 dateTime.startsWith(localDatetime)); 3254 } 3255 3256 boolean isExternalCamera = staticInfo.isExternalCamera(); 3257 if (!isExternalCamera) { 3258 // TAG_FOCAL_LENGTH. 3259 float[] focalLengths = staticInfo.getAvailableFocalLengthsChecked(); 3260 float exifFocalLength = (float)exif.getAttributeDouble( 3261 ExifInterface.TAG_FOCAL_LENGTH, -1); 3262 collector.expectEquals("Focal length should match", 3263 getClosestValueInArray(focalLengths, exifFocalLength), 3264 exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN); 3265 // More checks for focal length. 3266 collector.expectEquals("Exif focal length should match capture result", 3267 validateFocalLength(result, staticInfo, collector), 3268 exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN); 3269 3270 // TAG_EXPOSURE_TIME 3271 // ExifInterface API gives exposure time value in the form of float instead of rational 3272 String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME); 3273 collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime); 3274 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) { 3275 if (exposureTime != null) { 3276 double exposureTimeValue = Double.parseDouble(exposureTime); 3277 long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 3278 double expected = expTimeResult / 1e9; 3279 double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO; 3280 tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC); 3281 collector.expectEquals("Exif exposure time doesn't match", expected, 3282 exposureTimeValue, tolerance); 3283 } 3284 } 3285 3286 // TAG_APERTURE 3287 // ExifInterface API gives aperture value in the form of float instead of rational 3288 String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE); 3289 collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture); 3290 if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) { 3291 float[] apertures = staticInfo.getAvailableAperturesChecked(); 3292 if (exifAperture != null) { 3293 float apertureValue = Float.parseFloat(exifAperture); 3294 collector.expectEquals("Aperture value should match", 3295 getClosestValueInArray(apertures, apertureValue), 3296 apertureValue, EXIF_APERTURE_ERROR_MARGIN); 3297 // More checks for aperture. 
3298 collector.expectEquals("Exif aperture length should match capture result", 3299 validateAperture(result, staticInfo, collector), 3300 apertureValue, EXIF_APERTURE_ERROR_MARGIN); 3301 } 3302 } 3303 3304 // TAG_MAKE 3305 String make = exif.getAttribute(ExifInterface.TAG_MAKE); 3306 collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make); 3307 3308 // TAG_MODEL 3309 String model = exif.getAttribute(ExifInterface.TAG_MODEL); 3310 collector.expectEquals("Exif TAG_MODEL is incorrect", Build.MODEL, model); 3311 3312 3313 // TAG_ISO 3314 int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1); 3315 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) || 3316 staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) { 3317 int expectedIso = 100; 3318 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) { 3319 expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY); 3320 } 3321 if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) { 3322 expectedIso = expectedIso * 3323 result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST); 3324 } else { 3325 expectedIso *= 100; 3326 } 3327 collector.expectInRange("Exif TAG_ISO is incorrect", iso, 3328 expectedIso/100, (expectedIso+50)/100); 3329 } 3330 } else { 3331 // External camera specific checks 3332 // TAG_MAKE 3333 String make = exif.getAttribute(ExifInterface.TAG_MAKE); 3334 collector.expectNotNull("Exif TAG_MAKE is null", make); 3335 3336 // TAG_MODEL 3337 String model = exif.getAttribute(ExifInterface.TAG_MODEL); 3338 collector.expectNotNull("Exif TAG_MODEL is nuill", model); 3339 } 3340 3341 3342 /** 3343 * TAG_FLASH. TODO: For full devices, can check a lot more info 3344 * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash) 3345 */ 3346 String flash = exif.getAttribute(ExifInterface.TAG_FLASH); 3347 collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash); 3348 3349 /** 3350 * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we 3351 * should be able to cross-check android.sensor.referenceIlluminant. 3352 */ 3353 String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE); 3354 collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance); 3355 3356 // TAG_DATETIME_DIGITIZED (a.k.a Create time for digital cameras). 3357 String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED); 3358 collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime); 3359 if (digitizedTime != null) { 3360 String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME); 3361 collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime); 3362 if (expectedDateTime != null) { 3363 collector.expectEquals("dataTime should match digitizedTime", 3364 expectedDateTime, digitizedTime); 3365 } 3366 } 3367 3368 /** 3369 * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at 3370 * most 9 digits in ExifInterface implementation, use getAttributeInt to 3371 * sanitize it. When the default value -1 is returned, it means that 3372 * this exif tag either doesn't exist or is a non-numerical invalid 3373 * string. Same rule applies to the rest of sub second tags. 
3374 */ 3375 int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1); 3376 collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0); 3377 3378 // TAG_SUBSEC_TIME_ORIG 3379 int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG, 3380 /*defaultValue*/-1); 3381 collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!", 3382 subSecTimeOrig >= 0); 3383 3384 // TAG_SUBSEC_TIME_DIG 3385 int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG, 3386 /*defaultValue*/-1); 3387 collector.expectTrue( 3388 "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0); 3389 3390 /** 3391 * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP. 3392 * The GPS timestamp information should be in seconds UTC time. 3393 */ 3394 String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP); 3395 collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp); 3396 String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP); 3397 collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp); 3398 3399 SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z"); 3400 String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC"; 3401 Date gpsDateTime = dateFormat.parse(gpsExifTimeString); 3402 Date expected = new Date(expectedExifData.gpsLocation.getTime()); 3403 collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime); 3404 } 3405 3406 3407 /** 3408 * Immutable class wrapping the exif test data. 3409 */ 3410 public static class ExifTestData { 3411 public final Location gpsLocation; 3412 public final int jpegOrientation; 3413 public final byte jpegQuality; 3414 public final byte thumbnailQuality; 3415 ExifTestData(Location location, int orientation, byte jpgQuality, byte thumbQuality)3416 public ExifTestData(Location location, int orientation, 3417 byte jpgQuality, byte thumbQuality) { 3418 gpsLocation = location; 3419 jpegOrientation = orientation; 3420 jpegQuality = jpgQuality; 3421 thumbnailQuality = thumbQuality; 3422 } 3423 } 3424 getPreviewSizeBound(WindowManager windowManager, Size bound)3425 public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) { 3426 Display display = windowManager.getDefaultDisplay(); 3427 3428 int width = display.getWidth(); 3429 int height = display.getHeight(); 3430 3431 if (height > width) { 3432 height = width; 3433 width = display.getHeight(); 3434 } 3435 3436 if (bound.getWidth() <= width && 3437 bound.getHeight() <= height) 3438 return bound; 3439 else 3440 return new Size(width, height); 3441 } 3442 3443 /** 3444 * Check if a particular stream configuration is supported by configuring it 3445 * to the device. 
3446 */ isStreamConfigurationSupported(CameraDevice camera, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler)3447 public static boolean isStreamConfigurationSupported(CameraDevice camera, 3448 List<Surface> outputSurfaces, 3449 CameraCaptureSession.StateCallback listener, Handler handler) { 3450 try { 3451 configureCameraSession(camera, outputSurfaces, listener, handler); 3452 return true; 3453 } catch (Exception e) { 3454 Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage()); 3455 return false; 3456 } 3457 } 3458 3459 public final static class SessionConfigSupport { 3460 public final boolean error; 3461 public final boolean callSupported; 3462 public final boolean configSupported; 3463 SessionConfigSupport(boolean error, boolean callSupported, boolean configSupported)3464 public SessionConfigSupport(boolean error, 3465 boolean callSupported, boolean configSupported) { 3466 this.error = error; 3467 this.callSupported = callSupported; 3468 this.configSupported = configSupported; 3469 } 3470 } 3471 3472 /** 3473 * Query whether a particular stream combination is supported. 3474 */ checkSessionConfigurationWithSurfaces(CameraDevice camera, Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig, int operatingMode, boolean defaultSupport, String msg)3475 public static void checkSessionConfigurationWithSurfaces(CameraDevice camera, 3476 Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig, 3477 int operatingMode, boolean defaultSupport, String msg) { 3478 List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size()); 3479 for (Surface surface : outputSurfaces) { 3480 outConfigurations.add(new OutputConfiguration(surface)); 3481 } 3482 3483 checkSessionConfigurationSupported(camera, handler, outConfigurations, 3484 inputConfig, operatingMode, defaultSupport, msg); 3485 } 3486 checkSessionConfigurationSupported(CameraDevice camera, Handler handler, List<OutputConfiguration> outputConfigs, InputConfiguration inputConfig, int operatingMode, boolean defaultSupport, String msg)3487 public static void checkSessionConfigurationSupported(CameraDevice camera, 3488 Handler handler, List<OutputConfiguration> outputConfigs, 3489 InputConfiguration inputConfig, int operatingMode, boolean defaultSupport, 3490 String msg) { 3491 SessionConfigSupport sessionConfigSupported = 3492 isSessionConfigSupported(camera, handler, outputConfigs, inputConfig, 3493 operatingMode, defaultSupport); 3494 3495 assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported); 3496 } 3497 3498 /** 3499 * Query whether a particular stream combination is supported. 
3500 */ isSessionConfigSupported(CameraDevice camera, Handler handler, List<OutputConfiguration> outputConfigs, InputConfiguration inputConfig, int operatingMode, boolean defaultSupport)3501 public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera, 3502 Handler handler, List<OutputConfiguration> outputConfigs, 3503 InputConfiguration inputConfig, int operatingMode, boolean defaultSupport) { 3504 boolean ret; 3505 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 3506 3507 SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs, 3508 new HandlerExecutor(handler), sessionListener); 3509 if (inputConfig != null) { 3510 sessionConfig.setInputConfiguration(inputConfig); 3511 } 3512 3513 try { 3514 ret = camera.isSessionConfigurationSupported(sessionConfig); 3515 } catch (UnsupportedOperationException e) { 3516 // Camera doesn't support session configuration query 3517 return new SessionConfigSupport(false/*error*/, 3518 false/*callSupported*/, defaultSupport/*configSupported*/); 3519 } catch (IllegalArgumentException e) { 3520 return new SessionConfigSupport(true/*error*/, 3521 false/*callSupported*/, false/*configSupported*/); 3522 } catch (android.hardware.camera2.CameraAccessException e) { 3523 return new SessionConfigSupport(true/*error*/, 3524 false/*callSupported*/, false/*configSupported*/); 3525 } 3526 3527 return new SessionConfigSupport(false/*error*/, 3528 true/*callSupported*/, ret/*configSupported*/); 3529 } 3530 3531 /** 3532 * Wait for numResultWait frames 3533 * 3534 * @param resultListener The capture listener to get capture result back. 3535 * @param numResultsWait Number of frame to wait 3536 * @param timeout Wait timeout in ms. 3537 * 3538 * @return the last result, or {@code null} if there was none 3539 */ waitForNumResults(SimpleCaptureCallback resultListener, int numResultsWait, int timeout)3540 public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener, 3541 int numResultsWait, int timeout) { 3542 if (numResultsWait < 0 || resultListener == null) { 3543 throw new IllegalArgumentException( 3544 "Input must be positive number and listener must be non-null"); 3545 } 3546 3547 CaptureResult result = null; 3548 for (int i = 0; i < numResultsWait; i++) { 3549 result = resultListener.getCaptureResult(timeout); 3550 } 3551 3552 return result; 3553 } 3554 3555 /** 3556 * Wait for any expected result key values available in a certain number of results. 3557 * 3558 * <p> 3559 * Check the result immediately if numFramesWait is 0. 3560 * </p> 3561 * 3562 * @param listener The capture listener to get capture result. 3563 * @param resultKey The capture result key associated with the result value. 3564 * @param expectedValues The list of result value need to be waited for, 3565 * return immediately if the list is empty. 3566 * @param numResultsWait Number of frame to wait before times out. 3567 * @param timeout result wait time out in ms. 3568 * @throws TimeoutRuntimeException If more than numResultsWait results are. 3569 * seen before the result matching myRequest arrives, or each individual wait 3570 * for result times out after 'timeout' ms. 
3571 */ waitForAnyResultValue(SimpleCaptureCallback listener, CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait, int timeout)3572 public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener, 3573 CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait, 3574 int timeout) { 3575 if (numResultsWait < 0 || listener == null || expectedValues == null) { 3576 throw new IllegalArgumentException( 3577 "Input must be non-negative number and listener/expectedValues " 3578 + "must be non-null"); 3579 } 3580 3581 int i = 0; 3582 CaptureResult result; 3583 do { 3584 result = listener.getCaptureResult(timeout); 3585 T value = result.get(resultKey); 3586 for ( T expectedValue : expectedValues) { 3587 if (VERBOSE) { 3588 Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: " 3589 + value.toString()); 3590 } 3591 if (value.equals(expectedValue)) { 3592 return; 3593 } 3594 } 3595 } while (i++ < numResultsWait); 3596 3597 throw new TimeoutRuntimeException( 3598 "Unable to get the expected result value " + expectedValues + " for key " + 3599 resultKey.getName() + " after waiting for " + numResultsWait + " results"); 3600 } 3601 3602 /** 3603 * Wait for expected result key value available in a certain number of results. 3604 * 3605 * <p> 3606 * Check the result immediately if numFramesWait is 0. 3607 * </p> 3608 * 3609 * @param listener The capture listener to get capture result 3610 * @param resultKey The capture result key associated with the result value 3611 * @param expectedValue The result value need to be waited for 3612 * @param numResultsWait Number of frame to wait before times out 3613 * @param timeout Wait time out. 3614 * @throws TimeoutRuntimeException If more than numResultsWait results are 3615 * seen before the result matching myRequest arrives, or each individual wait 3616 * for result times out after 'timeout' ms. 3617 */ waitForResultValue(SimpleCaptureCallback listener, CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout)3618 public static <T> void waitForResultValue(SimpleCaptureCallback listener, 3619 CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) { 3620 List<T> expectedValues = new ArrayList<T>(); 3621 expectedValues.add(expectedValue); 3622 waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout); 3623 } 3624 3625 /** 3626 * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED. 3627 * 3628 * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure 3629 * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency 3630 * is unknown.</p> 3631 * 3632 * <p>This is a no-op for {@code LEGACY} devices since they don't report 3633 * the {@code aeState} result.</p> 3634 * 3635 * @param resultListener The capture listener to get capture result back. 3636 * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is 3637 * unknown. 3638 * @param staticInfo corresponding camera device static metadata. 3639 * @param settingsTimeout wait timeout for settings application in ms. 3640 * @param resultTimeout wait timeout for result in ms. 3641 * @param numResultsWait Number of frame to wait before times out. 
3642 */ waitForAeStable(SimpleCaptureCallback resultListener, int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int settingsTimeout, int numResultWait)3643 public static void waitForAeStable(SimpleCaptureCallback resultListener, 3644 int numResultWaitForUnknownLatency, StaticMetadata staticInfo, 3645 int settingsTimeout, int numResultWait) { 3646 waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo, 3647 settingsTimeout); 3648 3649 if (!staticInfo.isHardwareLevelAtLeastLimited()) { 3650 // No-op for metadata 3651 return; 3652 } 3653 List<Integer> expectedAeStates = new ArrayList<Integer>(); 3654 expectedAeStates.add(new Integer(CaptureResult.CONTROL_AE_STATE_CONVERGED)); 3655 expectedAeStates.add(new Integer(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)); 3656 waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates, 3657 numResultWait, settingsTimeout); 3658 } 3659 3660 /** 3661 * Wait for enough results for settings to be applied 3662 * 3663 * @param resultListener The capture listener to get capture result back. 3664 * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is 3665 * unknown. 3666 * @param staticInfo corresponding camera device static metadata. 3667 * @param timeout wait timeout in ms. 3668 */ waitForSettingsApplied(SimpleCaptureCallback resultListener, int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout)3669 public static void waitForSettingsApplied(SimpleCaptureCallback resultListener, 3670 int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) { 3671 int maxLatency = staticInfo.getSyncMaxLatency(); 3672 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 3673 maxLatency = numResultWaitForUnknownLatency; 3674 } 3675 // Wait for settings to take effect 3676 waitForNumResults(resultListener, maxLatency, timeout); 3677 } 3678 getSuitableFpsRangeForDuration(String cameraId, long frameDuration, StaticMetadata staticInfo)3679 public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId, 3680 long frameDuration, StaticMetadata staticInfo) { 3681 // Add 0.05 here so Fps like 29.99 evaluated to 30 3682 int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f); 3683 boolean foundConstantMaxYUVRange = false; 3684 boolean foundYUVStreamingRange = false; 3685 boolean isExternalCamera = staticInfo.isExternalCamera(); 3686 boolean isNIR = staticInfo.isNIRColorFilter(); 3687 3688 // Find suitable target FPS range - as high as possible that covers the max YUV rate 3689 // Also verify that there's a good preview rate as well 3690 List<Range<Integer> > fpsRanges = Arrays.asList( 3691 staticInfo.getAeAvailableTargetFpsRangesChecked()); 3692 Range<Integer> targetRange = null; 3693 for (Range<Integer> fpsRange : fpsRanges) { 3694 if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) { 3695 foundConstantMaxYUVRange = true; 3696 targetRange = fpsRange; 3697 } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) { 3698 targetRange = fpsRange; 3699 } 3700 if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) { 3701 foundYUVStreamingRange = true; 3702 } 3703 3704 } 3705 3706 if (!isExternalCamera) { 3707 assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported", 3708 cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange); 3709 } 3710 3711 if (!isNIR) { 3712 assertTrue(String.format( 3713 "Cam %s: Target FPS range of (x, %d) where x <= 15 
must be supported", 3714 cameraId, minBurstFps), foundYUVStreamingRange); 3715 } 3716 return targetRange; 3717 } 3718 /** 3719 * Get the candidate supported zoom ratios for testing 3720 * 3721 * <p> 3722 * This function returns the bounary values of supported zoom ratio range in addition to 1.0x 3723 * zoom ratio. 3724 * </p> 3725 */ getCandidateZoomRatios(StaticMetadata staticInfo)3726 public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) { 3727 List<Float> zoomRatios = new ArrayList<Float>(); 3728 Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked(); 3729 zoomRatios.add(zoomRatioRange.getLower()); 3730 if (zoomRatioRange.contains(1.0f) && 3731 1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD && 3732 zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) { 3733 zoomRatios.add(1.0f); 3734 } 3735 zoomRatios.add(zoomRatioRange.getUpper()); 3736 3737 return zoomRatios; 3738 } 3739 3740 private static final int PERFORMANCE_CLASS_R = Build.VERSION_CODES.R; 3741 private static final int PERFORMANCE_CLASS_S = Build.VERSION_CODES.R + 1; 3742 3743 /** 3744 * Check whether this mobile device is R performance class as defined in CDD 3745 */ isRPerfClass()3746 public static boolean isRPerfClass() { 3747 return Build.VERSION.MEDIA_PERFORMANCE_CLASS == PERFORMANCE_CLASS_R; 3748 } 3749 3750 /** 3751 * Check whether this mobile device is S performance class as defined in CDD 3752 */ isSPerfClass()3753 public static boolean isSPerfClass() { 3754 return Build.VERSION.MEDIA_PERFORMANCE_CLASS == PERFORMANCE_CLASS_S; 3755 } 3756 3757 /** 3758 * Check whether a camera Id is a primary rear facing camera 3759 */ isPrimaryRearFacingCamera(CameraManager manager, String cameraId)3760 public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId) 3761 throws Exception { 3762 return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK); 3763 } 3764 3765 /** 3766 * Check whether a camera Id is a primary front facing camera 3767 */ isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)3768 public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId) 3769 throws Exception { 3770 return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT); 3771 } 3772 isPrimaryCamera(CameraManager manager, String cameraId, Integer lensFacing)3773 private static boolean isPrimaryCamera(CameraManager manager, String cameraId, 3774 Integer lensFacing) throws Exception { 3775 CameraCharacteristics characteristics; 3776 Integer facing; 3777 3778 String [] ids = manager.getCameraIdList(); 3779 for (String id : ids) { 3780 characteristics = manager.getCameraCharacteristics(id); 3781 facing = characteristics.get(CameraCharacteristics.LENS_FACING); 3782 if (lensFacing.equals(facing)) { 3783 if (cameraId.equals(id)) { 3784 return true; 3785 } else { 3786 return false; 3787 } 3788 } 3789 } 3790 return false; 3791 } 3792 } 3793