/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.rs.RawConverter;
import android.hardware.camera2.cts.rs.RenderScriptSingleton;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.InputConfiguration;
import android.location.Location;
import android.media.ExifInterface;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;

import org.junit.runners.Parameterized;
import org.junit.runner.RunWith;
import org.junit.Test;

import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
import static junit.framework.Assert.*;

/**
 * Tests for the DngCreator API.
 */

@RunWith(Parameterized.class)
public class DngCreatorTest extends Camera2AndroidTestCase {
    private static final String TAG = "DngCreatorTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final String DEBUG_DNG_FILE = "raw16.dng";
    private static final String TEST_DNG_FILE = "test.dng";

    private static final double IMAGE_DIFFERENCE_TOLERANCE = 65;
    private static final int DEFAULT_PATCH_DIMEN = 512;
    private static final int AE_TIMEOUT_MS = 3000;
    private static final int MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_SCALE = 4;
    private static final int MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_MS =
            CAPTURE_WAIT_TIMEOUT_MS * MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_SCALE;

    // Constants used for GPS testing.
    private static final double GPS_DIFFERENCE_TOLERANCE = 0.0001;
    private static final double GPS_LATITUDE = 37.420016;
    private static final double GPS_LONGITUDE = -122.081987;
    private static final String GPS_DATESTAMP = "2015:01:27";
    private static final String GPS_TIMESTAMP = "02:12:01";
    private static final Calendar GPS_CALENDAR =
            Calendar.getInstance(TimeZone.getTimeZone("GMT+0"));

    /** Load DNG validation JNI library on initialization. */
    static {
        System.loadLibrary("ctscamera2_jni");
    }

    static {
        GPS_CALENDAR.set(2015, 0, 27, 2, 12, 01);
    }

    class CapturedData {
        public Pair<List<Image>, CaptureResult> imagePair;
        public CameraCharacteristics characteristics;
    }

    // CapturedData for maximum resolution mode. RAW and JPEG don't share the same capture result,
    // since the mandatory streams for maximum resolution sensor pixel mode don't guarantee more
    // than one stream.
    class CapturedDataMaximumResolution {
        public Pair<Image, CaptureResult> raw;
        public Pair<Image, CaptureResult> jpeg;
        public CameraCharacteristics characteristics;
    }

    class DngDebugParams {
        String deviceId;
        String intermediateStr;
        CameraCharacteristics characteristics;
        CaptureResult captureResult;
        FileOutputStream fileStream;
        FileChannel fileChannel;
        Image jpeg;
        Image raw;
        Bitmap rawBitmap;
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();
        RenderScriptSingleton.setContext(mContext);
    }

    @Override
    public void tearDown() throws Exception {
        RenderScriptSingleton.clearContext();
        super.tearDown();
    }

    /**
     * Test basic raw capture and DNG saving functionality for each of the available cameras.
     *
     * <p>
     * For each camera, capture a single RAW16 image at the first capture size reported for
     * the raw format on that device, and save that image as a DNG file. No further validation
     * is done.
     * </p>
     *
     * <p>
     * Note: Enabling adb shell setprop log.tag.DngCreatorTest VERBOSE will also cause the
     * raw image captured for the first reported camera device to be saved to an output file.
     * </p>
     */
    @Test
    public void testSingleImageBasic() throws Exception {
        for (int i = 0; i < mCameraIdsUnderTest.length; i++) {
            String deviceId = mCameraIdsUnderTest[i];
            ImageReader captureReader = null;
            FileOutputStream fileStream = null;
            ByteArrayOutputStream outputStream = null;
            try {
                if (!mAllStaticInfo.get(deviceId).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                    Log.i(TAG, "RAW capability is not supported in camera "
                            + mCameraIdsUnderTest[i] + ". Skip the test.");
                    continue;
                }

                openDevice(deviceId);
                Size activeArraySize = mStaticInfo.getRawDimensChecked();

                // Create capture image reader
                CameraTestUtils.SimpleImageReaderListener captureListener
                        = new CameraTestUtils.SimpleImageReaderListener();
                captureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2,
                        captureListener);
                Pair<Image, CaptureResult> resultPair = captureSingleRawShot(activeArraySize,
                        /*waitForAe*/false, captureReader, captureListener);
                CameraCharacteristics characteristics = mStaticInfo.getCharacteristics();

                // Test simple writeImage, no header checks
                DngCreator dngCreator = new DngCreator(characteristics, resultPair.second);
                outputStream = new ByteArrayOutputStream();
                dngCreator.writeImage(outputStream, resultPair.first);

                if (VERBOSE) {
                    // Write DNG to file
                    String dngFilePath = mDebugFileNameBase + "/camera_basic_" + deviceId + "_" +
                            DEBUG_DNG_FILE;
                    // Write out the captured DNG file for this camera device if the setprop is
                    // enabled
                    fileStream = new FileOutputStream(dngFilePath);
                    fileStream.write(outputStream.toByteArray());
                    fileStream.flush();
                    fileStream.close();
                    Log.v(TAG, "Test DNG file for camera " + deviceId + " saved to "
                            + dngFilePath);
                }
                assertTrue("Generated DNG file does not pass validation",
                        validateDngNative(outputStream.toByteArray()));
            } finally {
                closeDevice(deviceId);
                closeImageReader(captureReader);

                if (outputStream != null) {
                    outputStream.close();
                }

                if (fileStream != null) {
                    fileStream.close();
                }
            }
        }
    }

    /**
     * Test basic maximum resolution raw capture and DNG saving functionality for each of the
     * available ultra high resolution cameras.
     *
     * <p>
     * For each ultra high resolution camera, capture a single RAW16 image at the first capture
     * size reported for the maximum resolution raw format on that device, and save that image
     * as a DNG file. No further validation is done.
     * </p>
     *
     * <p>
     * Note: Enabling adb shell setprop log.tag.DngCreatorTest VERBOSE will also cause the
     * raw image captured for the first reported camera device to be saved to an output file.
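     *
     * <p>A minimal sketch of the DngCreator flow that both of these basic tests exercise,
     * assuming a captured RAW16 {@link Image} {@code rawImage}, its matching
     * {@link CaptureResult} {@code result}, the camera's {@link CameraCharacteristics}
     * {@code characteristics}, and a hypothetical output path:
     *
     * <pre>{@code
     * DngCreator dngCreator = new DngCreator(characteristics, result);
     * try (FileOutputStream out = new FileOutputStream("/sdcard/raw16.dng")) {
     *     // rawImage must be an ImageFormat.RAW_SENSOR image from the same capture as result
     *     dngCreator.writeImage(out, rawImage);
     * } finally {
     *     dngCreator.close();
     * }
     * }</pre>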
236 * </p> 237 */ 238 @Test testSingleImageBasicMaximumResolution()239 public void testSingleImageBasicMaximumResolution() throws Exception { 240 for (int i = 0; i < mCameraIdsUnderTest.length; i++) { 241 String deviceId = mCameraIdsUnderTest[i]; 242 ImageReader captureReader = null; 243 ImageReader reprocessCaptureReader = null; 244 FileOutputStream fileStream = null; 245 ByteArrayOutputStream outputStream = null; 246 try { 247 // All ultra high resolution sensors must necessarily support RAW 248 if (!mAllStaticInfo.get(deviceId).isCapabilitySupported( 249 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR)) { 250 Log.i(TAG, "ULTRA_HIGH_RESOLUTION_SENSOR capability is not supported in " + 251 " camera " + mCameraIdsUnderTest[i] + ". Skip the test."); 252 continue; 253 } 254 255 openDevice(deviceId); 256 Size activeArraySize = mStaticInfo.getRawDimensChecked(/*maxResolution*/true); 257 258 // Create capture image reader 259 CameraTestUtils.SimpleImageReaderListener captureReaderListener 260 = new CameraTestUtils.SimpleImageReaderListener(); 261 CameraTestUtils.SimpleImageReaderListener reprocessReaderListener 262 = new CameraTestUtils.SimpleImageReaderListener(); 263 264 captureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2, 265 captureReaderListener); 266 267 reprocessCaptureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 268 2, reprocessReaderListener); 269 Pair<Image, CaptureResult> resultPair = null; 270 if (mAllStaticInfo.get(deviceId).isCapabilitySupported( 271 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING)) { 272 resultPair = 273 captureReprocessedRawShot(activeArraySize, captureReader, 274 reprocessCaptureReader, captureReaderListener, 275 reprocessReaderListener, /*waitForAe*/false); 276 } else { 277 resultPair = captureSingleShotMaximumResolution(activeArraySize, 278 captureReader, /*waitForAe*/false, captureReaderListener); 279 } 280 CameraCharacteristics characteristics = mStaticInfo.getCharacteristics(); 281 282 // Test simple writeImage, no header checks 283 DngCreator dngCreator = new DngCreator(characteristics, resultPair.second); 284 outputStream = new ByteArrayOutputStream(); 285 dngCreator.writeImage(outputStream, resultPair.first); 286 287 if (VERBOSE) { 288 // Write DNG to file 289 String dngFilePath = mDebugFileNameBase + "/camera_basic_max_resolution_" + 290 deviceId + "_" + DEBUG_DNG_FILE; 291 // Write out captured DNG file for the first camera device if setprop is enabled 292 fileStream = new FileOutputStream(dngFilePath); 293 fileStream.write(outputStream.toByteArray()); 294 fileStream.flush(); 295 fileStream.close(); 296 Log.v(TAG, "Test DNG file for camera " + deviceId + " saved to " + dngFilePath); 297 } 298 assertTrue("Generated DNG file does not pass validation", 299 validateDngNative(outputStream.toByteArray())); 300 } finally { 301 closeDevice(deviceId); 302 closeImageReader(captureReader); 303 closeImageReader(reprocessCaptureReader); 304 305 if (outputStream != null) { 306 outputStream.close(); 307 } 308 309 if (fileStream != null) { 310 fileStream.close(); 311 } 312 } 313 } 314 } 315 316 /** 317 * Test basic raw capture and DNG saving with a thumbnail, rotation, usercomment, and GPS tags 318 * set. 319 * 320 * <p> 321 * For each camera, capture a single RAW16 image at the first capture size reported for 322 * the raw format on that device, and save that image as a DNG file. GPS information validation 323 * is done via ExifInterface. 
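     *
     * <p>A minimal sketch of the metadata calls this test exercises (the image, stream, and
     * path names here are placeholders; the GPS_* values are the constants defined above):
     *
     * <pre>{@code
     * DngCreator dngCreator = new DngCreator(characteristics, result);
     * Location location = new Location("test");
     * location.setLatitude(GPS_LATITUDE);
     * location.setLongitude(GPS_LONGITUDE);
     * location.setTime(GPS_CALENDAR.getTimeInMillis());
     * dngCreator.setLocation(location);
     * dngCreator.setDescription("helloworld");
     * dngCreator.setOrientation(ExifInterface.ORIENTATION_FLIP_VERTICAL);
     * dngCreator.setThumbnail(yuvImage);  // small YUV_420_888 frame captured alongside the RAW
     * dngCreator.writeImage(outputStream, rawImage);
     *
     * // The written DNG can then be checked back with ExifInterface:
     * float[] latLong = new float[2];
     * assertTrue(new ExifInterface(dngFilePath).getLatLong(latLong));
     * }</pre>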
324 * </p> 325 * 326 * <p> 327 * Note: Enabling adb shell setprop log.tag.DngCreatorTest VERBOSE will also cause the 328 * raw image captured for the first reported camera device to be saved to an output file. 329 * </p> 330 */ 331 @Test testSingleImageThumbnail()332 public void testSingleImageThumbnail() throws Exception { 333 for (int i = 0; i < mCameraIdsUnderTest.length; i++) { 334 String deviceId = mCameraIdsUnderTest[i]; 335 List<ImageReader> captureReaders = new ArrayList<ImageReader>(); 336 List<CameraTestUtils.SimpleImageReaderListener> captureListeners = 337 new ArrayList<CameraTestUtils.SimpleImageReaderListener>(); 338 FileOutputStream fileStream = null; 339 ByteArrayOutputStream outputStream = null; 340 try { 341 if (!mAllStaticInfo.get(deviceId).isCapabilitySupported( 342 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { 343 Log.i(TAG, "RAW capability is not supported in camera " + mCameraIdsUnderTest[i] + 344 ". Skip the test."); 345 continue; 346 } 347 348 openDevice(deviceId); 349 Size activeArraySize = mStaticInfo.getRawDimensChecked(); 350 351 Size[] targetPreviewSizes = 352 mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.YUV_420_888, 353 StaticMetadata.StreamDirection.Output); 354 // Get smallest preview size 355 Size previewSize = mOrderedPreviewSizes.get(mOrderedPreviewSizes.size() - 1); 356 357 // Create capture image reader 358 CameraTestUtils.SimpleImageReaderListener captureListener 359 = new CameraTestUtils.SimpleImageReaderListener(); 360 captureReaders.add(createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2, 361 captureListener)); 362 captureListeners.add(captureListener); 363 364 CameraTestUtils.SimpleImageReaderListener previewListener 365 = new CameraTestUtils.SimpleImageReaderListener(); 366 367 captureReaders.add(createImageReader(previewSize, ImageFormat.YUV_420_888, 2, 368 previewListener)); 369 captureListeners.add(previewListener); 370 371 Date beforeCaptureDate = new Date(); 372 Pair<List<Image>, CaptureResult> resultPair = captureSingleRawShot(activeArraySize, 373 captureReaders, /*waitForAe*/false, captureListeners); 374 Date afterCaptureDate = new Date(); 375 CameraCharacteristics characteristics = mStaticInfo.getCharacteristics(); 376 377 if (VERBOSE) { 378 Log.v(TAG, "Sensor timestamp (ms): " + 379 resultPair.second.get(CaptureResult.SENSOR_TIMESTAMP) / 1000000); 380 Log.v(TAG, "SystemClock.elapsedRealtimeNanos (ms): " + 381 SystemClock.elapsedRealtimeNanos() / 1000000); 382 Log.v(TAG, "SystemClock.uptimeMillis(): " + SystemClock.uptimeMillis()); 383 } 384 // Test simple writeImage, no header checks 385 DngCreator dngCreator = new DngCreator(characteristics, resultPair.second); 386 Location l = new Location("test"); 387 l.reset(); 388 l.setLatitude(GPS_LATITUDE); 389 l.setLongitude(GPS_LONGITUDE); 390 l.setTime(GPS_CALENDAR.getTimeInMillis()); 391 dngCreator.setLocation(l); 392 393 dngCreator.setDescription("helloworld"); 394 dngCreator.setOrientation(ExifInterface.ORIENTATION_FLIP_VERTICAL); 395 dngCreator.setThumbnail(resultPair.first.get(1)); 396 outputStream = new ByteArrayOutputStream(); 397 dngCreator.writeImage(outputStream, resultPair.first.get(0)); 398 399 String filePath = mDebugFileNameBase + "/camera_thumb_" + deviceId + "_" + 400 DEBUG_DNG_FILE; 401 // Write out captured DNG file for the first camera device 402 fileStream = new FileOutputStream(filePath); 403 fileStream.write(outputStream.toByteArray()); 404 fileStream.flush(); 405 fileStream.close(); 406 if (VERBOSE) { 407 Log.v(TAG, "Test DNG file for 
camera " + deviceId + " saved to " + filePath); 408 } 409 410 assertTrue("Generated DNG file does not pass validation", 411 validateDngNative(outputStream.toByteArray())); 412 413 ExifInterface exifInterface = new ExifInterface(filePath); 414 // Verify GPS data. 415 float[] latLong = new float[2]; 416 assertTrue(exifInterface.getLatLong(latLong)); 417 assertEquals(GPS_LATITUDE, latLong[0], GPS_DIFFERENCE_TOLERANCE); 418 assertEquals(GPS_LONGITUDE, latLong[1], GPS_DIFFERENCE_TOLERANCE); 419 assertEquals(GPS_DATESTAMP, 420 exifInterface.getAttribute(ExifInterface.TAG_GPS_DATESTAMP)); 421 assertEquals(GPS_TIMESTAMP, 422 exifInterface.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP)); 423 424 // Verify the orientation. 425 assertEquals(ExifInterface.ORIENTATION_FLIP_VERTICAL, 426 exifInterface.getAttributeInt(ExifInterface.TAG_ORIENTATION, 427 ExifInterface.ORIENTATION_UNDEFINED)); 428 429 // Verify the date/time 430 final SimpleDateFormat dngDateTimeStampFormat = 431 new SimpleDateFormat("yyyy:MM:dd HH:mm:ss"); 432 dngDateTimeStampFormat.setLenient(false); 433 434 String dateTimeString = 435 exifInterface.getAttribute(ExifInterface.TAG_DATETIME); 436 assertTrue(dateTimeString != null); 437 438 Date dateTime = dngDateTimeStampFormat.parse(dateTimeString); 439 long captureTimeMs = dateTime.getTime(); 440 441 Log.i(TAG, "DNG DateTime tag: " + dateTimeString); 442 Log.i(TAG, "Before capture time: " + beforeCaptureDate.getTime()); 443 Log.i(TAG, "Capture time: " + captureTimeMs); 444 Log.i(TAG, "After capture time: " + afterCaptureDate.getTime()); 445 446 // Offset beforeCaptureTime by 1 second to account for rounding down of 447 // DNG tag 448 long beforeCaptureTimeMs = beforeCaptureDate.getTime() - 1000; 449 long afterCaptureTimeMs = afterCaptureDate.getTime(); 450 assertTrue(captureTimeMs >= beforeCaptureTimeMs); 451 assertTrue(captureTimeMs <= afterCaptureTimeMs); 452 453 if (!VERBOSE) { 454 // Delete the captured DNG file. 455 File dngFile = new File(filePath); 456 assertTrue(dngFile.delete()); 457 } 458 } finally { 459 closeDevice(deviceId); 460 for (ImageReader r : captureReaders) { 461 closeImageReader(r); 462 } 463 464 if (outputStream != null) { 465 outputStream.close(); 466 } 467 468 if (fileStream != null) { 469 fileStream.close(); 470 } 471 } 472 } 473 } 474 475 /** 476 * Test basic maximum resolution RAW capture, and ensure that the rendered RAW output is 477 * similar to the maximum resolution JPEG created for a similar frame. 478 * 479 * Since mandatory streams for maximum resolution sensor pixel mode do not guarantee 2 maximum 480 * resolution streams we can't capture RAW + JPEG images of the same frame. Therefore, 2 481 * sessions are created, one for RAW capture and the other for JPEG capture. 482 * 483 * <p> 484 * This test renders the RAW buffer into an RGB bitmap using a rendering pipeline 485 * similar to one in the Adobe DNG validation tool. JPEGs produced by the vendor hardware may 486 * have different tonemapping and saturation applied than the RGB bitmaps produced 487 * from this DNG rendering pipeline, and this test allows for fairly wide variations 488 * between the histograms for the RAW and JPEG buffers to avoid false positives. 489 * </p> 490 * 491 * <p> 492 * To ensure more subtle errors in the colorspace transforms returned for the HAL's RAW 493 * metadata, the DNGs and JPEGs produced here should also be manually compared using external 494 * DNG rendering tools. 
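     *
     * <p>For these captures the still capture request is explicitly switched into the maximum
     * resolution sensor pixel mode (see captureRawShots below); roughly:
     *
     * <pre>{@code
     * CaptureRequest.Builder request =
     *         mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
     * request.addTarget(rawReader.getSurface());  // rawReader is a placeholder name
     * request.set(CaptureRequest.SENSOR_PIXEL_MODE,
     *         CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
     * }</pre>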
The DNG, rendered RGB bitmap, and JPEG buffer for this test can be 495 * dumped to the SD card for further examination by enabling the 'verbose' mode for this test 496 * using: 497 * adb shell setprop log.tag.DngCreatorTest VERBOSE 498 * </p> 499 */ 500 @Test testRaw16JpegMaximumResolutionConsistency()501 public void testRaw16JpegMaximumResolutionConsistency() throws Exception { 502 for (String deviceId : mCameraIdsUnderTest) { 503 ImageReader rawImageReader = null; 504 ImageReader jpegImageReader = null; 505 FileOutputStream fileStream = null; 506 FileChannel fileChannel = null; 507 try { 508 // All ultra high resolution sensors must necessarily support RAW 509 if (!mAllStaticInfo.get(deviceId).isCapabilitySupported( 510 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR)) { 511 Log.i(TAG, "ULTRA_HIGH_RESOLUTION_SENSOR capability is not supported in " + 512 " camera " + deviceId + ". Skip " + 513 "testRaw16JpegMaximumResolutionConsistency"); 514 continue; 515 } 516 517 CapturedDataMaximumResolution data = 518 captureRawJpegImagePairMaximumResolution(deviceId, rawImageReader, 519 jpegImageReader); 520 if (data == null) { 521 continue; 522 } 523 Image raw = data.raw.first; 524 Image jpeg = data.jpeg.first; 525 526 Bitmap rawBitmap = Bitmap.createBitmap(raw.getWidth(), raw.getHeight(), 527 Bitmap.Config.ARGB_8888); 528 529 byte[] rawPlane = new byte[raw.getPlanes()[0].getRowStride() * raw.getHeight()]; 530 531 // Render RAW image to a bitmap 532 raw.getPlanes()[0].getBuffer().get(rawPlane); 533 raw.getPlanes()[0].getBuffer().rewind(); 534 535 RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(), 536 raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane, 537 data.characteristics, /*captureREsult*/data.raw.second, /*offsetX*/ 0, 538 /*offsetY*/ 0, /*out*/ rawBitmap); 539 540 rawPlane = null; 541 System.gc(); // Hint to VM 542 543 if (VERBOSE) { 544 DngDebugParams params = new DngDebugParams(); 545 params.deviceId = deviceId; 546 params.characteristics = data.characteristics; 547 params.captureResult = data.raw.second; 548 params.fileStream = fileStream; 549 params.raw = raw; 550 params.jpeg = jpeg; 551 params.fileChannel = fileChannel; 552 params.rawBitmap = rawBitmap; 553 params.intermediateStr = "maximum_resolution_"; 554 555 debugDumpDng(params); 556 } 557 558 validateRawJpegImagePair(rawBitmap, jpeg, deviceId); 559 } finally { 560 closeImageReader(rawImageReader); 561 closeImageReader(jpegImageReader); 562 563 if (fileChannel != null) { 564 fileChannel.close(); 565 } 566 if (fileStream != null) { 567 fileStream.close(); 568 } 569 } 570 } 571 } 572 573 574 575 /** 576 * Test basic RAW capture, and ensure that the rendered RAW output is similar to the JPEG 577 * created for the same frame. 578 * 579 * <p> 580 * This test renders the RAW buffer into an RGB bitmap using a rendering pipeline 581 * similar to one in the Adobe DNG validation tool. JPEGs produced by the vendor hardware may 582 * have different tonemapping and saturation applied than the RGB bitmaps produced 583 * from this DNG rendering pipeline, and this test allows for fairly wide variations 584 * between the histograms for the RAW and JPEG buffers to avoid false positives. 585 * </p> 586 * 587 * <p> 588 * To ensure more subtle errors in the colorspace transforms returned for the HAL's RAW 589 * metadata, the DNGs and JPEGs produced here should also be manually compared using external 590 * DNG rendering tools. 
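     *
     * <p>The automated check crops equally sized center patches from the JPEG and from the
     * rendered RAW bitmap and compares them; roughly what validateRawJpegImagePair below does:
     *
     * <pre>{@code
     * double difference = BitmapUtils.calcDifferenceMetric(jpegPatch, rawPatch);
     * assertTrue("RAW and JPEG center patches are not similar enough",
     *         difference <= IMAGE_DIFFERENCE_TOLERANCE);
     * }</pre>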
The DNG, rendered RGB bitmap, and JPEG buffer for this test can be 591 * dumped to the SD card for further examination by enabling the 'verbose' mode for this test 592 * using: 593 * adb shell setprop log.tag.DngCreatorTest VERBOSE 594 * </p> 595 */ 596 @Test testRaw16JpegConsistency()597 public void testRaw16JpegConsistency() throws Exception { 598 for (String deviceId : mCameraIdsUnderTest) { 599 List<ImageReader> captureReaders = new ArrayList<>(); 600 FileOutputStream fileStream = null; 601 FileChannel fileChannel = null; 602 try { 603 CapturedData data = captureRawJpegImagePair(deviceId, captureReaders); 604 if (data == null) { 605 continue; 606 } 607 Image raw = data.imagePair.first.get(0); 608 Image jpeg = data.imagePair.first.get(1); 609 610 Bitmap rawBitmap = Bitmap.createBitmap(raw.getWidth(), raw.getHeight(), 611 Bitmap.Config.ARGB_8888); 612 613 byte[] rawPlane = new byte[raw.getPlanes()[0].getRowStride() * raw.getHeight()]; 614 615 // Render RAW image to a bitmap 616 raw.getPlanes()[0].getBuffer().get(rawPlane); 617 raw.getPlanes()[0].getBuffer().rewind(); 618 619 RawConverter.convertToSRGB(RenderScriptSingleton.getRS(), raw.getWidth(), 620 raw.getHeight(), raw.getPlanes()[0].getRowStride(), rawPlane, 621 data.characteristics, data.imagePair.second, /*offsetX*/ 0, /*offsetY*/ 0, 622 /*out*/ rawBitmap); 623 624 rawPlane = null; 625 System.gc(); // Hint to VM 626 627 if (VERBOSE) { 628 DngDebugParams params = new DngDebugParams(); 629 params.deviceId = deviceId; 630 params.characteristics = data.characteristics; 631 params.captureResult = data.imagePair.second; 632 params.fileStream = fileStream; 633 params.raw = raw; 634 params.jpeg = jpeg; 635 params.fileChannel = fileChannel; 636 params.rawBitmap = rawBitmap; 637 params.intermediateStr = ""; 638 639 debugDumpDng(params); 640 } 641 642 validateRawJpegImagePair(rawBitmap, jpeg, deviceId); 643 } finally { 644 for (ImageReader r : captureReaders) { 645 closeImageReader(r); 646 } 647 648 if (fileChannel != null) { 649 fileChannel.close(); 650 } 651 652 if (fileStream != null) { 653 fileStream.close(); 654 } 655 } 656 } 657 } 658 659 /** 660 * Test basic DNG creation, ensure that the DNG image can be rendered by BitmapFactory. 661 */ 662 @Test testDngRenderingByBitmapFactor()663 public void testDngRenderingByBitmapFactor() throws Exception { 664 for (String deviceId : mCameraIdsUnderTest) { 665 List<ImageReader> captureReaders = new ArrayList<>(); 666 667 CapturedData data = captureRawJpegImagePair(deviceId, captureReaders); 668 if (data == null) { 669 continue; 670 } 671 Image raw = data.imagePair.first.get(0); 672 Image jpeg = data.imagePair.first.get(1); 673 674 // Generate DNG file 675 DngCreator dngCreator = new DngCreator(data.characteristics, data.imagePair.second); 676 677 // Write DNG to file 678 String dngFilePath = mDebugFileNameBase + "/camera_" + 679 deviceId + "_" + TEST_DNG_FILE; 680 681 // Write out captured DNG file for the first camera device if setprop is enabled 682 try (FileOutputStream fileStream = new FileOutputStream(dngFilePath)) { 683 dngCreator.writeImage(fileStream, raw); 684 685 // Render the DNG file using BitmapFactory. 686 Bitmap rawBitmap = BitmapFactory.decodeFile(dngFilePath); 687 assertNotNull(rawBitmap); 688 689 validateRawJpegImagePair(rawBitmap, jpeg, deviceId); 690 } finally { 691 for (ImageReader r : captureReaders) { 692 closeImageReader(r); 693 } 694 695 System.gc(); // Hint to VM 696 } 697 } 698 } 699 700 /* 701 * Create RAW + JPEG image pair with characteristics info. 
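     *
     * Rough outline (see the body below for the actual flow): open the device, look up the
     * checked RAW dimensions and the largest available JPEG size, create one RAW_SENSOR reader
     * and one JPEG reader with their own listeners, then issue a single still capture targeting
     * both surfaces and return the two images together with their shared CaptureResult.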
702 */ captureRawJpegImagePair(String deviceId, List<ImageReader> captureReaders)703 private CapturedData captureRawJpegImagePair(String deviceId, List<ImageReader> captureReaders) 704 throws Exception { 705 CapturedData data = new CapturedData(); 706 List<CameraTestUtils.SimpleImageReaderListener> captureListeners = new ArrayList<>(); 707 try { 708 if (!mAllStaticInfo.get(deviceId).isCapabilitySupported( 709 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { 710 Log.i(TAG, "RAW capability is not supported in camera " + deviceId 711 + ". Skip the test."); 712 return null; 713 } 714 715 openDevice(deviceId); 716 Size activeArraySize = mStaticInfo.getRawDimensChecked(); 717 718 // Get largest jpeg size 719 Size[] targetJpegSizes = mStaticInfo.getAvailableSizesForFormatChecked( 720 ImageFormat.JPEG, StaticMetadata.StreamDirection.Output); 721 722 Size largestJpegSize = Collections.max(Arrays.asList(targetJpegSizes), 723 new CameraTestUtils.SizeComparator()); 724 725 // Create raw image reader and capture listener 726 CameraTestUtils.SimpleImageReaderListener rawListener = 727 new CameraTestUtils.SimpleImageReaderListener(); 728 captureReaders.add(createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2, 729 rawListener)); 730 captureListeners.add(rawListener); 731 732 733 // Create jpeg image reader and capture listener 734 CameraTestUtils.SimpleImageReaderListener jpegListener = 735 new CameraTestUtils.SimpleImageReaderListener(); 736 captureReaders.add(createImageReader(largestJpegSize, ImageFormat.JPEG, 2, 737 jpegListener)); 738 captureListeners.add(jpegListener); 739 740 data.imagePair = captureSingleRawShot(activeArraySize, 741 captureReaders, /*waitForAe*/ true, captureListeners); 742 data.characteristics = mStaticInfo.getCharacteristics(); 743 744 Image raw = data.imagePair.first.get(0); 745 Size rawBitmapSize = new Size(raw.getWidth(), raw.getHeight()); 746 assertTrue("Raw bitmap size must be equal to either pre-correction active array" + 747 " size or pixel array size.", rawBitmapSize.equals(activeArraySize)); 748 749 return data; 750 } finally { 751 closeDevice(deviceId); 752 } 753 } 754 debugDumpDng(DngDebugParams params)755 private void debugDumpDng(DngDebugParams params) throws Exception { 756 // Generate DNG file 757 DngCreator dngCreator = 758 new DngCreator(params.characteristics, params.captureResult); 759 760 // Write DNG to file 761 String dngFilePath = mDebugFileNameBase + "/camera_" + params.intermediateStr + 762 params.deviceId + "_" + DEBUG_DNG_FILE; 763 // Write out captured DNG file for the first camera device if setprop is enabled 764 params.fileStream = new FileOutputStream(dngFilePath); 765 dngCreator.writeImage(params.fileStream, params.raw); 766 params.fileStream.flush(); 767 params.fileStream.close(); 768 Log.v(TAG, "Test DNG file for camera " + params.deviceId + " saved to " + dngFilePath); 769 770 // Write JPEG to file 771 String jpegFilePath = mDebugFileNameBase + "/camera_" + params.intermediateStr + 772 params.deviceId + "_jpeg.jpg"; 773 // Write out captured DNG file for the first camera device if setprop is enabled 774 params.fileChannel = new FileOutputStream(jpegFilePath).getChannel(); 775 ByteBuffer jPlane = params.jpeg.getPlanes()[0].getBuffer(); 776 params.fileChannel.write(jPlane); 777 params.fileChannel.close(); 778 jPlane.rewind(); 779 Log.v(TAG, "Test JPEG file for camera " + params.deviceId + " saved to " + 780 jpegFilePath); 781 782 // Write jpeg generated from demosaiced RAW frame to file 783 String rawFilePath = 
mDebugFileNameBase + "/camera_" + params.intermediateStr + 784 params.deviceId + "_raw.jpg"; 785 // Write out captured DNG file for the first camera device if setprop is enabled 786 params.fileStream = new FileOutputStream(rawFilePath); 787 params.rawBitmap.compress(Bitmap.CompressFormat.JPEG, 90, params.fileStream); 788 params.fileStream.flush(); 789 params.fileStream.close(); 790 Log.v(TAG, "Test converted RAW file for camera " + params.deviceId + " saved to " + 791 rawFilePath); 792 } 793 794 /* 795 * Create RAW + JPEG image pair with characteristics info. Assumes the device supports the RAW 796 * capability. 797 */ captureRawJpegImagePairMaximumResolution(String deviceId, ImageReader rawCaptureReader, ImageReader jpegCaptureReader)798 private CapturedDataMaximumResolution captureRawJpegImagePairMaximumResolution(String deviceId, 799 ImageReader rawCaptureReader, ImageReader jpegCaptureReader) 800 throws Exception { 801 CapturedDataMaximumResolution data = new CapturedDataMaximumResolution(); 802 try { 803 804 openDevice(deviceId); 805 Size activeArraySize = mStaticInfo.getRawDimensChecked(/*maxResolution*/true); 806 807 // Get largest jpeg size 808 Size[] targetJpegSizes = mStaticInfo.getAvailableSizesForFormatChecked( 809 ImageFormat.JPEG, StaticMetadata.StreamDirection.Output, /*fastSizes*/ true, 810 /*slowSizes*/ true, /*maxResolution*/true); 811 812 Size largestJpegSize = Collections.max(Arrays.asList(targetJpegSizes), 813 new CameraTestUtils.SizeComparator()); 814 815 // Create raw image reader and capture listener 816 CameraTestUtils.SimpleImageReaderListener rawCaptureReaderListener = 817 new CameraTestUtils.SimpleImageReaderListener(); 818 rawCaptureReader = createImageReader(activeArraySize, ImageFormat.RAW_SENSOR, 2, 819 rawCaptureReaderListener); 820 821 // Create jpeg image reader and capture listener 822 CameraTestUtils.SimpleImageReaderListener jpegCaptureListener = 823 new CameraTestUtils.SimpleImageReaderListener(); 824 jpegCaptureReader = createImageReader(largestJpegSize, ImageFormat.JPEG, 2, 825 jpegCaptureListener); 826 827 Pair<Image, CaptureResult> jpegResultPair = 828 captureSingleShotMaximumResolution(activeArraySize, 829 jpegCaptureReader, /*waitForAe*/true, jpegCaptureListener); 830 data.jpeg = jpegResultPair; 831 data.characteristics = mStaticInfo.getCharacteristics(); 832 // Create capture image reader 833 CameraTestUtils.SimpleImageReaderListener outputRawCaptureReaderListener 834 = new CameraTestUtils.SimpleImageReaderListener(); 835 CameraTestUtils.SimpleImageReaderListener reprocessReaderListener 836 = new CameraTestUtils.SimpleImageReaderListener(); 837 838 ImageReader outputRawCaptureReader = createImageReader(activeArraySize, 839 ImageFormat.RAW_SENSOR, 2, outputRawCaptureReaderListener); 840 Pair<Image, CaptureResult> rawResultPair = null; 841 if (mAllStaticInfo.get(deviceId).isCapabilitySupported( 842 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING)) { 843 rawResultPair = 844 captureReprocessedRawShot(activeArraySize, outputRawCaptureReader, 845 rawCaptureReader, outputRawCaptureReaderListener, 846 reprocessReaderListener, /*waitForAe*/ true); 847 } else { 848 rawResultPair = captureSingleShotMaximumResolution(activeArraySize, 849 rawCaptureReader, /*waitForAe*/true, rawCaptureReaderListener); 850 } 851 data.raw = rawResultPair; 852 Size rawBitmapSize = 853 new Size(rawResultPair.first.getWidth(), rawResultPair.first.getHeight()); 854 assertTrue("Raw bitmap size must be equal to either pre-correction active array" + 855 " 
size or pixel array size.", rawBitmapSize.equals(activeArraySize)); 856 857 return data; 858 } finally { 859 closeDevice(deviceId); 860 } 861 } 862 863 /* 864 * Verify the image pair by comparing the center patch. 865 */ validateRawJpegImagePair(Bitmap rawBitmap, Image jpeg, String deviceId)866 private void validateRawJpegImagePair(Bitmap rawBitmap, Image jpeg, String deviceId) 867 throws Exception { 868 // Decompress JPEG image to a bitmap 869 byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpeg); 870 871 // Get JPEG dimensions without decoding 872 BitmapFactory.Options opt0 = new BitmapFactory.Options(); 873 opt0.inJustDecodeBounds = true; 874 BitmapFactory.decodeByteArray(compressedJpegData, /*offset*/0, 875 compressedJpegData.length, /*inout*/opt0); 876 Rect jpegDimens = new Rect(0, 0, opt0.outWidth, opt0.outHeight); 877 878 // Find square center patch from JPEG and RAW bitmaps 879 RectF jpegRect = new RectF(jpegDimens); 880 RectF rawRect = new RectF(0, 0, rawBitmap.getWidth(), rawBitmap.getHeight()); 881 int sideDimen = Math.min(Math.min(Math.min(Math.min(DEFAULT_PATCH_DIMEN, 882 jpegDimens.width()), jpegDimens.height()), rawBitmap.getWidth()), 883 rawBitmap.getHeight()); 884 885 RectF jpegIntermediate = new RectF(0, 0, sideDimen, sideDimen); 886 jpegIntermediate.offset(jpegRect.centerX() - jpegIntermediate.centerX(), 887 jpegRect.centerY() - jpegIntermediate.centerY()); 888 889 RectF rawIntermediate = new RectF(0, 0, sideDimen, sideDimen); 890 rawIntermediate.offset(rawRect.centerX() - rawIntermediate.centerX(), 891 rawRect.centerY() - rawIntermediate.centerY()); 892 Rect jpegFinal = new Rect(); 893 jpegIntermediate.roundOut(jpegFinal); 894 Rect rawFinal = new Rect(); 895 rawIntermediate.roundOut(rawFinal); 896 897 // Get RAW center patch, and free up rest of RAW image 898 Bitmap rawPatch = Bitmap.createBitmap(rawBitmap, rawFinal.left, rawFinal.top, 899 rawFinal.width(), rawFinal.height()); 900 rawBitmap.recycle(); 901 rawBitmap = null; 902 System.gc(); // Hint to VM 903 904 BitmapFactory.Options opt = new BitmapFactory.Options(); 905 opt.inPreferredConfig = Bitmap.Config.ARGB_8888; 906 Bitmap jpegPatch = BitmapRegionDecoder.newInstance(compressedJpegData, 907 /*offset*/0, compressedJpegData.length, /*isShareable*/true). 
908 decodeRegion(jpegFinal, opt); 909 910 // Compare center patch from JPEG and rendered RAW bitmap 911 double difference = BitmapUtils.calcDifferenceMetric(jpegPatch, rawPatch); 912 if (difference > IMAGE_DIFFERENCE_TOLERANCE) { 913 FileOutputStream fileStream = null; 914 try { 915 // Write JPEG patch to file 916 String jpegFilePath = mDebugFileNameBase + "/camera_" + deviceId + 917 "_jpeg_patch.jpg"; 918 fileStream = new FileOutputStream(jpegFilePath); 919 jpegPatch.compress(Bitmap.CompressFormat.JPEG, 90, fileStream); 920 fileStream.flush(); 921 fileStream.close(); 922 Log.e(TAG, "Failed JPEG patch file for camera " + deviceId + " saved to " + 923 jpegFilePath); 924 925 // Write RAW patch to file 926 String rawFilePath = mDebugFileNameBase + "/camera_" + deviceId + 927 "_raw_patch.jpg"; 928 fileStream = new FileOutputStream(rawFilePath); 929 rawPatch.compress(Bitmap.CompressFormat.JPEG, 90, fileStream); 930 fileStream.flush(); 931 fileStream.close(); 932 Log.e(TAG, "Failed RAW patch file for camera " + deviceId + " saved to " + 933 rawFilePath); 934 935 fail("Camera " + deviceId + ": RAW and JPEG image at for the same " + 936 "frame are not similar, center patches have difference metric of " + 937 difference); 938 } finally { 939 if (fileStream != null) { 940 fileStream.close(); 941 } 942 } 943 } 944 } 945 captureSingleRawShot(Size s, boolean waitForAe, ImageReader captureReader, CameraTestUtils.SimpleImageReaderListener captureListener)946 private Pair<Image, CaptureResult> captureSingleRawShot(Size s, boolean waitForAe, 947 ImageReader captureReader, 948 CameraTestUtils.SimpleImageReaderListener captureListener) throws Exception { 949 List<ImageReader> readers = new ArrayList<ImageReader>(); 950 readers.add(captureReader); 951 List<CameraTestUtils.SimpleImageReaderListener> listeners = 952 new ArrayList<CameraTestUtils.SimpleImageReaderListener>(); 953 listeners.add(captureListener); 954 Pair<List<Image>, CaptureResult> res = captureSingleRawShot(s, readers, waitForAe, 955 listeners); 956 return new Pair<Image, CaptureResult>(res.first.get(0), res.second); 957 } 958 captureSingleRawShot(Size s, List<ImageReader> captureReaders, boolean waitForAe, List<CameraTestUtils.SimpleImageReaderListener> captureListeners)959 private Pair<List<Image>, CaptureResult> captureSingleRawShot(Size s, 960 List<ImageReader> captureReaders, boolean waitForAe, 961 List<CameraTestUtils.SimpleImageReaderListener> captureListeners) throws Exception { 962 return captureRawShots(s, captureReaders, waitForAe, captureListeners, 1, 963 /*maxResolution*/false).get(0); 964 } 965 captureSingleShotMaximumResolution(Size s, ImageReader captureReader, boolean waitForAe, CameraTestUtils.SimpleImageReaderListener captureListener)966 private Pair<Image, CaptureResult> captureSingleShotMaximumResolution(Size s, 967 ImageReader captureReader, boolean waitForAe, 968 CameraTestUtils.SimpleImageReaderListener captureListener) 969 throws Exception { 970 List<ImageReader> readers = new ArrayList<ImageReader>(); 971 readers.add(captureReader); 972 List<CameraTestUtils.SimpleImageReaderListener> listeners = 973 new ArrayList<CameraTestUtils.SimpleImageReaderListener>(); 974 listeners.add(captureListener); 975 Pair<List<Image>, CaptureResult> res = captureRawShots(s, readers, waitForAe, 976 listeners, /*numShots*/ 1, /*maxResolution*/ true).get(0); 977 return new Pair<Image, CaptureResult>(res.first.get(0), res.second); 978 } 979 captureReprocessedRawShot(Size sz, ImageReader inputReader, ImageReader reprocessOutputReader, 
CameraTestUtils.SimpleImageReaderListener inputReaderListener, CameraTestUtils.SimpleImageReaderListener reprocessReaderListener, boolean waitForAe)980 private Pair<Image, CaptureResult> captureReprocessedRawShot(Size sz, 981 ImageReader inputReader, 982 ImageReader reprocessOutputReader, 983 CameraTestUtils.SimpleImageReaderListener inputReaderListener, 984 CameraTestUtils.SimpleImageReaderListener reprocessReaderListener, 985 boolean waitForAe) throws Exception { 986 987 InputConfiguration inputConfig = 988 new InputConfiguration(sz.getWidth(), sz.getHeight(), ImageFormat.RAW_SENSOR); 989 CameraTestUtils.SimpleCaptureCallback inputCaptureListener = 990 new CameraTestUtils.SimpleCaptureCallback(); 991 CameraTestUtils.SimpleCaptureCallback reprocessOutputCaptureListener = 992 new CameraTestUtils.SimpleCaptureCallback(); 993 994 inputReader.setOnImageAvailableListener(inputReaderListener, mHandler); 995 reprocessOutputReader.setOnImageAvailableListener(reprocessReaderListener, mHandler); 996 997 ArrayList<Surface> outputSurfaces = new ArrayList<Surface>(); 998 outputSurfaces.add(inputReader.getSurface()); 999 outputSurfaces.add(reprocessOutputReader.getSurface()); 1000 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1001 ImageReader previewReader = null; 1002 if (waitForAe) { 1003 // Also setup a small YUV output for AE metering if needed 1004 Size yuvSize = (mOrderedPreviewSizes.size() == 0) ? null : 1005 mOrderedPreviewSizes.get(mOrderedPreviewSizes.size() - 1); 1006 assertNotNull("Must support at least one small YUV size.", yuvSize); 1007 previewReader = createImageReader(yuvSize, ImageFormat.YUV_420_888, 1008 /*maxNumImages*/2, new CameraTestUtils.ImageDropperListener()); 1009 outputSurfaces.add(previewReader.getSurface()); 1010 } 1011 1012 createReprocessableSession(inputConfig, outputSurfaces); 1013 1014 if (waitForAe) { 1015 CaptureRequest.Builder precaptureRequest = 1016 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1017 assertNotNull("Fail to get captureRequest", precaptureRequest); 1018 precaptureRequest.addTarget(previewReader.getSurface()); 1019 precaptureRequest.set(CaptureRequest.CONTROL_MODE, 1020 CaptureRequest.CONTROL_MODE_AUTO); 1021 precaptureRequest.set(CaptureRequest.CONTROL_AE_MODE, 1022 CaptureRequest.CONTROL_AE_MODE_ON); 1023 1024 final ConditionVariable waitForAeCondition = new ConditionVariable(/*isOpen*/false); 1025 CameraCaptureSession.CaptureCallback captureCallback = 1026 new CameraCaptureSession.CaptureCallback() { 1027 @Override 1028 public void onCaptureProgressed(CameraCaptureSession session, 1029 CaptureRequest request, CaptureResult partialResult) { 1030 Integer aeState = partialResult.get(CaptureResult.CONTROL_AE_STATE); 1031 if (aeState != null && 1032 (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED || 1033 aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED)) { 1034 waitForAeCondition.open(); 1035 } 1036 } 1037 1038 @Override 1039 public void onCaptureCompleted(CameraCaptureSession session, 1040 CaptureRequest request, TotalCaptureResult result) { 1041 int aeState = result.get(CaptureResult.CONTROL_AE_STATE); 1042 if (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED || 1043 aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { 1044 waitForAeCondition.open(); 1045 } 1046 } 1047 }; 1048 1049 startCapture(precaptureRequest.build(), /*repeating*/true, captureCallback, mHandler); 1050 1051 precaptureRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 1052 
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 1053 startCapture(precaptureRequest.build(), /*repeating*/false, captureCallback, mHandler); 1054 assertTrue("Timeout out waiting for AE to converge", 1055 waitForAeCondition.block(AE_TIMEOUT_MS)); 1056 } 1057 ImageWriter inputWriter = 1058 ImageWriter.newInstance(mCameraSession.getInputSurface(), 1); 1059 // Prepare a request for reprocess input 1060 CaptureRequest.Builder builder = mCamera.createCaptureRequest( 1061 CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG); 1062 builder.addTarget(inputReader.getSurface()); 1063 // This is a max resolution capture 1064 builder.set(CaptureRequest.SENSOR_PIXEL_MODE, 1065 CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION); 1066 CaptureRequest inputRequest = builder.build(); 1067 mCameraSession.capture(inputRequest, inputCaptureListener, mHandler); 1068 List<CaptureRequest> reprocessCaptureRequests = new ArrayList<>(); 1069 1070 TotalCaptureResult inputResult = 1071 inputCaptureListener.getTotalCaptureResult( 1072 MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_MS); 1073 builder = mCamera.createReprocessCaptureRequest(inputResult); 1074 inputWriter.queueInputImage(inputReaderListener.getImage( 1075 MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_MS)); 1076 builder.addTarget(reprocessOutputReader.getSurface()); 1077 reprocessCaptureRequests.add(builder.build()); 1078 mCameraSession.captureBurst(reprocessCaptureRequests, reprocessOutputCaptureListener, 1079 mHandler); 1080 TotalCaptureResult result = reprocessOutputCaptureListener.getTotalCaptureResult( 1081 CAPTURE_WAIT_TIMEOUT_MS); 1082 return new Pair<Image, CaptureResult>(reprocessReaderListener.getImage( 1083 MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_MS), result); 1084 } 1085 1086 /** 1087 * Capture raw images. 1088 * 1089 * <p>Capture raw images for a given size.</p> 1090 * 1091 * @param sz The size of the raw image to capture. Must be one of the available sizes for this 1092 * device. 1093 * 1094 * @param captureReaders The image readers which are associated with the targets for this 1095 * capture. 1096 * 1097 * @param waitForAe Whether we should wait for AE to converge before capturing outputs for 1098 * the captureReaders targets 1099 * 1100 * @param captureListeners ImageReader listeners which wait on the captured images to be 1101 * available. 1102 * 1103 * @param numShots The number of shots to be captured 1104 * 1105 * @param maxResolution Whether the target in captureReaders are max resolution captures. If 1106 * this is set to true, captureReaders.size() must be == 1 ( in order to 1107 * satisfy mandatory streams for maximum resolution sensor pixel mode). 1108 * 1109 * @return a list of pairs containing a {@link Image} and {@link CaptureResult} used for 1110 * each capture. 1111 */ captureRawShots(Size sz, List<ImageReader> captureReaders, boolean waitForAe, List<CameraTestUtils.SimpleImageReaderListener> captureListeners, int numShots, boolean maxResolution)1112 private List<Pair<List<Image>, CaptureResult>> captureRawShots(Size sz, 1113 List<ImageReader> captureReaders, boolean waitForAe, 1114 List<CameraTestUtils.SimpleImageReaderListener> captureListeners, 1115 int numShots, boolean maxResolution) throws Exception { 1116 if (VERBOSE) { 1117 Log.v(TAG, "captureSingleRawShot - Capturing raw image."); 1118 } 1119 1120 int timeoutScale = maxResolution ? 
MAX_RESOLUTION_CAPTURE_WAIT_TIMEOUT_SCALE : 1; 1121 Size[] targetCaptureSizes = 1122 mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.RAW_SENSOR, 1123 StaticMetadata.StreamDirection.Output, /*fastSizes*/ true, 1124 /*slowSizes*/ true, maxResolution); 1125 1126 if (maxResolution) { 1127 assertTrue("Maximum number of maximum resolution targets for a session should be 1 as" + 1128 " per the mandatory streams guarantee", captureReaders.size() == 1); 1129 } 1130 1131 // Validate size 1132 boolean validSize = false; 1133 for (int i = 0; i < targetCaptureSizes.length; ++i) { 1134 if (targetCaptureSizes[i].equals(sz)) { 1135 validSize = true; 1136 break; 1137 } 1138 } 1139 assertTrue("Capture size is supported.", validSize); 1140 1141 // Capture images. 1142 final List<Surface> outputSurfaces = new ArrayList<Surface>(); 1143 for (ImageReader captureReader : captureReaders) { 1144 Surface captureSurface = captureReader.getSurface(); 1145 outputSurfaces.add(captureSurface); 1146 } 1147 1148 // Set up still capture template targeting JPEG/RAW outputs 1149 CaptureRequest.Builder request = 1150 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); 1151 assertNotNull("Fail to get captureRequest", request); 1152 for (Surface surface : outputSurfaces) { 1153 request.addTarget(surface); 1154 } 1155 1156 ImageReader previewReader = null; 1157 if (waitForAe) { 1158 // Also setup a small YUV output for AE metering if needed 1159 Size yuvSize = (mOrderedPreviewSizes.size() == 0) ? null : 1160 mOrderedPreviewSizes.get(mOrderedPreviewSizes.size() - 1); 1161 assertNotNull("Must support at least one small YUV size.", yuvSize); 1162 previewReader = createImageReader(yuvSize, ImageFormat.YUV_420_888, 1163 /*maxNumImages*/2, new CameraTestUtils.ImageDropperListener()); 1164 outputSurfaces.add(previewReader.getSurface()); 1165 } 1166 1167 createSession(outputSurfaces); 1168 1169 if (waitForAe) { 1170 CaptureRequest.Builder precaptureRequest = 1171 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1172 assertNotNull("Fail to get captureRequest", precaptureRequest); 1173 precaptureRequest.addTarget(previewReader.getSurface()); 1174 precaptureRequest.set(CaptureRequest.CONTROL_MODE, 1175 CaptureRequest.CONTROL_MODE_AUTO); 1176 precaptureRequest.set(CaptureRequest.CONTROL_AE_MODE, 1177 CaptureRequest.CONTROL_AE_MODE_ON); 1178 1179 final ConditionVariable waitForAeCondition = new ConditionVariable(/*isOpen*/false); 1180 CameraCaptureSession.CaptureCallback captureCallback = 1181 new CameraCaptureSession.CaptureCallback() { 1182 @Override 1183 public void onCaptureProgressed(CameraCaptureSession session, 1184 CaptureRequest request, CaptureResult partialResult) { 1185 Integer aeState = partialResult.get(CaptureResult.CONTROL_AE_STATE); 1186 if (aeState != null && 1187 (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED || 1188 aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED)) { 1189 waitForAeCondition.open(); 1190 } 1191 } 1192 1193 @Override 1194 public void onCaptureCompleted(CameraCaptureSession session, 1195 CaptureRequest request, TotalCaptureResult result) { 1196 int aeState = result.get(CaptureResult.CONTROL_AE_STATE); 1197 if (aeState == CaptureRequest.CONTROL_AE_STATE_CONVERGED || 1198 aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) { 1199 waitForAeCondition.open(); 1200 } 1201 } 1202 }; 1203 startCapture(precaptureRequest.build(), /*repeating*/true, captureCallback, mHandler); 1204 1205 precaptureRequest.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 1206 
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            startCapture(precaptureRequest.build(), /*repeating*/false, captureCallback, mHandler);
            assertTrue("Timed out waiting for AE to converge",
                    waitForAeCondition.block(AE_TIMEOUT_MS));
        }

        request.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        if (maxResolution) {
            request.set(CaptureRequest.SENSOR_PIXEL_MODE,
                    CaptureRequest.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
        }
        CameraTestUtils.SimpleCaptureCallback resultListener =
                new CameraTestUtils.SimpleCaptureCallback();

        CaptureRequest request1 = request.build();
        for (int i = 0; i < numShots; i++) {
            startCapture(request1, /*repeating*/false, resultListener, mHandler);
        }
        List<Pair<List<Image>, CaptureResult>> ret = new ArrayList<>();
        for (int i = 0; i < numShots; i++) {
            // Verify capture result and images
            CaptureResult result = resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);

            List<Image> resultImages = new ArrayList<Image>();
            for (CameraTestUtils.SimpleImageReaderListener captureListener : captureListeners) {
                Image captureImage =
                        captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS * timeoutScale);

                /*CameraTestUtils.validateImage(captureImage, s.getWidth(), s.getHeight(),
                        ImageFormat.RAW_SENSOR, null);*/
                resultImages.add(captureImage);
            }
            ret.add(new Pair<List<Image>, CaptureResult>(resultImages, result));
        }
        // Stop capture, delete the streams.
        stopCapture(/*fast*/false);

        return ret;
    }

    /**
     * Use the DNG SDK to validate a DNG file stored in the buffer.
     *
     * Returns false if the DNG has validation errors. Validation warnings/errors
     * will be printed to logcat.
     */
    private static native boolean validateDngNative(byte[] dngBuffer);
}