/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;

import static org.mockito.Mockito.*;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.util.Size;
import android.view.Surface;
import android.view.SurfaceHolder;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

/**
 * CameraDevice preview test by using SurfaceView.
 */

@RunWith(Parameterized.class)
public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "SurfaceViewPreviewTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int FRAME_TIMEOUT_MS = 1000;
    private static final int NUM_FRAMES_VERIFIED = 30;
    private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
    private static final float FRAME_DURATION_ERROR_MARGIN = 0.01f; // 1 percent error margin.
    private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s

    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test all supported preview sizes for each camera device.
     * <p>
     * For the first {@link #NUM_FRAMES_VERIFIED} capture results,
     * the {@link CaptureCallback} callback availability and the capture timestamp
     * (monotonically increasing) ordering are verified.
     * </p>
     */
    @Test
    public void testCameraPreview() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + cameraIdsUnderTest[i]);
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(cameraIdsUnderTest[i]);
                previewTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic test pattern mode preview.
     * <p>
     * Only the test pattern preview and capture results are tested; the image buffers
     * are not validated.
     * </p>
     */
    @Test
    public void testBasicTestPatternPreview() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + cameraIdsUnderTest[i]);
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(cameraIdsUnderTest[i]);
                previewTestPatternTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview, validate the preview
     * frame duration and exposure time.
     */
    @Test
    public void testPreviewFpsRange() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                previewFpsRangeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test surface set streaming use cases.
     *
     * <p>
     * The test sets output configurations with increasing surface set IDs for the preview and YUV
     * streams. The max supported preview size is selected for the preview stream, and the max
     * supported YUV size (depending on the supported hardware level) is selected for the YUV
     * stream. This test also exercises the prepare API.
     * </p>
     */
    @Test
    public void testSurfaceSet() throws Exception {
        for (String id : getCameraIdsUnderTest()) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                surfaceSetTestByCamera(id);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
     * expected effects on performance.
     *
     * - Ensure that prepare() results in onSurfacePrepared() being invoked
     * - Ensure that prepare() does not cause preview glitches while operating
     * - Ensure that starting to use a newly-prepared output does not cause additional
     *   preview glitches to occur
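     *
     * <p>A minimal sketch of the prepare() pattern exercised here; {@code session} and
     * {@code readerSurface} are placeholder names, and the test itself uses a mocked
     * {@link CameraCaptureSession.StateCallback} rather than a custom one:</p>
     * <pre>{@code
     * // Pre-allocate buffers for an output before it is first targeted by a request:
     * session.prepare(readerSurface);
     * // The framework answers with StateCallback#onSurfacePrepared(session, readerSurface);
     * // requests targeting readerSurface can then start without buffer-allocation stalls.
     * }</pre>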
     */
    @Test
    public void testPreparePerformance() throws Throwable {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(cameraIdsUnderTest[i]);
                preparePerformanceTestByCamera(cameraIdsUnderTest[i]);
            }
            finally {
                closeDevice();
            }
        }
    }

    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
        final int MAX_IMAGES_TO_PREPARE = 10;
        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
        final int MAX_RESULTS_TO_WAIT = 10;
        final int FRAMES_FOR_AVERAGING = 100;
        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference

        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        // Don't need image data, just drop it right away to minimize overhead
        ImageDropperListener imageListener = new ImageDropperListener();

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        // Configure outputs and session

        updatePreviewSurface(maxPreviewSize);

        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE, imageListener);
        HashMap<Size, Long> yuvMinFrameDurations =
                mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
        Long readerMinFrameDuration = yuvMinFrameDurations.get(maxYuvSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(mReaderSurface);

        CameraCaptureSession.StateCallback mockSessionListener =
                mock(CameraCaptureSession.StateCallback.class);

        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);

        previewRequest.addTarget(mPreviewSurface);
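        // Use the fastest advertised AE target FPS range so the measured frame intervals reflect
        // the pipeline's best-case steady-state rate.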
        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Converge AE
        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);

        if (mStaticInfo.isAeLockSupported()) {
            // Lock AE if possible to improve stability
            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                // Legacy mode doesn't output AE state
                waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
                        CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
            }
        }

        // Measure frame rate for a bit
        Pair<Long, Long> frameDurationStats =
                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);

        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
                        frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));

        // Drain results, do prepare
        resultListener.drain();

        mSession.prepare(mReaderSurface);

        verify(mockSessionListener,
                timeout(PREPARE_TIMEOUT_MS).times(1)).
                onSurfacePrepared(eq(mSession), eq(mReaderSurface));

        resultListener.drain();

        // Get at least one more preview result without prepared target
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);

        // Now use the prepared stream and ensure there are no hiccups from using it
        previewRequest.addTarget(mReaderSurface);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        Pair<Long, Long> preparedFrameDurationStats =
                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE*2, prevTimestamp);

        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
                        preparedFrameDurationStats.first / 1e6,
                        preparedFrameDurationStats.second / 1e6));

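        // The newly-prepared YUV stream may legitimately slow the pipeline to the reader's own
        // minimum frame duration, so the checks below compare against the larger of the baseline
        // interval and readerMinFrameDuration, within the configured tolerances.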
        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            mCollector.expectTrue(
                String.format("Camera %s: Preview peak frame interval affected by use of new " +
                        "stream: preview peak frame interval: %f ms, peak with new stream: %f ms",
                        cameraId,
                        frameDurationStats.second / 1e6, preparedFrameDurationStats.second / 1e6),
                (preparedFrameDurationStats.second <=
                        Math.max(frameDurationStats.second, readerMinFrameDuration) *
                        (1 + PREPARE_PEAK_RATE_BOUNDS)));
            mCollector.expectTrue(
                String.format("Camera %s: Preview average frame interval affected by use of new " +
                        "stream: preview avg frame duration: %f ms, with new stream: %f ms",
                        cameraId,
                        frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
                (preparedFrameDurationStats.first <=
                        Math.max(frameDurationStats.first, readerMinFrameDuration) *
                        (1 + PREPARE_FRAME_RATE_BOUNDS)));
        }
    }

    /**
     * Test to verify correct behavior when the same Surface object is used repeatedly with
     * different native internals, and when multiple Surfaces point to the same actual consumer object.
     */
    @Test
    public void testSurfaceEquality() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                if (!mAllStaticInfo.get(cameraIdsUnderTest[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(cameraIdsUnderTest[i]);
                surfaceEqualityTestByCamera(cameraIdsUnderTest[i]);
            }
            finally {
                closeDevice();
            }
        }
    }

    private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
        final int SOME_FRAMES = 10;

        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        updatePreviewSurface(maxPreviewSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface1);

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface1);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);

        // Drain
        mSession.abortCaptures();
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Hide / unhide the SurfaceView to get a new target Surface
        recreatePreviewSurface();

        // And resize it again
        updatePreviewSurface(maxPreviewSize);

        // Create a second surface that targets the shared SurfaceTexture
        Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);

        // Use the new Surfaces for a new session
        outputSurfaces.clear();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface2);

        sessionListener = new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);

        previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface2);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);
    }

    /**
     * Verify creation of deferred surface capture sessions
     */
    @Test
    public void testDeferredSurfaces() throws Exception {
        String[] cameraIdsUnderTest = getCameraIdsUnderTest();
        for (int i = 0; i < cameraIdsUnderTest.length; i++) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(cameraIdsUnderTest[i]);
                if (staticInfo.isHardwareLevelLegacy()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] + " is legacy, skipping");
                    continue;
                }
                if (!staticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + cameraIdsUnderTest[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(cameraIdsUnderTest[i]);
                testDeferredSurfacesByCamera(cameraIdsUnderTest[i]);
            }
            finally {
                closeDevice();
            }
        }
    }

    private void testDeferredSurfacesByCamera(String cameraId) throws Exception {
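        // Deferred-surface flow exercised below: create size/class-based OutputConfigurations with
        // no Surface attached, configure the session, then supply the real Surfaces later via
        // OutputConfiguration#addSurface() and CameraCaptureSession#finalizeOutputConfigurations().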
        Size maxPreviewSize = m1080pBoundedOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        class TextureAvailableListener implements SurfaceTexture.OnFrameAvailableListener {
            @Override
            public void onFrameAvailable(SurfaceTexture t) {
                mGotFrame = true;
            }
            public boolean gotFrame() { return mGotFrame; }

            private volatile boolean mGotFrame = false;
        }
        TextureAvailableListener textureAvailableListener = new TextureAvailableListener();

        sharedOutputTexture.setOnFrameAvailableListener(textureAvailableListener, mHandler);

        updatePreviewSurface(maxPreviewSize);

        // Create deferred outputs for surface view and surface texture
        OutputConfiguration surfaceViewOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceHolder.class);
        OutputConfiguration surfaceTextureOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceTexture.class);

        List<OutputConfiguration> outputSurfaces = new ArrayList<>();
        outputSurfaces.add(surfaceViewOutput);
        outputSurfaces.add(surfaceTextureOutput);

        // Create non-deferred ImageReader output (JPEG for LIMITED-level compatibility)
        ImageDropperListener imageListener = new ImageDropperListener();
        createImageReader(mOrderedStillSizes.get(0), ImageFormat.JPEG, /*maxImages*/ 3,
                imageListener);
        OutputConfiguration jpegOutput =
                new OutputConfiguration(OutputConfiguration.SURFACE_GROUP_ID_NONE, mReaderSurface);
        outputSurfaces.add(jpegOutput);

        // Confirm that zero surface size isn't supported for OutputConfiguration
        Size[] sizeZeros = { new Size(0, 0), new Size(1, 0), new Size(0, 1) };
        for (Size size : sizeZeros) {
            try {
                OutputConfiguration bad = new OutputConfiguration(size, SurfaceHolder.class);
                fail("OutputConfiguration allowed use of zero surfaceSize");
            } catch (IllegalArgumentException e) {
                //expected
            }
        }

        // Check whether session configuration is supported
        CameraTestUtils.checkSessionConfigurationSupported(mCamera, mHandler, outputSurfaces,
                /*inputConfig*/ null, SessionConfiguration.SESSION_REGULAR, mCameraManager,
                /*defaultSupport*/ true, "Deferred session configuration query failed");

        // Create session

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSessionWithConfig(mCamera, outputSurfaces, sessionListener,
                mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Submit JPEG requests

        CaptureRequest.Builder request = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        request.addTarget(mReaderSurface);

        final int SOME_FRAMES = 10;
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }

        // Wait to get some frames out to ensure we can operate just the one expected surface
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);

        // Ensure we can't use the deferred surfaces yet
        request.addTarget(sharedOutputSurface1);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target " +
                    "that's not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add deferred surfaces to their configurations
        surfaceViewOutput.addSurface(mPreviewSurface);
        surfaceTextureOutput.addSurface(sharedOutputSurface1);

        // Verify bad inputs to addSurface
        try {
            surfaceViewOutput.addSurface(null);
            fail("No error from setting a null deferred surface");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            surfaceViewOutput.addSurface(mPreviewSurface);
            fail("Shouldn't be able to set deferred surface twice");
        } catch (IllegalStateException e) {
            // expected
        }

        // Add first deferred surface to session
        List<OutputConfiguration> deferredSurfaces = new ArrayList<>();
        deferredSurfaces.add(surfaceTextureOutput);

        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Try a second time, this should error

        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("Should have received IAE for trying to finish a deferred output twice");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Use new deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        assertTrue("No texture update received", textureAvailableListener.gotFrame());

        // Ensure we can't use the last deferred surface yet
        request.addTarget(mPreviewSurface);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target that's" +
                    " not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add final deferred surface
        deferredSurfaces.clear();
        deferredSurfaces.add(surfaceViewOutput);

        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Use final deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        // Can't check GL output since we don't have a context to call updateTexImage on, and
        // the callback only fires once per updateTexImage call.
        // And there's no way to verify data is going to a SurfaceView

        // Check for invalid output configurations being handed to a session
        OutputConfiguration badConfig =
                new OutputConfiguration(maxPreviewSize, SurfaceTexture.class);
        deferredSurfaces.clear();
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for empty list passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }

        deferredSurfaces.add(badConfig);
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for invalid output config being passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }

    }

    /**
     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
     * provided capture listener.  If prevTimestamp is positive, it is used for the first interval
     * calculation; otherwise, the first result is used to establish the starting time.
     *
     * Returns the mean interval in the first pair entry, and the largest interval in the second
     * pair entry
     */
    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
            long prevTimestamp) throws Exception {
        long summedIntervals = 0;
        long maxInterval = 0;
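        // When no prior timestamp is supplied, the first result only seeds the baseline, so one
        // fewer interval than frameCount is measured.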
        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);

        for (int i = 0; i < frameCount; i++) {
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (prevTimestamp > 0) {
                long interval = timestamp - prevTimestamp;
                if (interval > maxInterval) maxInterval = interval;
                summedIntervals += interval;
            }
            prevTimestamp = timestamp;
        }
        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
    }


    /**
     * Test preview fps range for all supported ranges. The exposure time and frame duration are
     * validated.
     */
    private void previewFpsRangeTestByCamera() throws Exception {
        Size maxPreviewSz;
        Range<Integer>[] fpsRanges = getDescendingTargetFpsRanges(mStaticInfo);
        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        for (int i = 0; i < fpsRanges.length; i += 1) {
            fpsRange = fpsRanges[i];
            if (mStaticInfo.isHardwareLevelLegacy()) {
                // Legacy devices don't report minimum frame duration for preview sizes. The FPS
                // range should be valid for any supported preview size.
                maxPreviewSz = mOrderedPreviewSizes.get(0);
            } else {
                maxPreviewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            }

            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode, so the test continues. The device
                // must still make sure that the antibanding algorithm doesn't interfere with the
                // fps range control.
                Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
                        " satisfy the specified fps range regardless of its current antibanding" +
                        " mode");
            }

            startPreview(requestBuilder, maxPreviewSz, resultListener);
            resultListener = new SimpleCaptureCallback();
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);

            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange,
                    maxPreviewSz);
            stopPreview();
            resultListener.drain();
        }
    }

    private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener,
            int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked();

        if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
            long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
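            // Allowed frame duration window in ns is [1e9 / maxFps, 1e9 / minFps], widened below
            // by the error margin.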
            long[] frameDurationRange =
                    new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            mCollector.expectInRange(
                    "Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
                    frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
                    (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN)));
            long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame"
                    + " duration %d", expTime, frameDuration), expTime <= frameDuration);

            Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz);
            boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size "
                    + previewSz.toString(), minFrameDuration != null);
            if (findDuration) {
                mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than"
                        + " minFrameDuration " + minFrameDuration, frameDuration >= minFrameDuration);
            }
        } else {
            Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," +
                    " skipping duration and exposure time check.");
        }
    }

    /**
     * Test all supported preview sizes for a camera device
     *
     * @throws Exception
     */
    private void previewTestByCamera() throws Exception {
        List<Size> previewSizes = getSupportedPreviewSizes(
                mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);

        for (final Size sz : previewSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing camera preview size: " + sz.toString());
            }

            // TODO: vary the different settings like crop region to cover more cases.
            CaptureRequest.Builder requestBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);

            startPreview(requestBuilder, sz, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
            stopPreview();
        }
    }

    private void previewTestPatternTestByCamera() throws Exception {
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureCallback mockCaptureCallback;

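        // SENSOR_TEST_PATTERN_DATA values are per-channel in R, Gr, Gb, B order, so this pattern
        // requests a solid green frame.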
        final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
        for (int mode : testPatternModes) {
            if (VERBOSE) {
                Log.v(TAG, "Test pattern mode: " + mode);
            }
            requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
            if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                // Assign color pattern to SENSOR_TEST_PATTERN_DATA
                requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
            }
            mockCaptureCallback = mock(CaptureCallback.class);
            startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
                    NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }

        stopPreview();
    }

    private void surfaceSetTestByCamera(String cameraId) throws Exception {
        final int MAX_SURFACE_GROUP_ID = 10;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
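        // Choose the YUV size bound from the guaranteed stream combinations: LEGACY devices only
        // guarantee YUV up to the maximum preview size, LIMITED up to the maximum recording size,
        // and FULL or better devices have no extra bound here.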
        Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
        if (mStaticInfo.isHardwareLevelLimited()) {
            yuvSizeBound = mOrderedVideoSizes.get(0);
        } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
            yuvSizeBound = null;
        }
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        ImageDropperListener imageListener = new ImageDropperListener();

        updatePreviewSurface(maxPreviewSz);
        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
        OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
        assertEquals(mPreviewSurface, previewConfig.getSurface());
        assertEquals(mReaderSurface, yuvConfig.getSurface());
        outputConfigs.add(previewConfig);
        outputConfigs.add(yuvConfig);
        requestBuilder.addTarget(mPreviewSurface);
        requestBuilder.addTarget(mReaderSurface);

        // Test different stream set ID.
        for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
                surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
            if (VERBOSE) {
                Log.v(TAG, "test preview with surface group id: " + surfaceGroupId);
            }

            previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
            yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
            outputConfigs.clear();
            outputConfigs.add(previewConfig);
            outputConfigs.add(yuvConfig);

            for (OutputConfiguration config : outputConfigs) {
                assertEquals(surfaceGroupId, config.getSurfaceGroupId());
            }

            CameraCaptureSession.StateCallback mockSessionListener =
                    mock(CameraCaptureSession.StateCallback.class);

            mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
                    mockSessionListener, mHandler);

            mSession.prepare(mPreviewSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mPreviewSurface));

            mSession.prepare(mReaderSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mReaderSurface));

            CaptureRequest request = requestBuilder.build();
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);
            mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }
    }

    private class IsCaptureResultValid implements ArgumentMatcher<TotalCaptureResult> {
        @Override
        public boolean matches(TotalCaptureResult obj) {
            TotalCaptureResult result = obj;
            Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (timeStamp != null && timeStamp.longValue() > 0L) {
                return true;
            }
            return false;
        }
    }

    private void verifyCaptureResults(
            CameraCaptureSession session,
            CaptureCallback mockListener,
            int expectResultCount,
            int timeOutMs) {
        // Should receive expected number of onCaptureStarted callbacks.
        ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
        ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureStarted(
                                eq(session),
                                isA(CaptureRequest.class),
                                timestamps.capture(),
                                frameNumbers.capture());

        // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
        long timestamp = 0;
        for (Long nextTimestamp : timestamps.getAllValues()) {
            assertNotNull("Next timestamp is null!", nextTimestamp);
            assertTrue("Captures are out of order", timestamp < nextTimestamp);
            timestamp = nextTimestamp;
        }

        // Validate frame numbers: all frame numbers should be consecutive and positive
        long frameNumber = -1;
        for (Long nextFrameNumber : frameNumbers.getAllValues()) {
            assertNotNull("Next frame number is null!", nextFrameNumber);
            assertTrue("Frame numbers are not consecutive",
                    (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
            frameNumber = nextFrameNumber;
        }

        // Should receive expected number of capture results.
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureCompleted(
                                eq(session),
                                isA(CaptureRequest.class),
                                argThat(new IsCaptureResultValid()));

        // Should not receive any capture failed callbacks.
        verify(mockListener, never())
                        .onCaptureFailed(
                                eq(session),
                                isA(CaptureRequest.class),
                                isA(CaptureFailure.class));
    }

}