/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>
#define LOG_TAG "CameraMultiStreamTest"
//#define LOG_NDEBUG 0
#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#include <gtest/gtest.h>
#include <utils/Log.h>
#include <utils/StrongPointer.h>
#include <common/CameraDeviceBase.h>
#include <hardware/hardware.h>
#include <hardware/camera2.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>

#define DEFAULT_FRAME_DURATION 33000000LL // 33 ms
#define CAMERA_HEAP_COUNT       1
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_DISPLAY_FORMAT HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
#define CAMERA_MULTI_STREAM_DEBUGGING  0
#define CAMERA_FRAME_TIMEOUT    1000000000LL // nsecs (1 sec)
#define PREVIEW_RENDERING_TIME_INTERVAL 200000 // in microseconds (200 ms)
// 1% tolerance margin for the exposure sanity check against result metadata
#define TOLERANCE_MARGIN_METADATA 0.01
// 5% tolerance margin for the exposure sanity check against capture times
#define TOLERANCE_MARGIN_CAPTURE 0.05
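// For example, with a 10 ms requested exposure, the result metadata must
// report an exposure within 9.9-10.1 ms, and the interval between
// consecutive capture timestamps may be at most ~5% shorter than the
// requested exposure time.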
/* constants for display */
#define DISPLAY_BUFFER_HEIGHT 1024
#define DISPLAY_BUFFER_WIDTH 1024
#define DISPLAY_BUFFER_FORMAT PIXEL_FORMAT_RGB_888

// This test intends to use a large preview size, capped at 1080p.
#define PREVIEW_WIDTH_CAP   1920
#define PREVIEW_HEIGHT_CAP  1080
// This test intends to use a small metering burst size, capped at 640x480.
#define METERING_WIDTH_CAP  640
#define METERING_HEIGHT_CAP 480

#define EXP_WAIT_MULTIPLIER 2
namespace android {
namespace camera2 {
namespace tests {

static const CameraStreamParams DEFAULT_STREAM_PARAMETERS = {
    /*mFormat*/     CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/  CAMERA_HEAP_COUNT
};

static const CameraStreamParams DISPLAY_STREAM_PARAMETERS = {
    /*mFormat*/     CAMERA_DISPLAY_FORMAT,
    /*mHeapCount*/  CAMERA_HEAP_COUNT
};

class CameraMultiStreamTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraMultiStreamTest() : CameraStreamFixture(DEFAULT_STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }
        /**
         * Don't create the default stream; each test is in charge of
         * creating its own streams.
         */
    }

    ~CameraMultiStreamTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;
    }

    sp<SurfaceComposerClient> mComposerClient;
    sp<SurfaceControl> mSurfaceControl;

    void CreateOnScreenSurface(sp<Surface>& surface) {
        mComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());

        mSurfaceControl = mComposerClient->createSurface(
                String8("CameraMultiStreamTest StreamingImage Surface"),
                DISPLAY_BUFFER_WIDTH, DISPLAY_BUFFER_HEIGHT,
                DISPLAY_BUFFER_FORMAT, 0);

        ASSERT_NE((void*)NULL, mSurfaceControl.get());
        ASSERT_TRUE(mSurfaceControl->isValid());

        SurfaceComposerClient::openGlobalTransaction();
        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mSurfaceControl->getSurface();

        ASSERT_NE((void*)NULL, surface.get());
    }

    struct Size {
        int32_t width;
        int32_t height;
    };

    // Select minimal size by number of pixels.
    void GetMinSize(const int32_t* data, size_t count,
            Size* min, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t minIdx = 0;
        int32_t minSize = INT_MAX, tempSize;
        for (size_t i = 0; i < count; i+=2) {
            tempSize = data[i] * data[i+1];
            if (minSize > tempSize) {
                minSize = tempSize;
                minIdx = i;
            }
        }
        min->width = data[minIdx];
        min->height = data[minIdx + 1];
        *idx = minIdx;
    }
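    // Note: the size lists in the static metadata are flattened
    // (width0, height0, width1, height1, ...) pairs, hence the stride-2
    // iteration above and in GetMaxSize below; *idx is the index of the
    // selected width within that flattened array.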

    // Select maximal size by number of pixels.
    void GetMaxSize(const int32_t* data, size_t count,
            Size* max, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t maxIdx = 0;
        int32_t maxSize = INT_MIN, tempSize;
        for (size_t i = 0; i < count; i+=2) {
            tempSize = data[i] * data[i+1];
            if (maxSize < tempSize) {
                maxSize = tempSize;
                maxIdx = i;
            }
        }
        max->width = data[maxIdx];
        max->height = data[maxIdx + 1];
        *idx = maxIdx;
    }

    // Cap size by number of pixels.
    Size CapSize(Size cap, Size input) {
        if (input.width * input.height > cap.width * cap.height) {
            return cap;
        }
        return input;
    }
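    // For example, CapSize({1920, 1080}, {4032, 3024}) returns {1920, 1080},
    // while CapSize({1920, 1080}, {1280, 720}) returns {1280, 720} unchanged.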

    struct CameraStream : public RefBase {

    public:
        /**
         * Only initialize the member variables here; do the ASSERT checks
         * in the SetUp function. To make this stream useful, SetUp must
         * be called before using it.
         */
        CameraStream(
                int width,
                int height,
                const sp<CameraDeviceBase>& device,
                CameraStreamParams param, sp<Surface> surface,
                bool useCpuConsumer)
            : mDevice(device),
              mWidth(width),
              mHeight(height) {
            mFormat = param.mFormat;
            if (useCpuConsumer) {
                sp<IGraphicBufferProducer> producer;
                sp<IGraphicBufferConsumer> consumer;
                BufferQueue::createBufferQueue(&producer, &consumer);
                mCpuConsumer = new CpuConsumer(consumer, param.mHeapCount);
                mCpuConsumer->setName(String8(
                        "CameraMultiStreamTest::mCpuConsumer"));
                mSurface = new Surface(producer);
            } else {
                // Render the stream to the screen.
                mCpuConsumer = NULL;
                mSurface = surface;
            }

            mFrameListener = new FrameListener();
            if (mCpuConsumer != 0) {
                mCpuConsumer->setFrameAvailableListener(mFrameListener);
            }
        }

        /**
         * Finally create the camera stream, and do the ASSERT checks, since
         * we cannot do them in the constructor.
         */
        void SetUp() {
            ASSERT_EQ(OK,
                mDevice->createStream(mSurface,
                    mWidth, mHeight, mFormat, HAL_DATASPACE_UNKNOWN,
                    CAMERA3_STREAM_ROTATION_0, &mStreamId));

            ASSERT_NE(-1, mStreamId);
        }

        int GetStreamId() { return mStreamId; }
        sp<CpuConsumer> GetConsumer() { return mCpuConsumer; }
        sp<FrameListener> GetFrameListener() { return mFrameListener; }

    protected:
        ~CameraStream() {
            if (mDevice.get()) {
                mDevice->waitUntilDrained();
                mDevice->deleteStream(mStreamId);
            }
            // Clear producer before consumer.
            mSurface.clear();
            mCpuConsumer.clear();
        }

    private:
        sp<FrameListener>       mFrameListener;
        sp<CpuConsumer>         mCpuConsumer;
        sp<Surface>             mSurface;
        sp<CameraDeviceBase>    mDevice;
        int                     mStreamId;
        int                     mWidth;
        int                     mHeight;
        int                     mFormat;
    };
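
    // Typical lifecycle, as used by the tests below:
    //   sp<CameraStream> stream = CreateStream(width, height, mDevice);
    //   ASSERT_NO_FATAL_FAILURE(stream->SetUp());
    //   ... submit requests targeting stream->GetStreamId() ...
    // The stream is deleted from the device (after waiting for it to drain)
    // once the last strong reference is dropped.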

    int64_t GetExposureValue(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_EXPOSURE_TIME);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }

    int32_t GetSensitivity(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_SENSITIVITY);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i32[0];
        }
        return -1;
    }

    int64_t GetFrameDuration(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_FRAME_DURATION);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }
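    // Each getter above returns -1 when the tag is missing; the EXPECT_EQ
    // records a non-fatal failure in that case, so callers continue and
    // compare the returned value against the requested one anyway.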

    void CreateRequests(CameraMetadata& previewRequest,
            CameraMetadata& meteringRequest,
            CameraMetadata& captureRequest,
            int previewStreamId,
            int meteringStreamId,
            int captureStreamId) {
        int32_t requestId = 0;
        Vector<int32_t> previewStreamIds;
        previewStreamIds.push(previewStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                &previewRequest));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                previewStreamIds));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                &requestId, 1));

        // Create metering request, manual settings
        // Manual control: disable 3A, noise reduction, and edge sharpening
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        uint8_t nrOff = static_cast<uint8_t>(ANDROID_NOISE_REDUCTION_MODE_OFF);
        uint8_t sharpOff = static_cast<uint8_t>(ANDROID_EDGE_MODE_OFF);
        Vector<int32_t> meteringStreamIds;
        meteringStreamIds.push(meteringStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &meteringRequest));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                meteringStreamIds));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));

        // Create capture request, manual settings
        Vector<int32_t> captureStreamIds;
        captureStreamIds.push(captureStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &captureRequest));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                captureStreamIds));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));
    }
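    // All three requests are built from the PREVIEW template and target one
    // output stream each; the metering and capture requests additionally
    // turn off 3A, noise reduction, and edge enhancement so that the
    // exposure/sensitivity values set later are applied verbatim.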

    sp<CameraStream> CreateStream(
            int width,
            int height,
            const sp<CameraDeviceBase>& device,
            CameraStreamParams param = DEFAULT_STREAM_PARAMETERS,
            sp<Surface> surface = NULL,
            bool useCpuConsumer = true) {
        param.mFormat = MapAutoFormat(param.mFormat);
        return new CameraStream(width, height, device,
                param, surface, useCpuConsumer);
    }
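    // MapAutoFormat (from CameraStreamFixture) is assumed to resolve
    // CAMERA_STREAM_AUTO_CPU_FORMAT to a concrete pixel format that a
    // CpuConsumer can read; concrete formats pass through unchanged.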

    void CaptureBurst(CameraMetadata& request, size_t requestCount,
            const Vector<int64_t>& exposures,
            const Vector<int32_t>& sensitivities,
            const sp<CameraStream>& stream,
            int64_t minFrameDuration,
            int32_t* requestIdStart) {
        ASSERT_EQ(OK, request.update(ANDROID_SENSOR_FRAME_DURATION,
                &minFrameDuration, 1));
        // Submit a series of requests with the specified exposure/gain values.
        int32_t targetRequestId = *requestIdStart;
        for (size_t i = 0; i < requestCount; i++) {
            ASSERT_EQ(OK, request.update(ANDROID_REQUEST_ID, requestIdStart, 1));
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposures[i], 1));
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_SENSITIVITY, &sensitivities[i], 1));
            ASSERT_EQ(OK, mDevice->capture(request));
            ALOGV("Submitting request with id %d, exposure %" PRId64 ", sensitivity %d",
                    *requestIdStart, exposures[i], sensitivities[i]);
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                request.dump(STDOUT_FILENO);
            }
            (*requestIdStart)++;
        }
        // Get capture burst results.
        Vector<nsecs_t> captureBurstTimes;
        sp<CpuConsumer> consumer = stream->GetConsumer();
        sp<FrameListener> listener = stream->GetFrameListener();

        // Set wait limit based on expected frame duration.
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        for (size_t i = 0; i < requestCount; i++) {
            ALOGV("Reading request result %zu", i);

            /**
             * Raise the timeout to at least twice the exposure time, to
             * avoid a false positive when the timeout is too short.
             */
            if ((exposures[i] * EXP_WAIT_MULTIPLIER) > waitLimit) {
                waitLimit = exposures[i] * EXP_WAIT_MULTIPLIER;
            }

            CaptureResult result;
            CameraMetadata frameMetadata;
            int32_t resultRequestId;
            do {
                ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
                ASSERT_EQ(OK, mDevice->getNextResult(&result));
                frameMetadata = result.mMetadata;

                camera_metadata_entry_t resultEntry = frameMetadata.find(ANDROID_REQUEST_ID);
                ASSERT_EQ(1u, resultEntry.count);
                resultRequestId = resultEntry.data.i32[0];
                if (CAMERA_MULTI_STREAM_DEBUGGING) {
                    std::cout << "capture result req id: " << resultRequestId << std::endl;
                }
            } while (resultRequestId != targetRequestId);
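            // The do-while above discards results from other in-flight
            // requests (e.g. the repeating preview request, id 0) until the
            // result for the targeted burst request arrives.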
            targetRequestId++;
            ALOGV("Got capture burst result for request %zu", i);

            // Validate capture result
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                frameMetadata.dump(STDOUT_FILENO);
            }

            // TODO: Need to revisit this to figure out an accurate margin.
            int64_t resultExposure = GetExposureValue(frameMetadata);
            int32_t resultSensitivity = GetSensitivity(frameMetadata);
            EXPECT_LE(sensitivities[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultSensitivity);
            EXPECT_GE(sensitivities[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultSensitivity);
            EXPECT_LE(exposures[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultExposure);
            EXPECT_GE(exposures[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultExposure);

            ASSERT_EQ(OK, listener->waitForFrame(waitLimit));
            captureBurstTimes.push_back(systemTime());
            CpuConsumer::LockedBuffer imgBuffer;
            ASSERT_EQ(OK, consumer->lockNextBuffer(&imgBuffer));
            ALOGV("Got capture buffer for request %zu", i);

            /**
             * TODO: Validate capture buffer. The current brightness
             * calculation is too slow, and it doesn't account for
             * saturation effects, which are quite common since we sweep
             * a significant range of EVs. We need to figure out a
             * reliable way to validate the buffer data.
             */

            ASSERT_EQ(OK, consumer->unlockBuffer(imgBuffer));
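
            // Pacing sanity check: the interval between consecutive capture
            // buffers can be at most ~5% shorter than the requested exposure
            // time, since frame readout cannot outpace the exposure itself.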
            if (i > 0) {
                nsecs_t timeDelta =
                        captureBurstTimes[i] - captureBurstTimes[i-1];
                EXPECT_GE(timeDelta * (1 + TOLERANCE_MARGIN_CAPTURE), exposures[i]);
            }
        }
    }

    /**
     * Intentionally shadow the default CreateStream function from the base
     * class, because we don't want any test in this class to use the
     * default stream creation function.
     */
    void CreateStream() {
    }
};

/**
 * This tests the multiple-stream use case. It creates three streams:
 *
 * 1. Preview stream, with a large size that is no bigger than 1080p.
 * We render this stream to the display and vary the exposure time for
 * a certain amount of time, for visualization purposes.
 *
 * 2. Metering stream, with a small size that is no bigger than VGA.
 * A burst is issued with different exposure times and analog gains
 * (or analog-gain-implemented sensitivities), then we check whether the
 * capture result metadata matches the request.
 *
 * 3. Capture stream. This is basically similar to the metering stream,
 * but has a large size, which is the largest supported JPEG capture size.
 *
 * This multiple-stream test verifies that the HAL supports:
 *
 * 1. Multiple streams like the above. The HAL should support at least 3
 * streams concurrently: one preview stream and two other YUV streams.
 *
 * 2. Manual control (gain/exposure) of multiple burst captures.
 */
// Disable this test for now, as we need to clean up the usage of the
// deprecated tag quite a bit.
TEST_F(CameraMultiStreamTest, DISABLED_MultiBurst) {

    TEST_EXTENSION_FORKING_INIT;

    const int32_t* implDefData;
    size_t implDefCount;
    const int32_t* jpegData;
    size_t jpegCount;
    if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
        camera_metadata_ro_entry availableProcessedSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
        ASSERT_EQ(0u, availableProcessedSizes.count % 2);
        ASSERT_GE(availableProcessedSizes.count, 2u);
        camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
        EXPECT_EQ(availableProcessedSizes.count,
            availableProcessedMinFrameDurations.count * 2);

        camera_metadata_ro_entry availableJpegSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
        ASSERT_EQ(0u, availableJpegSizes.count % 2);
        ASSERT_GE(availableJpegSizes.count, 2u);
        implDefData = availableProcessedSizes.data.i32;
        implDefCount = availableProcessedSizes.count;
        jpegData = availableJpegSizes.data.i32;
        jpegCount = availableJpegSizes.count;
    } else {
        getResolutionList(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &implDefData, &implDefCount);
        ASSERT_NE(0u, implDefCount)
            << "Missing implementation defined sizes";
        ASSERT_EQ(0u, implDefCount % 2);
        ASSERT_GE(implDefCount, 2u);

        getResolutionList(HAL_PIXEL_FORMAT_BLOB, &jpegData, &jpegCount);
        ASSERT_EQ(0u, jpegCount % 2);
        ASSERT_GE(jpegCount, 2u);
    }

    camera_metadata_ro_entry hardwareLevel =
        GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
    ASSERT_EQ(1u, hardwareLevel.count);
    uint8_t level = hardwareLevel.data.u8[0];
    ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
    ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
    if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
        const ::testing::TestInfo* const test_info =
            ::testing::UnitTest::GetInstance()->current_test_info();
        std::cerr << "Skipping test "
                  << test_info->test_case_name() << "."
                  << test_info->name()
                  << " because the HAL supported hardware level is LIMITED"
                  << std::endl;
        return;
    }
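
    // The skip above is needed because manual sensor control (exposure,
    // sensitivity, frame duration), which this test exercises, is only
    // guaranteed on FULL-level devices; LIMITED devices may not honor it.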

    // Find the right sizes for preview, metering, and capture streams
    int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
    Size processedMinSize = {0, 0}, processedMaxSize = {0, 0};
    Size jpegMaxSize = {0, 0};

    int32_t minIdx, maxIdx;
    GetMinSize(implDefData, implDefCount, &processedMinSize, &minIdx);
    GetMaxSize(implDefData, implDefCount, &processedMaxSize, &maxIdx);
    ALOGV("Found processed max size: %dx%d, min size = %dx%d",
            processedMaxSize.width, processedMaxSize.height,
            processedMinSize.width, processedMinSize.height);

    if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
        camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
        minFrameDuration =
            availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
    } else {
        minFrameDuration = getMinFrameDurationFor(
                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                processedMaxSize.width, processedMaxSize.height);
    }

    EXPECT_GT(minFrameDuration, 0);

    if (minFrameDuration <= 0) {
        minFrameDuration = DEFAULT_FRAME_DURATION;
    }
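
    // The fallback DEFAULT_FRAME_DURATION of 33000000 ns (33 ms)
    // corresponds to roughly 30 fps.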

    ALOGV("targeted minimal frame duration is: %" PRId64 "ns", minFrameDuration);

    GetMaxSize(jpegData, jpegCount, &jpegMaxSize, &maxIdx);
    ALOGV("Found Jpeg size max idx = %d", maxIdx);

    // The max JPEG size should also be available among the processed sizes.
    // Use it for YUV capture anyway.
    EXPECT_EQ(processedMaxSize.width, jpegMaxSize.width);
    EXPECT_EQ(processedMaxSize.height, jpegMaxSize.height);

    // Cap preview size.
    Size previewLimit = { PREVIEW_WIDTH_CAP, PREVIEW_HEIGHT_CAP };
    // FIXME: need to make sure previewLimit is supported by the HAL.
    Size previewSize = CapSize(previewLimit, processedMaxSize);
    // Cap metering size to VGA (VGA is mandated by the CDD).
    Size meteringLimit = { METERING_WIDTH_CAP, METERING_HEIGHT_CAP };
    Size meteringSize = CapSize(meteringLimit, processedMinSize);
    // The capture stream uses the maximum of the JPEG sizes.
    ALOGV("preview size: %dx%d, metering size: %dx%d, capture size: %dx%d",
            previewSize.width, previewSize.height,
            meteringSize.width, meteringSize.height,
            jpegMaxSize.width, jpegMaxSize.height);

    // Create streams
    // Preview stream: large resolution (capped at 1080p), rendered on screen.
    sp<CameraStream> previewStream;
    {
        sp<Surface> surface;
        ASSERT_NO_FATAL_FAILURE(CreateOnScreenSurface(/*out*/surface));
        previewStream = CreateStream(
                previewSize.width,
                previewSize.height,
                mDevice,
                DISPLAY_STREAM_PARAMETERS,
                surface,
                false);
        ASSERT_NE((void*)NULL, previewStream.get());
        ASSERT_NO_FATAL_FAILURE(previewStream->SetUp());
    }
    // Metering burst stream: small resolution YUV stream
    sp<CameraStream> meteringStream =
            CreateStream(
                    meteringSize.width,
                    meteringSize.height,
                    mDevice);
    ASSERT_NE((void*)NULL, meteringStream.get());
    ASSERT_NO_FATAL_FAILURE(meteringStream->SetUp());
    // Capture burst stream: full resolution YUV stream
    sp<CameraStream> captureStream =
            CreateStream(
                    jpegMaxSize.width,
                    jpegMaxSize.height,
                    mDevice);
    ASSERT_NE((void*)NULL, captureStream.get());
    ASSERT_NO_FATAL_FAILURE(captureStream->SetUp());

    // Create the preview, metering, and capture requests.
    CameraMetadata previewRequest, meteringRequest, captureRequest;
    ASSERT_NO_FATAL_FAILURE(CreateRequests(previewRequest, meteringRequest,
            captureRequest, previewStream->GetStreamId(),
            meteringStream->GetStreamId(), captureStream->GetStreamId()));

    // Start preview
    if (CAMERA_MULTI_STREAM_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Generate exposure and sensitivity lists
    camera_metadata_ro_entry exposureTimeRange =
        GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
    ASSERT_EQ(exposureTimeRange.count, 2u);
    int64_t minExp = exposureTimeRange.data.i64[0];
    int64_t maxExp = exposureTimeRange.data.i64[1];
    ASSERT_GT(maxExp, minExp);

    camera_metadata_ro_entry sensitivityRange =
        GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
    ASSERT_EQ(2u, sensitivityRange.count);
    int32_t minSensitivity = sensitivityRange.data.i32[0];
    int32_t maxSensitivity = sensitivityRange.data.i32[1];
    camera_metadata_ro_entry maxAnalogSenEntry =
            GetStaticEntry(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY);
    EXPECT_EQ(1u, maxAnalogSenEntry.count);
    int32_t maxAnalogSensitivity = maxAnalogSenEntry.data.i32[0];
    EXPECT_LE(maxAnalogSensitivity, maxSensitivity);
    // Only test the sensitivities implemented by analog gain.
    if (maxAnalogSensitivity > maxSensitivity) {
        // Fall back to maxSensitivity
        maxAnalogSensitivity = maxSensitivity;
    }

    // Sensitivity list: if possible, only include sensitivities that are
    // implemented purely by analog gain, in steps of 100.
    Vector<int32_t> sensitivities;
    Vector<int64_t> exposures;
    size_t count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
    sensitivities.push_back(minSensitivity);
    for (size_t i = 1; i < count; i++) {
        sensitivities.push_back(minSensitivity + i * 100);
    }
    sensitivities.push_back(maxAnalogSensitivity);
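    // For example, min=100 and maxAnalog=800 yields
    // {100, 200, 300, 400, 500, 600, 700, 800}.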
    ALOGV("Sensitivity Range: min=%d, max=%d", minSensitivity,
            maxAnalogSensitivity);
    int64_t exp = minExp;
    while (exp < maxExp) {
        exposures.push_back(exp);
        exp *= 2;
    }
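    // The exposure list doubles each step (one EV stop apart); e.g. with
    // minExp = 100000 ns this yields 0.1 ms, 0.2 ms, 0.4 ms, ... up to
    // (but not including) maxExp.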
    // Sweep the exposure value for the preview, just for visual inspection
    // purposes.
    uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
    for (size_t i = 0; i < exposures.size(); i++) {
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_SENSOR_EXPOSURE_TIME,
                &exposures[i], 1));
        ALOGV("Submitting preview request %zu with exposure %" PRId64,
                i, exposures[i]);

        ASSERT_EQ(OK, mDevice->setStreamingRequest(previewRequest));

        // Let the preview run for 200 ms on screen at each exposure time.
        usleep(PREVIEW_RENDERING_TIME_INTERVAL);
    }
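
    // Each setStreamingRequest call replaces the previous repeating request,
    // so the preview keeps running at the most recent exposure value until
    // clearStreamingRequest() is called at the end of the test.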

    size_t requestCount = sensitivities.size();
    if (requestCount > exposures.size()) {
        requestCount = exposures.size();
    }

    // To keep request ids unique (the preview request id is 0), start the
    // burst capture request ids at 1.
    int32_t requestIdStart = 1;
    /**
     * Submit the metering burst. Set the frame duration to the minimum
     * possible value, since we want the capture to run as fast as possible.
     * The HAL should adjust the frame duration to the minimum necessary
     * value to support the requested exposure, if the exposure is larger
     * than the frame duration.
     */
    CaptureBurst(meteringRequest, requestCount, exposures, sensitivities,
            meteringStream, minFrameDuration, &requestIdStart);

    /**
     * Submit the capture burst, with the same frame duration handling as
     * the metering burst above.
     */
    CaptureBurst(captureRequest, requestCount, exposures, sensitivities,
            captureStream, minFrameDuration, &requestIdStart);

    ASSERT_EQ(OK, mDevice->clearStreamingRequest());
}

} // namespace tests
} // namespace camera2
} // namespace android