/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2_test"
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <utils/Vector.h>
#include <utils/KeyedVector.h>
#include <gui/BufferQueue.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>

#include "camera2_utils.h"
#include "TestExtensions.h"

namespace android {
namespace camera2 {
namespace tests {

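// Test fixture that loads the camera HAL module for each test, enumerates the
// available devices, and provides helpers for opening HAL2 devices, wiring up
// the metadata queues, and creating output streams.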
class Camera2Test: public testing::Test {
  public:
    void SetUpModule() {
        int res;

        hw_module_t *module = NULL;
        res = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                (const hw_module_t **)&module);

        ASSERT_EQ(0, res)
                << "Failure opening camera hardware module: " << res;
        ASSERT_TRUE(NULL != module)
                << "No camera module was set by hw_get_module";

        IF_ALOGV() {
            std::cout << "  Camera module name: "
                    << module->name << std::endl;
            std::cout << "  Camera module author: "
                    << module->author << std::endl;
            std::cout << "  Camera module API version: 0x" << std::hex
                    << module->module_api_version << std::endl;
            std::cout << "  Camera module HAL API version: 0x" << std::hex
                    << module->hal_api_version << std::endl;
        }

        int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
        ASSERT_LE(version2_0, module->module_api_version)
                << "Camera module version is 0x"
                << std::hex << module->module_api_version
                << ", should be at least 2.0. (0x"
                << std::hex << CAMERA_MODULE_API_VERSION_2_0 << ")";

        sCameraModule = reinterpret_cast<camera_module_t*>(module);

        sNumCameras = sCameraModule->get_number_of_cameras();
        ASSERT_LT(0, sNumCameras) << "No camera devices available!";

        IF_ALOGV() {
            std::cout << "  Camera device count: " << sNumCameras << std::endl;
        }

        sCameraSupportsHal2 = new bool[sNumCameras];

        for (int i = 0; i < sNumCameras; i++) {
            camera_info info;
            res = sCameraModule->get_camera_info(i, &info);
            ASSERT_EQ(0, res)
                    << "Failure getting camera info for camera " << i;
            IF_ALOGV() {
                std::cout << "  Camera device: " << std::dec
                          << i << std::endl;
                std::cout << "    Facing: " << std::dec
                          << info.facing  << std::endl;
                std::cout << "    Orientation: " << std::dec
                          << info.orientation  << std::endl;
                std::cout << "    Version: 0x" << std::hex <<
                        info.device_version  << std::endl;
            }
            if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0 &&
                    info.device_version < CAMERA_DEVICE_API_VERSION_3_0) {
                sCameraSupportsHal2[i] = true;
                ASSERT_TRUE(NULL != info.static_camera_characteristics);
                IF_ALOGV() {
                    std::cout << "    Static camera metadata:"  << std::endl;
                    dump_indented_camera_metadata(info.static_camera_characteristics,
                            0, 1, 6);
                }
            } else {
                sCameraSupportsHal2[i] = false;
            }
        }
    }

    void TearDownModule() {
        hw_module_t *module = reinterpret_cast<hw_module_t*>(sCameraModule);
        ASSERT_EQ(0, HWModuleHelpers::closeModule(module));
    }

    static const camera_module_t *getCameraModule() {
        return sCameraModule;
    }

    static int getNumCameras() {
        return sNumCameras;
    }

    static bool isHal2Supported(int id) {
        return sCameraSupportsHal2[id];
    }

    static camera2_device_t *openCameraDevice(int id) {
        ALOGV("Opening camera %d", id);
        if (NULL == sCameraSupportsHal2) return NULL;
        if (id >= sNumCameras) return NULL;
        if (!sCameraSupportsHal2[id]) return NULL;

        hw_device_t *device = NULL;
        const camera_module_t *cam_module = getCameraModule();
        if (cam_module == NULL) {
            return NULL;
        }

        char camId[10];
        int res;

        snprintf(camId, 10, "%d", id);
        res = cam_module->common.methods->open(
            (const hw_module_t*)cam_module,
            camId,
            &device);
        if (res != NO_ERROR || device == NULL) {
            return NULL;
        }
        camera2_device_t *cam_device =
                reinterpret_cast<camera2_device_t*>(device);
        return cam_device;
    }

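    // Wires the test's request/frame metadata queues and notification
    // listener into the HAL device's queue and notification interfaces.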
    static status_t configureCameraDevice(camera2_device_t *dev,
            MetadataQueue &requestQueue,
            MetadataQueue &frameQueue,
            NotifierListener &listener) {

        status_t err;

        err = dev->ops->set_request_queue_src_ops(dev,
                requestQueue.getToConsumerInterface());
        if (err != OK) return err;

        requestQueue.setFromConsumerInterface(dev);

        err = dev->ops->set_frame_queue_dst_ops(dev,
                frameQueue.getToProducerInterface());
        if (err != OK) return err;

        err = listener.getNotificationsFrom(dev);
        if (err != OK) return err;

        return OK;
    }


    static status_t closeCameraDevice(camera2_device_t **cam_dev) {
        int res;
        if (*cam_dev == NULL) return OK;

        ALOGV("Closing camera %p", *cam_dev);

        hw_device_t *dev = reinterpret_cast<hw_device_t *>(*cam_dev);
        res = dev->close(dev);
        *cam_dev = NULL;
        return res;
    }

    void setUpCamera(int id) {
        ASSERT_GT(sNumCameras, id);
        status_t res;

        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }
        mId = id;
        mDevice = openCameraDevice(mId);
        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";

        camera_info info;
        res = sCameraModule->get_camera_info(id, &info);
        ASSERT_EQ(OK, res);

        mDeviceVersion = info.device_version;
        mStaticInfo = info.static_camera_characteristics;
        buildOutputResolutions();

        res = configureCameraDevice(mDevice,
                mRequests,
                mFrames,
                mNotifications);
        ASSERT_EQ(OK, res) << "Failure to configure camera device";
    }

    void setUpStream(sp<IGraphicBufferProducer> consumer,
            int width, int height, int format, int *id) {
        status_t res;

        StreamAdapter* stream = new StreamAdapter(consumer);

        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
        res = stream->connectToDevice(mDevice, width, height, format);
        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
                                 << strerror(-res);
        mStreams.push_back(stream);

        *id = stream->getId();
    }

    void disconnectStream(int id) {
        status_t res;
        unsigned int i = 0;
        for (; i < mStreams.size(); i++) {
            if (mStreams[i]->getId() == id) {
                res = mStreams[i]->disconnect();
                ASSERT_EQ(NO_ERROR, res) <<
                        "Failed to disconnect stream " << id;
                break;
            }
        }
        ASSERT_GT(mStreams.size(), i) << "Stream id not found: " << id;
    }

    void buildOutputResolutions() {
        status_t res;
        if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
            return;
        }
        if (mOutputResolutions.isEmpty()) {
            camera_metadata_ro_entry_t availableStrmConfigs;
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                    &availableStrmConfigs);
            ASSERT_EQ(OK, res);
            ASSERT_EQ(0u, availableStrmConfigs.count % 4);
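            // Each stream configuration is a 4-tuple of
            // (format, width, height, input/output direction).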
            for (uint32_t i = 0; i < availableStrmConfigs.count; i += 4) {
                int32_t format = availableStrmConfigs.data.i32[i];
                int32_t width = availableStrmConfigs.data.i32[i + 1];
                int32_t height = availableStrmConfigs.data.i32[i + 2];
                int32_t inOrOut = availableStrmConfigs.data.i32[i + 3];
                if (inOrOut == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
                    int index = mOutputResolutions.indexOfKey(format);
                    if (index < 0) {
                        index = mOutputResolutions.add(format, new Vector<int32_t>());
                        ASSERT_TRUE(index >= 0);
                    }
                    Vector<int32_t> *resolutions = mOutputResolutions.editValueAt(index);
                    resolutions->add(width);
                    resolutions->add(height);
                }
            }
        }
    }

    void deleteOutputResolutions() {
        for (uint32_t i = 0; i < mOutputResolutions.size(); i++) {
            Vector<int32_t>* resolutions = mOutputResolutions.editValueAt(i);
            delete resolutions;
        }
        mOutputResolutions.clear();
    }

    void getResolutionList(int32_t format,
            const int32_t **list,
            size_t *count) {
        status_t res;
        ALOGV("Getting resolutions for format %x", format);
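        // Devices older than HAL 3.2 advertise sizes per format family
        // (raw, JPEG, or processed); 3.2+ devices use the unified stream
        // configuration table parsed in buildOutputResolutions().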
        if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
            if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
                camera_metadata_ro_entry_t availableFormats;
                res = find_camera_metadata_ro_entry(mStaticInfo,
                        ANDROID_SCALER_AVAILABLE_FORMATS,
                        &availableFormats);
                ASSERT_EQ(OK, res);

                uint32_t formatIdx;
                for (formatIdx = 0; formatIdx < availableFormats.count; formatIdx++) {
                    if (availableFormats.data.i32[formatIdx] == format) break;
                }
                ASSERT_NE(availableFormats.count, formatIdx)
                    << "No support found for format 0x" << std::hex << format;
            }

            camera_metadata_ro_entry_t availableSizes;
            if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
                res = find_camera_metadata_ro_entry(mStaticInfo,
                        ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                        &availableSizes);
            } else if (format == HAL_PIXEL_FORMAT_BLOB) {
                res = find_camera_metadata_ro_entry(mStaticInfo,
                        ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                        &availableSizes);
            } else {
                res = find_camera_metadata_ro_entry(mStaticInfo,
                        ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                        &availableSizes);
            }
            ASSERT_EQ(OK, res);

            *list = availableSizes.data.i32;
            *count = availableSizes.count;
        } else {
            int index = mOutputResolutions.indexOfKey(format);
            ASSERT_TRUE(index >= 0);
            Vector<int32_t>* resolutions = mOutputResolutions.valueAt(index);
            *list = resolutions->array();
            *count = resolutions->size();
        }
    }

    status_t waitUntilDrained() {
        static const uint32_t kSleepTime = 50000; // 50 ms
        static const uint32_t kMaxSleepTime = 10000000; // 10 s
        ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);

        // TODO: Set up notifications from HAL, instead of sleeping here
        uint32_t totalTime = 0;
        while (mDevice->ops->get_in_progress_count(mDevice) > 0) {
            usleep(kSleepTime);
            totalTime += kSleepTime;
            if (totalTime > kMaxSleepTime) {
                ALOGE("%s: Waited %d us, %d requests still in flight",
                        __FUNCTION__, totalTime,
                        mDevice->ops->get_in_progress_count(mDevice));
                return TIMED_OUT;
            }
        }
        ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
        return OK;
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;

        SetUpModule();

        const ::testing::TestInfo* const testInfo =
                ::testing::UnitTest::GetInstance()->current_test_info();
        (void)testInfo;

        ALOGV("*** Starting test %s in test case %s", testInfo->name(),
              testInfo->test_case_name());
        mDevice = NULL;
    }

    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;

        for (unsigned int i = 0; i < mStreams.size(); i++) {
            delete mStreams[i];
        }
        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }

        deleteOutputResolutions();
        TearDownModule();
    }

    int mId;
    camera2_device    *mDevice;
    uint32_t mDeviceVersion;
    const camera_metadata_t *mStaticInfo;

    MetadataQueue    mRequests;
    MetadataQueue    mFrames;
    NotifierListener mNotifications;

    Vector<StreamAdapter*> mStreams;
    KeyedVector<int32_t, Vector<int32_t>* > mOutputResolutions;

  private:
    static camera_module_t *sCameraModule;
    static int              sNumCameras;
    static bool            *sCameraSupportsHal2;
};

camera_module_t *Camera2Test::sCameraModule = NULL;
bool *Camera2Test::sCameraSupportsHal2      = NULL;
int Camera2Test::sNumCameras                = 0;

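// nsecs_t counts nanoseconds, so these are microsecond, millisecond, and
// second multipliers expressed in ns.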
static const nsecs_t USEC = 1000;
static const nsecs_t MSEC = 1000*USEC;
static const nsecs_t SEC = 1000*MSEC;

TEST_F(Camera2Test, OpenClose) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        camera2_device_t *d = openCameraDevice(id);
        ASSERT_TRUE(NULL != d) << "Failed to open camera device";

        res = closeCameraDevice(&d);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

TEST_F(Camera2Test, Capture1Raw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t   rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

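        // Build a single raw capture request by hand. Exposure time and
        // frame duration are given in nanoseconds (via the MSEC constant);
        // sensitivity is an ISO value.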
        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 2);
        }

        res = rawWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = rawConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
            ALOGV("Dumping raw buffer to %s", dumpname);
            // Write to file
            std::ofstream rawFile(dumpname);
            size_t bpp = 2;
            for (unsigned int y = 0; y < buffer.height; y++) {
                rawFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            rawFile.close();
        }

        res = rawConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

TEST_F(Camera2Test, CaptureBurstRaw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t    rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((uint32_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request template: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        int numCaptures = 10;

        // Enqueue numCaptures requests with increasing exposure time

        uint64_t exposureTime = 100 * USEC;
        for (int reqCount = 0; reqCount < numCaptures; reqCount++) {
            camera_metadata_t *req;
            req = allocate_camera_metadata(20, 2000);
            append_camera_metadata(req, request);

            add_camera_metadata_entry(req,
                    ANDROID_SENSOR_EXPOSURE_TIME,
                    (void**)&exposureTime, 1);
            exposureTime *= 2;

            res = mRequests.enqueue(req);
            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
                    << strerror(-res);
        }

        // Get frames and image buffers one by one
        uint64_t expectedExposureTime = 100 * USEC;
        for (int frameCount = 0; frameCount < numCaptures; frameCount++) {
            res = mFrames.waitForBuffer(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

            camera_metadata_t *frame;
            res = mFrames.dequeue(&frame);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_TRUE(frame != NULL);

            camera_metadata_entry_t frameNumber;
            res = find_camera_metadata_entry(frame,
                    ANDROID_REQUEST_FRAME_COUNT,
                    &frameNumber);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_EQ(frameCount, *frameNumber.data.i32);

            res = rawWaiter->waitForFrame(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) <<
                    "Never got raw data for capture " << frameCount;

            CpuConsumer::LockedBuffer buffer;
            res = rawConsumer->lockNextBuffer(&buffer);
            ASSERT_EQ(NO_ERROR, res);

            IF_ALOGV() {
                char dumpname[60];
                snprintf(dumpname, 60,
                        "/data/local/tmp/camera2_test-"
                        "captureBurstRaw-dump_%d.raw",
                        frameCount);
                ALOGV("Dumping raw buffer to %s", dumpname);
                // Write to file
                std::ofstream rawFile(dumpname);
                for (unsigned int y = 0; y < buffer.height; y++) {
                    rawFile.write(
                            (const char *)(buffer.data + y * buffer.stride * 2),
                            buffer.width * 2);
                }
                rawFile.close();
            }

            res = rawConsumer->unlockBuffer(buffer);
            ASSERT_EQ(NO_ERROR, res);

            expectedExposureTime *= 2;
        }
    }
}

TEST_F(Camera2Test, ConstructDefaultRequests) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

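        // Ask the HAL to build each predefined request template and check
        // that the result is non-empty metadata.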
        for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT;
             i++) {
            camera_metadata_t *request = NULL;
            res = mDevice->ops->construct_default_request(mDevice,
                    i,
                    &request);
            EXPECT_EQ(NO_ERROR, res) <<
                    "Unable to construct request from template type " << i;
            EXPECT_TRUE(request != NULL);
            EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request));
            EXPECT_LT((size_t)0, get_camera_metadata_data_count(request));

            IF_ALOGV() {
                std::cout << "  ** Template type " << i << ":" << std::endl;
                dump_indented_camera_metadata(request, 0, 2, 4);
            }

            free_camera_metadata(request);
        }
    }
}


TEST_F(Camera2Test, Capture1Jpeg) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> jpegConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> jpegWaiter = new FrameWaiter();
        jpegConsumer->setFrameAvailableListener(jpegWaiter);

        const int32_t *jpegResolutions;
        size_t   jpegResolutionsCount;

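        // JPEG output uses the opaque BLOB pixel format in the camera HAL.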
        int format = HAL_PIXEL_FORMAT_BLOB;

        getResolutionList(format,
                &jpegResolutions, &jpegResolutionsCount);
        ASSERT_LT((size_t)0, jpegResolutionsCount);

        // Pick first available JPEG resolution
        int width = jpegResolutions[0];
        int height = jpegResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 4);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 4);
        }

        res = jpegWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = jpegConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg";
            ALOGV("Dumping JPEG buffer to %s", dumpname);
            // Write to file
            std::ofstream jpegFile(dumpname);
            size_t bpp = 1;
            for (unsigned int y = 0; y < buffer.height; y++) {
                jpegFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            jpegFile.close();
        }

        res = jpegConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

} // namespace tests
} // namespace camera2
} // namespace android