1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "VtsHalEvsTest"
18 
19 
20 // These values are called out in the EVS design doc (as of Mar 8, 2017)
21 static const int kMaxStreamStartMilliseconds = 500;
22 static const int kMinimumFramesPerSecond = 10;
23 
24 static const int kSecondsToMilliseconds = 1000;
25 static const int kMillisecondsToMicroseconds = 1000;
26 static const float kNanoToMilliseconds = 0.000001f;
27 static const float kNanoToSeconds = 0.000000001f;
28 
29 
30 #include "FrameHandler.h"
31 #include "FrameHandlerUltrasonics.h"
32 
33 #include <cstdio>
34 #include <cstring>
35 #include <cstdlib>
36 #include <thread>
37 #include <unordered_set>
38 
39 #include <hidl/HidlTransportSupport.h>
40 #include <hwbinder/ProcessState.h>
41 #include <utils/Errors.h>
42 #include <utils/StrongPointer.h>
43 
44 #include <android-base/logging.h>
45 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
46 #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
47 #include <android/hardware/automotive/evs/1.1/IEvsDisplay.h>
48 #include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
49 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
50 #include <system/camera_metadata.h>
51 #include <ui/DisplayMode.h>
52 #include <ui/DisplayState.h>
53 #include <ui/GraphicBuffer.h>
54 #include <ui/GraphicBufferAllocator.h>
55 
56 #include <gtest/gtest.h>
57 #include <hidl/GtestPrinter.h>
58 #include <hidl/ServiceManagement.h>
59 
60 using namespace ::android::hardware::automotive::evs::V1_1;
61 using namespace std::chrono_literals;
62 
63 using ::android::hardware::Return;
64 using ::android::hardware::Void;
65 using ::android::hardware::hidl_vec;
66 using ::android::hardware::hidl_handle;
67 using ::android::hardware::hidl_string;
68 using ::android::sp;
69 using ::android::wp;
70 using ::android::hardware::camera::device::V3_2::Stream;
71 using ::android::hardware::automotive::evs::V1_1::BufferDesc;
72 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
73 using ::android::hardware::automotive::evs::V1_0::DisplayState;
74 using ::android::hardware::graphics::common::V1_0::PixelFormat;
75 using ::android::frameworks::automotive::display::V1_0::HwDisplayConfig;
76 using ::android::frameworks::automotive::display::V1_0::HwDisplayState;
77 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
78 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
79 using IEvsDisplay_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
80 using IEvsDisplay_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsDisplay;
81 
82 namespace {
83 
84 /*
85  * Please note that this is different from what is defined in
86  * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
87  * field to store a framerate.
88  */
89 typedef struct {
90     int32_t id;
91     int32_t width;
92     int32_t height;
93     int32_t format;
94     int32_t direction;
95     int32_t framerate;
96 } RawStreamConfig;
97 constexpr const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
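// For reference, a sketch of the layout this test assumes: each stream configuration
// in ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS is read as kStreamCfgSz consecutive
// int32_t values, e.g. { id, width, height, format, direction, framerate }, which is
// why getFirstStreamConfiguration() below reinterprets the entry data as RawStreamConfig
// and advances the pointer in steps of kStreamCfgSz.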
98 
99 } // anonymous namespace
100 
101 
102 // The main test class for EVS
103 class EvsHidlTest : public ::testing::TestWithParam<std::string> {
104 public:
105     virtual void SetUp() override {
106         // Make sure we can connect to the enumerator
107         std::string service_name = GetParam();
108         pEnumerator = IEvsEnumerator::getService(service_name);
109         ASSERT_NE(pEnumerator.get(), nullptr);
110         LOG(INFO) << "Test target service: " << service_name;
111 
112         mIsHwModule = pEnumerator->isHardware();
113     }
114 
115     virtual void TearDown() override {
116         // Attempt to close any active camera
117         for (auto &&cam : activeCameras) {
118             if (cam != nullptr) {
119                 pEnumerator->closeCamera(cam);
120             }
121         }
122         activeCameras.clear();
123     }
124 
125 protected:
126     void loadCameraList() {
127         // SetUp() must run first!
128         assert(pEnumerator != nullptr);
129 
130         // Get the camera list
131         pEnumerator->getCameraList_1_1(
132             [this](hidl_vec <CameraDesc> cameraList) {
133                 LOG(INFO) << "Camera list callback received "
134                           << cameraList.size()
135                           << " cameras";
136                 cameraInfo.reserve(cameraList.size());
137                 for (auto&& cam: cameraList) {
138                     LOG(INFO) << "Found camera " << cam.v1.cameraId;
139                     cameraInfo.push_back(cam);
140                 }
141             }
142         );
143     }
144 
145     void loadUltrasonicsArrayList() {
146         // SetUp() must run first!
147         assert(pEnumerator != nullptr);
148 
149         // Get the ultrasonics array list
150         pEnumerator->getUltrasonicsArrayList([this](hidl_vec<UltrasonicsArrayDesc> ultraList) {
151             LOG(INFO) << "Ultrasonics array list callback received "
152                       << ultraList.size()
153                       << " arrays";
154             ultrasonicsArraysInfo.reserve(ultraList.size());
155             for (auto&& ultraArray : ultraList) {
156                 LOG(INFO) << "Found ultrasonics array " << ultraArray.ultrasonicsArrayId;
157                 ultrasonicsArraysInfo.push_back(ultraArray);
158             }
159         });
160     }
161 
162     bool isLogicalCamera(const camera_metadata_t *metadata) {
163         if (metadata == nullptr) {
164             // A logical camera device must have a valid camera metadata.
165             return false;
166         }
167 
168         // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
169         camera_metadata_ro_entry_t entry;
170         int rc = find_camera_metadata_ro_entry(metadata,
171                                                ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
172                                                &entry);
173         if (0 != rc) {
174             // No capabilities are found.
175             return false;
176         }
177 
178         for (size_t i = 0; i < entry.count; ++i) {
179             uint8_t cap = entry.data.u8[i];
180             if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
181                 return true;
182             }
183         }
184 
185         return false;
186     }
187 
188     std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
189                                                          bool& flag) {
190         std::unordered_set<std::string> physicalCameras;
191 
192         auto it = cameraInfo.begin();
193         while (it != cameraInfo.end()) {
194             if (it->v1.cameraId == id) {
195                 break;
196             }
197             ++it;
198         }
199 
200         if (it == cameraInfo.end()) {
201             // Unknown camera is requested.  Return an empty list.
202             return physicalCameras;
203         }
204 
205         const camera_metadata_t *metadata =
206             reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
207         flag = isLogicalCamera(metadata);
208         if (!flag) {
209             // EVS assumes that a device without valid metadata is a physical
210             // device.
211             LOG(INFO) << id << " is not a logical camera device.";
212             physicalCameras.emplace(id);
213             return physicalCameras;
214         }
215 
216         // Look for physical camera identifiers
217         camera_metadata_ro_entry entry;
218         int rc = find_camera_metadata_ro_entry(metadata,
219                                                ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
220                                                &entry);
221         if (rc != 0) {
222             LOG(ERROR) << "No physical camera ID is found for a logical camera device";
223         }
224 
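        // The IDs are packed as a sequence of NUL-terminated strings (e.g. "front\0rear\0",
        // hypothetical names); the loop below splits the raw bytes on '\0' to recover each one.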
225         const uint8_t *ids = entry.data.u8;
226         size_t start = 0;
227         for (size_t i = 0; i < entry.count; ++i) {
228             if (ids[i] == '\0') {
229                 if (start != i) {
230                     std::string id(reinterpret_cast<const char *>(ids + start));
231                     physicalCameras.emplace(id);
232                 }
233                 start = i + 1;
234             }
235         }
236 
237         LOG(INFO) << id
238                   << " consists of "
239                   << physicalCameras.size()
240                   << " physical camera devices";
241         return physicalCameras;
242     }
243 
244     Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
245         Stream targetCfg = {};
246         camera_metadata_entry_t streamCfgs;
247         if (!find_camera_metadata_entry(metadata,
248                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
249                  &streamCfgs)) {
250             // Stream configurations are found in metadata
251             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
252             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
253                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
254                     targetCfg.width = ptr->width;
255                     targetCfg.height = ptr->height;
256                     targetCfg.format = static_cast<PixelFormat>(ptr->format);
257                     break;
258                 }
259                 ++ptr;
260             }
261         }
262 
263         return targetCfg;
264     }
265 
266     sp<IEvsEnumerator>              pEnumerator;   // Every test needs access to the service
267     std::vector<CameraDesc>         cameraInfo;    // Empty unless/until loadCameraList() is called
268     bool                            mIsHwModule;   // True if the module under test is
269                                                    // a hardware (HW) implementation.
270     std::deque<sp<IEvsCamera_1_1>>  activeCameras; // A list of active camera handles that are
271                                                    // needed to be cleaned up.
272     std::vector<UltrasonicsArrayDesc>
273             ultrasonicsArraysInfo;                           // Empty unless/until
274                                                              // loadUltrasonicsArrayList() is called
275     std::deque<wp<IEvsCamera_1_1>> activeUltrasonicsArrays;  // A list of active ultrasonic array
276                                                              // handles that are to be cleaned up.
277 };
278 
279 
280 // Test cases, their implementations, and corresponding requirements are
281 // documented at go/aae-evs-public-api-test.
282 
283 /*
284  * CameraOpenClean:
285  * Opens each camera reported by the enumerator and then explicitly closes it via a
286  * call to closeCamera.  Then repeats the test to ensure all cameras can be reopened.
287  */
288 TEST_P(EvsHidlTest, CameraOpenClean) {
289     LOG(INFO) << "Starting CameraOpenClean test";
290 
291     // Get the camera list
292     loadCameraList();
293 
294     // Open and close each camera twice
295     for (auto&& cam: cameraInfo) {
296         bool isLogicalCam = false;
297         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
298         if (mIsHwModule && isLogicalCam) {
299             LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
300             continue;
301         }
302 
303         // Read a target resolution from the metadata
304         Stream targetCfg =
305             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
306         ASSERT_GT(targetCfg.width, 0);
307         ASSERT_GT(targetCfg.height, 0);
308 
309         for (int pass = 0; pass < 2; pass++) {
310             sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
311             ASSERT_NE(pCam, nullptr);
312 
313             for (auto&& devName : devices) {
314                 bool matched = false;
315                 pCam->getPhysicalCameraInfo(devName,
316                                             [&devName, &matched](const CameraDesc& info) {
317                                                 matched = devName == info.v1.cameraId;
318                                             });
319                 ASSERT_TRUE(matched);
320             }
321 
322             // Store a camera handle for a clean-up
323             activeCameras.push_back(pCam);
324 
325             // Verify that this camera self-identifies correctly
326             pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
327                                         LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
328                                         EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
329                                     }
330             );
331 
332             // Verify methods for extended info
333             const auto id = 0xFFFFFFFF; // meaningless id
334             hidl_vec<uint8_t> values;
335             auto err = pCam->setExtendedInfo_1_1(id, values);
336             if (isLogicalCam) {
337                 // Logical camera device does not support setExtendedInfo
338                 // method.
339                 ASSERT_EQ(EvsResult::INVALID_ARG, err);
340             } else {
341                 ASSERT_NE(EvsResult::INVALID_ARG, err);
342             }
343 
344 
345             pCam->getExtendedInfo_1_1(id, [&isLogicalCam](const auto& result, const auto& data) {
346                 if (isLogicalCam) {
347                     ASSERT_EQ(EvsResult::INVALID_ARG, result);
348                 } else {
349                     ASSERT_NE(EvsResult::INVALID_ARG, result);
350                     ASSERT_EQ(0, data.size());
351                 }
352             });
353 
354             // Explicitly close the camera so resources are released right away
355             pEnumerator->closeCamera(pCam);
356             activeCameras.clear();
357         }
358     }
359 }
360 
361 
362 /*
363  * CameraOpenAggressive:
364  * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
365  * call.  This ensures that the intended "aggressive open" behavior works.  This is necessary for
366  * the system to be tolerant of shutdown/restart race conditions.
367  */
368 TEST_P(EvsHidlTest, CameraOpenAggressive) {
369     LOG(INFO) << "Starting CameraOpenAggressive test";
370 
371     // Get the camera list
372     loadCameraList();
373 
374     // Open each camera twice in a row, then close both instances
375     for (auto&& cam: cameraInfo) {
376         bool isLogicalCam = false;
377         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
378         if (mIsHwModule && isLogicalCam) {
379             LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
380             continue;
381         }
382 
383         // Read a target resolution from the metadata
384         Stream targetCfg =
385             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
386         ASSERT_GT(targetCfg.width, 0);
387         ASSERT_GT(targetCfg.height, 0);
388 
389         activeCameras.clear();
390         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
391         ASSERT_NE(pCam, nullptr);
392 
393         // Store a camera handle for a clean-up
394         activeCameras.push_back(pCam);
395 
396         // Verify that this camera self-identifies correctly
397         pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
398                                     LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
399                                     EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
400                                 }
401         );
402 
403         sp<IEvsCamera_1_1> pCam2 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
404         ASSERT_NE(pCam2, nullptr);
405 
406         // Store a camera handle for a clean-up
407         activeCameras.push_back(pCam2);
408 
409         ASSERT_NE(pCam, pCam2);
410 
411         Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
412         if (mIsHwModule) {
413             // Verify that the old camera rejects calls via HW module.
414             EXPECT_EQ(EvsResult::OWNERSHIP_LOST, EvsResult(result));
415         } else {
416             // default implementation supports multiple clients.
417             EXPECT_EQ(EvsResult::OK, EvsResult(result));
418         }
419 
420         // Close the superseded camera
421         pEnumerator->closeCamera(pCam);
422         activeCameras.pop_front();
423 
424         // Verify that the second camera instance self-identifies correctly
425         pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
426                                      LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
427                                      EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
428                                  }
429         );
430 
431         // Close the second camera instance
432         pEnumerator->closeCamera(pCam2);
433         activeCameras.pop_front();
434     }
435 
436     // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
437     sleep(1);   // I hate that this is an arbitrary time to wait.  :(  b/36122635
438 }
439 
440 
441 /*
442  * CameraStreamPerformance:
443  * Measure and qualify the stream start up time and streaming frame rate of each reported camera
444  */
445 TEST_P(EvsHidlTest, CameraStreamPerformance) {
446     LOG(INFO) << "Starting CameraStreamPerformance test";
447 
448     // Get the camera list
449     loadCameraList();
450 
451     // Test each reported camera
452     for (auto&& cam: cameraInfo) {
453         bool isLogicalCam = false;
454         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
455         if (mIsHwModule && isLogicalCam) {
456             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
457             continue;
458         }
459 
460         // Read a target resolution from the metadata
461         Stream targetCfg =
462             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
463         ASSERT_GT(targetCfg.width, 0);
464         ASSERT_GT(targetCfg.height, 0);
465 
466         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
467         ASSERT_NE(pCam, nullptr);
468 
469         // Store a camera handle for a clean-up
470         activeCameras.push_back(pCam);
471 
472         // Set up a frame receiver object which will fire up its own thread
473         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
474                                                          nullptr,
475                                                          FrameHandler::eAutoReturn);
476 
477         // Start the camera's video stream
478         nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
479 
480         bool startResult = frameHandler->startStream();
481         ASSERT_TRUE(startResult);
482 
483         // Ensure the first frame arrived within the expected time
484         frameHandler->waitForFrameCount(1);
485         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
486         nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
487 
488         // Extra delays are expected when we attempt to start a video stream on
489         // a logical camera device.  The delay is expected to be at most
490         // kMaxStreamStartMilliseconds multiplied by the number of physical
491         // camera devices.
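        // For example, a logical camera backed by two physical devices would be allowed
        // up to 2 * 500 ms = 1000 ms before its first frame arrives.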
492         EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
493                   kMaxStreamStartMilliseconds * devices.size());
494         printf("%s: Measured time to first frame %0.2f ms\n",
495                cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
496         LOG(INFO) << cam.v1.cameraId
497                   << ": Measured time to first frame "
498                   << std::scientific << timeToFirstFrame * kNanoToMilliseconds
499                   << " ms.";
500 
501         // Wait a bit, then ensure we get at least the required minimum number of frames
502         sleep(5);
503         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
504 
505         // Even when the camera pointer goes out of scope, the FrameHandler object will
506         // keep the stream alive unless we tell it to shutdown.
507         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
508         // we have to break that cycle in order for either of them to get cleaned up.
509         frameHandler->shutdown();
510 
511         unsigned framesReceived = 0;
512         frameHandler->getFramesCounters(&framesReceived, nullptr);
513         framesReceived = framesReceived - 1;    // Back out the first frame we already waited for
514         nsecs_t runTime = end - firstFrame;
515         float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
516         printf("Measured camera rate %3.2f fps\n", framesPerSecond);
517         LOG(INFO) << "Measured camera rate "
518                   << std::scientific << framesPerSecond
519                   << " fps.";
520         EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
521 
522         // Explicitly release the camera
523         pEnumerator->closeCamera(pCam);
524         activeCameras.clear();
525     }
526 }
527 
528 
529 /*
530  * CameraStreamBuffering:
531  * Ensure the camera implementation behaves properly when the client holds onto buffers for more
532  * than one frame time.  The camera must cleanly skip frames until the client is ready again.
533  */
534 TEST_P(EvsHidlTest, CameraStreamBuffering) {
535     LOG(INFO) << "Starting CameraStreamBuffering test";
536 
537     // Maximum number of frames in flight this test case will attempt. This test
538     // case chooses an arbitrary number that is large enough to run a camera
539     // pipeline for a single client.
540     constexpr unsigned int kMaxBuffersToHold = 20;
541 
542     // Initial value for setMaxFramesInFlight() call. This number should be
543     // greater than 1.
544     unsigned int buffersToHold = 2;
545 
546     // Get the camera list
547     loadCameraList();
548 
549     // Test each reported camera
550     for (auto&& cam: cameraInfo) {
551         bool isLogicalCam = false;
552         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
553         if (mIsHwModule && isLogicalCam) {
554             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
555             continue;
556         }
557 
558         // Read a target resolution from the metadata
559         Stream targetCfg =
560             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
561         ASSERT_GT(targetCfg.width, 0);
562         ASSERT_GT(targetCfg.height, 0);
563 
564         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
565         ASSERT_NE(pCam, nullptr);
566 
567         // Store a camera handle for a clean-up
568         activeCameras.push_back(pCam);
569 
570         // Ask for a very large number of buffers in flight to ensure it errors correctly
571         Return<EvsResult> badResult =
572                 pCam->setMaxFramesInFlight(std::numeric_limits<int32_t>::max());
573         EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
574 
575         // Now find the smallest number of buffers in flight, starting from two, that the camera accepts
576         while (buffersToHold < kMaxBuffersToHold) {
577             Return<EvsResult> goodResult = pCam->setMaxFramesInFlight(buffersToHold);
578             if (goodResult == EvsResult::OK) {
579                 break;
580             }
581 
582             ++buffersToHold;
583         }
584         EXPECT_LE(buffersToHold, kMaxBuffersToHold);
585 
586         // Set up a frame receiver object which will fire up its own thread.
587         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
588                                                          nullptr,
589                                                          FrameHandler::eNoAutoReturn);
590 
591         // Start the camera's video stream
592         bool startResult = frameHandler->startStream();
593         ASSERT_TRUE(startResult);
594 
595         // Check that the video stream stalls once we've gotten exactly the number of buffers
596         // we requested since we told the frameHandler not to return them.
597         sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
598         unsigned framesReceived = 0;
599         frameHandler->getFramesCounters(&framesReceived, nullptr);
600         ASSERT_EQ(buffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
601 
602 
603         // Give back one buffer
604         bool didReturnBuffer = frameHandler->returnHeldBuffer();
605         EXPECT_TRUE(didReturnBuffer);
606 
607         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
608         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
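        // (100 ms nominal frame period + 10% allowance = 110 ms, converted to microseconds below.)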
609         usleep(110 * kMillisecondsToMicroseconds);
610         frameHandler->getFramesCounters(&framesReceived, nullptr);
611         EXPECT_EQ(buffersToHold+1, framesReceived) << "Stream should've resumed";
612 
613         // Even when the camera pointer goes out of scope, the FrameHandler object will
614         // keep the stream alive unless we tell it to shutdown.
615         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
616         // we have to break that cycle in order for either of them to get cleaned up.
617         frameHandler->shutdown();
618 
619         // Explicitly release the camera
620         pEnumerator->closeCamera(pCam);
621         activeCameras.clear();
622     }
623 }
624 
625 
626 /*
627  * CameraToDisplayRoundTrip:
628  * End to end test of data flowing from the camera to the display.  Each delivered frame of camera
629  * imagery is simply copied to the display buffer and presented on screen.  This is the one test
630  * which a human could observe to see the operation of the system on the physical display.
631  */
632 TEST_P(EvsHidlTest, CameraToDisplayRoundTrip) {
633     LOG(INFO) << "Starting CameraToDisplayRoundTrip test";
634 
635     // Get the camera list
636     loadCameraList();
637 
638     // Request available display IDs
639     uint8_t targetDisplayId = 0;
640     pEnumerator->getDisplayIdList([&targetDisplayId](auto ids) {
641         ASSERT_GT(ids.size(), 0);
642         targetDisplayId = ids[0];
643     });
644 
645     // Test each reported camera
646     for (auto&& cam: cameraInfo) {
647         // Request exclusive access to the first EVS display
648         sp<IEvsDisplay_1_1> pDisplay = pEnumerator->openDisplay_1_1(targetDisplayId);
649         ASSERT_NE(pDisplay, nullptr);
650         LOG(INFO) << "Display " << static_cast<int>(targetDisplayId) << " is in use now.";
651 
652         // Get the display descriptor
653         pDisplay->getDisplayInfo_1_1([](const HwDisplayConfig& config, const HwDisplayState& state) {
654             ASSERT_GT(config.size(), 0);
655             ASSERT_GT(state.size(), 0);
656 
657             android::ui::DisplayMode* pConfig = (android::ui::DisplayMode*)config.data();
658             const auto width = pConfig->resolution.getWidth();
659             const auto height = pConfig->resolution.getHeight();
660             LOG(INFO) << "    Resolution: " << width << "x" << height;
661             ASSERT_GT(width, 0);
662             ASSERT_GT(height, 0);
663 
664             android::ui::DisplayState* pState = (android::ui::DisplayState*)state.data();
665             ASSERT_NE(pState->layerStack, android::ui::INVALID_LAYER_STACK);
666         });
667 
668         bool isLogicalCam = false;
669         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
670         if (mIsHwModule && isLogicalCam) {
671             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
672             continue;
673         }
674 
675         // Read a target resolution from the metadata
676         Stream targetCfg =
677             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
678         ASSERT_GT(targetCfg.width, 0);
679         ASSERT_GT(targetCfg.height, 0);
680 
681         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
682         ASSERT_NE(pCam, nullptr);
683 
684         // Store a camera handle for a clean-up
685         activeCameras.push_back(pCam);
686 
687         // Set up a frame receiver object which will fire up its own thread.
688         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
689                                                          pDisplay,
690                                                          FrameHandler::eAutoReturn);
691 
692 
693         // Activate the display
694         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
695 
696         // Start the camera's video stream
697         bool startResult = frameHandler->startStream();
698         ASSERT_TRUE(startResult);
699 
700         // Wait a while to let the data flow
701         static const int kSecondsToWait = 5;
702         const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
703                                  kMaxStreamStartMilliseconds;
704         const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
705                                                kSecondsToMilliseconds;
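        // With the constants above: (5 * 1000 - 500) ms = 4500 ms of streaming time,
        // i.e. at least 4500 * 10 / 1000 = 45 frames are expected.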
706         sleep(kSecondsToWait);
707         unsigned framesReceived = 0;
708         unsigned framesDisplayed = 0;
709         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
710         EXPECT_EQ(framesReceived, framesDisplayed);
711         EXPECT_GE(framesDisplayed, minimumFramesExpected);
712 
713         // Turn off the display (yes, before the stream stops -- it should be handled)
714         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
715 
716         // Shut down the streamer
717         frameHandler->shutdown();
718 
719         // Explicitly release the camera
720         pEnumerator->closeCamera(pCam);
721         activeCameras.clear();
722 
723         // Explicitly release the display
724         pEnumerator->closeDisplay(pDisplay);
725     }
726 }
727 
728 
729 /*
730  * MultiCameraStream:
731  * Verify that each client can start and stop video streams on the same
732  * underlying camera.
733  */
734 TEST_P(EvsHidlTest, MultiCameraStream) {
735     LOG(INFO) << "Starting MultiCameraStream test";
736 
737     if (mIsHwModule) {
738         // This test is not for HW module implementation.
739         return;
740     }
741 
742     // Get the camera list
743     loadCameraList();
744 
745     // Test each reported camera
746     for (auto&& cam: cameraInfo) {
747         // Read a target resolution from the metadata
748         Stream targetCfg =
749             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
750         ASSERT_GT(targetCfg.width, 0);
751         ASSERT_GT(targetCfg.height, 0);
752 
753         // Create two camera clients.
754         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
755         ASSERT_NE(pCam0, nullptr);
756 
757         // Store a camera handle for a clean-up
758         activeCameras.push_back(pCam0);
759 
760         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
761         ASSERT_NE(pCam1, nullptr);
762 
763         // Store a camera handle for a clean-up
764         activeCameras.push_back(pCam1);
765 
766         // Set up per-client frame receiver objects, each of which will fire up its own thread
767         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
768                                                           nullptr,
769                                                           FrameHandler::eAutoReturn);
770         ASSERT_NE(frameHandler0, nullptr);
771 
772         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
773                                                           nullptr,
774                                                           FrameHandler::eAutoReturn);
775         ASSERT_NE(frameHandler1, nullptr);
776 
777         // Start the camera's video stream via both clients
778         bool startResult = false;
779         startResult = frameHandler0->startStream() &&
780                       frameHandler1->startStream();
781         ASSERT_TRUE(startResult);
782 
783         // Ensure the stream starts
784         frameHandler0->waitForFrameCount(1);
785         frameHandler1->waitForFrameCount(1);
786 
787         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
788 
789         // Wait a bit, then ensure both clients get at least the required minimum number of frames
790         sleep(5);
791         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
792         unsigned framesReceived0 = 0, framesReceived1 = 0;
793         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
794         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
795         framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
796         framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
797         nsecs_t runTime = end - firstFrame;
798         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
799         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
800         LOG(INFO) << "Measured camera rate "
801                   << std::scientific << framesPerSecond0 << " fps and "
802                   << framesPerSecond1 << " fps";
803         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
804         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
805 
806         // Shutdown one client
807         frameHandler0->shutdown();
808 
809         // Read frame counters again
810         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
811         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
812 
813         // Wait a bit again
814         sleep(5);
815         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
816         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
817         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
818         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
819         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
820 
821         // Shutdown another
822         frameHandler1->shutdown();
823 
824         // Explicitly release the camera
825         pEnumerator->closeCamera(pCam0);
826         pEnumerator->closeCamera(pCam1);
827         activeCameras.clear();
828 
829         // TODO(b/145459970, b/145457727): the sleep() below is added to ensure the
830         // destruction of active camera objects; this may be related to these two
831         // issues.
832         sleep(1);
833     }
834 }
835 
836 
837 /*
838  * CameraParameter:
839  * Verify that a client can adjust a camera parameter.
840  */
841 TEST_P(EvsHidlTest, CameraParameter) {
842     LOG(INFO) << "Starting CameraParameter test";
843 
844     // Get the camera list
845     loadCameraList();
846 
847     // Test each reported camera
848     Return<EvsResult> result = EvsResult::OK;
849     for (auto&& cam: cameraInfo) {
850         bool isLogicalCam = false;
851         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
852         if (isLogicalCam) {
853             // TODO(b/145465724): Support camera parameter programming on
854             // logical devices.
855             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
856             continue;
857         }
858 
859         // Read a target resolution from the metadata
860         Stream targetCfg =
861             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
862         ASSERT_GT(targetCfg.width, 0);
863         ASSERT_GT(targetCfg.height, 0);
864 
865         // Create a camera client
866         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
867         ASSERT_NE(pCam, nullptr);
868 
869         // Store a camera
870         activeCameras.push_back(pCam);
871 
872         // Get the parameter list
873         std::vector<CameraParam> cmds;
874         pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
875                 cmds.reserve(cmdList.size());
876                 for (auto &&cmd : cmdList) {
877                     cmds.push_back(cmd);
878                 }
879             }
880         );
881 
882         if (cmds.size() < 1) {
883             continue;
884         }
885 
886         // Set up a frame receiver object which will fire up its own thread
887         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
888                                                          nullptr,
889                                                          FrameHandler::eAutoReturn);
890         ASSERT_NE(frameHandler, nullptr);
891 
892         // Start the camera's video stream
893         bool startResult = frameHandler->startStream();
894         ASSERT_TRUE(startResult);
895 
896         // Ensure the stream starts
897         frameHandler->waitForFrameCount(1);
898 
899         result = pCam->setMaster();
900         ASSERT_EQ(EvsResult::OK, result);
901 
902         for (auto &cmd : cmds) {
903             // Get a valid parameter value range
904             int32_t minVal, maxVal, step;
905             pCam->getIntParameterRange(
906                 cmd,
907                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
908                     minVal = val0;
909                     maxVal = val1;
910                     step   = val2;
911                 }
912             );
913 
914             EvsResult result = EvsResult::OK;
915             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
916                 // Try to turn off auto-focus
917                 std::vector<int32_t> values;
918                 pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
919                                    [&result, &values](auto status, auto effectiveValues) {
920                                        result = status;
921                                        if (status == EvsResult::OK) {
922                                           for (auto &&v : effectiveValues) {
923                                               values.push_back(v);
924                                           }
925                                        }
926                                    });
927                 ASSERT_EQ(EvsResult::OK, result);
928                 for (auto &&v : values) {
929                     ASSERT_EQ(v, 0);
930                 }
931             }
932 
933             // Try to program a parameter with a random value [minVal, maxVal]
934             int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
935             std::vector<int32_t> values;
936 
937             // Rounding down
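            // (e.g. with step = 5, a draw of 132 becomes 130)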
938             val0 = val0 - (val0 % step);
939             pCam->setIntParameter(cmd, val0,
940                                [&result, &values](auto status, auto effectiveValues) {
941                                    result = status;
942                                    if (status == EvsResult::OK) {
943                                       for (auto &&v : effectiveValues) {
944                                           values.push_back(v);
945                                       }
946                                    }
947                                });
948 
949             ASSERT_EQ(EvsResult::OK, result);
950 
951             values.clear();
952             pCam->getIntParameter(cmd,
953                                [&result, &values](auto status, auto readValues) {
954                                    result = status;
955                                    if (status == EvsResult::OK) {
956                                       for (auto &&v : readValues) {
957                                           values.push_back(v);
958                                       }
959                                    }
960                                });
961             ASSERT_EQ(EvsResult::OK, result);
962             for (auto &&v : values) {
963                 ASSERT_EQ(val0, v) << "Values are not matched.";
964             }
965         }
966 
967         result = pCam->unsetMaster();
968         ASSERT_EQ(EvsResult::OK, result);
969 
970         // Shutdown
971         frameHandler->shutdown();
972 
973         // Explicitly release the camera
974         pEnumerator->closeCamera(pCam);
975         activeCameras.clear();
976     }
977 }
978 
979 
980 /*
981  * CameraPrimaryClientRelease:
982  * Verify that a non-primary client gets notified when the primary client either
983  * terminates or releases its role.
984  */
985 TEST_P(EvsHidlTest, CameraPrimaryClientRelease) {
986     LOG(INFO) << "Starting CameraPrimaryClientRelease test";
987 
988     if (mIsHwModule) {
989         // This test is not for HW module implementation.
990         return;
991     }
992 
993     // Get the camera list
994     loadCameraList();
995 
996     // Test each reported camera
997     for (auto&& cam: cameraInfo) {
998         bool isLogicalCam = false;
999         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
1000         if (isLogicalCam) {
1001             // TODO(b/145465724): Support camera parameter programming on
1002             // logical devices.
1003             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
1004             continue;
1005         }
1006 
1007         // Read a target resolution from the metadata
1008         Stream targetCfg =
1009             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1010         ASSERT_GT(targetCfg.width, 0);
1011         ASSERT_GT(targetCfg.height, 0);
1012 
1013         // Create two camera clients.
1014         sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1015         ASSERT_NE(pCamPrimary, nullptr);
1016 
1017         // Store a camera handle for a clean-up
1018         activeCameras.push_back(pCamPrimary);
1019 
1020         sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1021         ASSERT_NE(pCamSecondary, nullptr);
1022 
1023         // Store a camera handle for a clean-up
1024         activeCameras.push_back(pCamSecondary);
1025 
1026         // Set up per-client frame receiver objects, each of which will fire up its own thread
1027         sp<FrameHandler> frameHandlerPrimary =
1028             new FrameHandler(pCamPrimary, cam,
1029                              nullptr,
1030                              FrameHandler::eAutoReturn);
1031         ASSERT_NE(frameHandlerPrimary, nullptr);
1032         sp<FrameHandler> frameHandlerSecondary =
1033             new FrameHandler(pCamSecondary, cam,
1034                              nullptr,
1035                              FrameHandler::eAutoReturn);
1036         ASSERT_NE(frameHandlerSecondary, nullptr);
1037 
1038         // Set one client as the primary client
1039         EvsResult result = pCamPrimary->setMaster();
1040         ASSERT_TRUE(result == EvsResult::OK);
1041 
1042         // Try to set another client as the primary client.
1043         result = pCamSecondary->setMaster();
1044         ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1045 
1046         // Start the camera's video stream via the primary client.
1047         bool startResult = frameHandlerPrimary->startStream();
1048         ASSERT_TRUE(startResult);
1049 
1050         // Ensure the stream starts
1051         frameHandlerPrimary->waitForFrameCount(1);
1052 
1053         // Start the camera's video stream via another client
1054         startResult = frameHandlerSecondary->startStream();
1055         ASSERT_TRUE(startResult);
1056 
1057         // Ensure the stream starts
1058         frameHandlerSecondary->waitForFrameCount(1);
1059 
1060         // The non-primary client expects to receive a notification when the primary
1061         // client releases its role.
1062         EvsEventDesc aTargetEvent  = {};
1063         EvsEventDesc aNotification = {};
1064 
1065         bool listening = false;
1066         std::mutex eventLock;
1067         std::condition_variable eventCond;
1068         std::thread listener = std::thread(
1069             [&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
1070                 // Notify that a listening thread is running.
1071                 listening = true;
1072                 eventCond.notify_all();
1073 
1074                 EvsEventDesc aTargetEvent;
1075                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1076                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
1077                     LOG(WARNING) << "The timer expired before the target event was fired.";
1078                 }
1079 
1080             }
1081         );
1082 
1083         // Wait until a listening thread starts.
1084         std::unique_lock<std::mutex> lock(eventLock);
1085         auto timer = std::chrono::system_clock::now();
1086         while (!listening) {
1087             timer += 1s;
1088             eventCond.wait_until(lock, timer);
1089         }
1090         lock.unlock();
1091 
1092         // Release a primary client role.
1093         pCamPrimary->unsetMaster();
1094 
1095         // Join a listening thread.
1096         if (listener.joinable()) {
1097             listener.join();
1098         }
1099 
1100         // Verify change notifications.
1101         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1102                   static_cast<EvsEventType>(aNotification.aType));
1103 
1104         // Non-primary becomes a primary client.
1105         result = pCamSecondary->setMaster();
1106         ASSERT_TRUE(result == EvsResult::OK);
1107 
1108         // Previous primary client fails to become a primary client.
1109         result = pCamPrimary->setMaster();
1110         ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
1111 
1112         listening = false;
1113         listener = std::thread(
1114             [&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
1115                 // Notify that a listening thread is running.
1116                 listening = true;
1117                 eventCond.notify_all();
1118 
1119                 EvsEventDesc aTargetEvent;
1120                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1121                 if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
1122                     LOG(WARNING) << "The timer expired before the target event was fired.";
1123                 }
1124 
1125             }
1126         );
1127 
1128         // Wait until a listening thread starts.
1129         timer = std::chrono::system_clock::now();
1130         lock.lock();
1131         while (!listening) {
1132             eventCond.wait_until(lock, timer + 1s);
1133         }
1134         lock.unlock();
1135 
1136         // Closing current primary client.
1137         frameHandlerSecondary->shutdown();
1138 
1139         // Join a listening thread.
1140         if (listener.joinable()) {
1141             listener.join();
1142         }
1143 
1144         // Verify change notifications.
1145         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1146                   static_cast<EvsEventType>(aNotification.aType));
1147 
1148         // Closing streams.
1149         frameHandlerPrimary->shutdown();
1150 
1151         // Explicitly release the camera
1152         pEnumerator->closeCamera(pCamPrimary);
1153         pEnumerator->closeCamera(pCamSecondary);
1154         activeCameras.clear();
1155     }
1156 }
1157 
1158 
1159 /*
1160  * MultiCameraParameter:
1161  * Verify that primary and non-primary clients behave as expected when they try to adjust
1162  * camera parameters.
1163  */
1164 TEST_P(EvsHidlTest, MultiCameraParameter) {
1165     LOG(INFO) << "Starting MultiCameraParameter test";
1166 
1167     if (mIsHwModule) {
1168         // This test is not for HW module implementation.
1169         return;
1170     }
1171 
1172     // Get the camera list
1173     loadCameraList();
1174 
1175     // Test each reported camera
1176     for (auto&& cam: cameraInfo) {
1177         bool isLogicalCam = false;
1178         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
1179         if (isLogicalCam) {
1180             // TODO(b/145465724): Support camera parameter programming on
1181             // logical devices.
1182             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
1183             continue;
1184         }
1185 
1186         // Read a target resolution from the metadata
1187         Stream targetCfg =
1188             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1189         ASSERT_GT(targetCfg.width, 0);
1190         ASSERT_GT(targetCfg.height, 0);
1191 
1192         // Create two camera clients.
1193         sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1194         ASSERT_NE(pCamPrimary, nullptr);
1195 
1196         // Store a camera handle for a clean-up
1197         activeCameras.push_back(pCamPrimary);
1198 
1199         sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1200         ASSERT_NE(pCamSecondary, nullptr);
1201 
1202         // Store a camera handle for a clean-up
1203         activeCameras.push_back(pCamSecondary);
1204 
1205         // Get the parameter list
1206         std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
1207         pCamPrimary->getParameterList([&camPrimaryCmds](hidl_vec<CameraParam> cmdList) {
1208                 camPrimaryCmds.reserve(cmdList.size());
1209                 for (auto &&cmd : cmdList) {
1210                     camPrimaryCmds.push_back(cmd);
1211                 }
1212             }
1213         );
1214 
1215         pCamSecondary->getParameterList([&camSecondaryCmds](hidl_vec<CameraParam> cmdList) {
1216                 camSecondaryCmds.reserve(cmdList.size());
1217                 for (auto &&cmd : cmdList) {
1218                     camSecondaryCmds.push_back(cmd);
1219                 }
1220             }
1221         );
1222 
1223         if (camPrimaryCmds.size() < 1 ||
1224             camSecondaryCmds.size() < 1) {
1225             // Skip a camera device if it does not support any parameter.
1226             continue;
1227         }
1228 
1229         // Set up per-client frame receiver objects which will fire up its own thread
1230         sp<FrameHandler> frameHandlerPrimary =
1231             new FrameHandler(pCamPrimary, cam,
1232                              nullptr,
1233                              FrameHandler::eAutoReturn);
1234         ASSERT_NE(frameHandlerPrimary, nullptr);
1235         sp<FrameHandler> frameHandlerSecondary =
1236             new FrameHandler(pCamSecondary, cam,
1237                              nullptr,
1238                              FrameHandler::eAutoReturn);
1239         ASSERT_NE(frameHandlerSecondary, nullptr);
1240 
1241         // Set one client as the primary client.
1242         EvsResult result = pCamPrimary->setMaster();
1243         ASSERT_EQ(EvsResult::OK, result);
1244 
1245         // Try to set another client as the primary client.
1246         result = pCamSecondary->setMaster();
1247         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1248 
1249         // Start the camera's video stream via the primary client.
1250         bool startResult = frameHandlerPrimary->startStream();
1251         ASSERT_TRUE(startResult);
1252 
1253         // Ensure the stream starts
1254         frameHandlerPrimary->waitForFrameCount(1);
1255 
1256         // Start the camera's video stream via another client
1257         startResult = frameHandlerSecondary->startStream();
1258         ASSERT_TRUE(startResult);
1259 
1260         // Ensure the stream starts
1261         frameHandlerSecondary->waitForFrameCount(1);
1262 
1263         int32_t val0 = 0;
1264         std::vector<int32_t> values;
1265         EvsEventDesc aNotification0 = {};
1266         EvsEventDesc aNotification1 = {};
1267         for (auto &cmd : camPrimaryCmds) {
1268             // Get a valid parameter value range
1269             int32_t minVal, maxVal, step;
1270             pCamPrimary->getIntParameterRange(
1271                 cmd,
1272                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1273                     minVal = val0;
1274                     maxVal = val1;
1275                     step   = val2;
1276                 }
1277             );
1278 
1279             EvsResult result = EvsResult::OK;
1280             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1281                 // Try to turn off auto-focus
1282                 values.clear();
1283                 pCamPrimary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1284                                    [&result, &values](auto status, auto effectiveValues) {
1285                                        result = status;
1286                                        if (status == EvsResult::OK) {
1287                                           for (auto &&v : effectiveValues) {
1288                                               values.push_back(v);
1289                                           }
1290                                        }
1291                                    });
1292                 ASSERT_EQ(EvsResult::OK, result);
1293                 for (auto &&v : values) {
1294                     ASSERT_EQ(v, 0);
1295                 }
1296             }
1297 
1298             // Calculate a parameter value to program.
1299             val0 = minVal + (std::rand() % (maxVal - minVal));
1300             val0 = val0 - (val0 % step);
1301 
1302             // Prepare and start event listeners.
1303             bool listening0 = false;
1304             bool listening1 = false;
1305             std::condition_variable eventCond;
1306             std::thread listener0 = std::thread(
1307                 [cmd, val0,
1308                  &aNotification0, &frameHandlerPrimary, &listening0, &listening1, &eventCond]() {
1309                     listening0 = true;
1310                     if (listening1) {
1311                         eventCond.notify_all();
1312                     }
1313 
1314                     EvsEventDesc aTargetEvent;
1315                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1316                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1317                     aTargetEvent.payload[1] = val0;
1318                     if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1319                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1320                     }
1321                 }
1322             );
1323             std::thread listener1 = std::thread(
1324                 [cmd, val0,
1325                  &aNotification1, &frameHandlerSecondary, &listening0, &listening1, &eventCond]() {
1326                     listening1 = true;
1327                     if (listening0) {
1328                         eventCond.notify_all();
1329                     }
1330 
1331                     EvsEventDesc aTargetEvent;
1332                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1333                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1334                     aTargetEvent.payload[1] = val0;
1335                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1336                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1337                     }
1338                 }
1339             );
1340 
1341             // Wait until both listening threads start.
1342             std::mutex eventLock;
1343             std::unique_lock<std::mutex> lock(eventLock);
1344             auto timer = std::chrono::system_clock::now();
1345             while (!listening0 || !listening1) {
1346                 eventCond.wait_until(lock, timer + 1s);
1347             }
1348             lock.unlock();
1349 
1350             // Try to program a parameter
1351             values.clear();
1352             pCamPrimary->setIntParameter(cmd, val0,
1353                                      [&result, &values](auto status, auto effectiveValues) {
1354                                          result = status;
1355                                          if (status == EvsResult::OK) {
1356                                             for (auto &&v : effectiveValues) {
1357                                                 values.push_back(v);
1358                                             }
1359                                          }
1360                                      });
1361 
1362             ASSERT_EQ(EvsResult::OK, result);
1363             for (auto &&v : values) {
1364                 ASSERT_EQ(val0, v) << "Values are not matched.";
1365             }
1366 
1367             // Join the listening threads.
1368             if (listener0.joinable()) {
1369                 listener0.join();
1370             }
1371             if (listener1.joinable()) {
1372                 listener1.join();
1373             }
1374 
1375             // Verify a change notification
1376             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1377                       static_cast<EvsEventType>(aNotification0.aType));
1378             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1379                       static_cast<EvsEventType>(aNotification1.aType));
1380             ASSERT_EQ(cmd,
1381                       static_cast<CameraParam>(aNotification0.payload[0]));
1382             ASSERT_EQ(cmd,
1383                       static_cast<CameraParam>(aNotification1.payload[0]));
1384             for (auto &&v : values) {
1385                 ASSERT_EQ(v,
1386                           static_cast<int32_t>(aNotification0.payload[1]));
1387                 ASSERT_EQ(v,
1388                           static_cast<int32_t>(aNotification1.payload[1]));
1389             }
1390 
1391             // Clients expect to receive a parameter change notification
1392             // whenever the primary client adjusts a parameter.
1393             values.clear();
1394             pCamPrimary->getIntParameter(cmd,
1395                                      [&result, &values](auto status, auto readValues) {
1396                                          result = status;
1397                                          if (status == EvsResult::OK) {
1398                                             for (auto &&v : readValues) {
1399                                                 values.push_back(v);
1400                                             }
1401                                          }
1402                                      });
1403             ASSERT_EQ(EvsResult::OK, result);
1404             for (auto &&v : values) {
1405                 ASSERT_EQ(val0, v) << "Values are not matched.";
1406             }
1407         }
1408 
1409         // Try to adjust a parameter via a non-primary client
1410         values.clear();
1411         pCamSecondary->setIntParameter(camSecondaryCmds[0], val0,
1412                                     [&result, &values](auto status, auto effectiveValues) {
1413                                         result = status;
1414                                         if (status == EvsResult::OK) {
1415                                             for (auto &&v : effectiveValues) {
1416                                                 values.push_back(v);
1417                                             }
1418                                         }
1419                                     });
1420         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1421 
1422         // Non-primary client attempts to be a primary client
1423         result = pCamSecondary->setMaster();
1424         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1425 
1426         // The primary client retires from its role
1427         bool listening = false;
1428         std::condition_variable eventCond;
1429         std::thread listener = std::thread(
1430             [&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1431                 listening = true;
1432                 eventCond.notify_all();
1433 
1434                 EvsEventDesc aTargetEvent;
1435                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1436                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1437                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1438                 }
1439             }
1440         );
1441 
1442         std::mutex eventLock;
1443         auto timer = std::chrono::system_clock::now();
1444         std::unique_lock<std::mutex> lock(eventLock);
1445         while (!listening) {
1446             eventCond.wait_until(lock, timer + 1s);
1447         }
1448         lock.unlock();
1449 
1450         result = pCamPrimary->unsetMaster();
1451         ASSERT_EQ(EvsResult::OK, result);
1452 
1453         if (listener.joinable()) {
1454             listener.join();
1455         }
1456         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1457                   static_cast<EvsEventType>(aNotification0.aType));
1458 
1459         // Try to adjust a parameter after being retired
1460         values.clear();
1461         pCamPrimary->setIntParameter(camPrimaryCmds[0], val0,
1462                                  [&result, &values](auto status, auto effectiveValues) {
1463                                      result = status;
1464                                      if (status == EvsResult::OK) {
1465                                         for (auto &&v : effectiveValues) {
1466                                             values.push_back(v);
1467                                         }
1468                                      }
1469                                  });
1470         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1471 
1472         // Non-primary client becomes a primary client
1473         result = pCamSecondary->setMaster();
1474         ASSERT_EQ(EvsResult::OK, result);
1475 
1476         // Try to adjust a parameter via the new primary client
1477         for (auto &cmd : camSecondaryCmds) {
1478             // Get a valid parameter value range
1479             int32_t minVal, maxVal, step;
1480             pCamSecondary->getIntParameterRange(
1481                 cmd,
1482                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1483                     minVal = val0;
1484                     maxVal = val1;
1485                     step   = val2;
1486                 }
1487             );
1488 
1489             EvsResult result = EvsResult::OK;
1490             values.clear();
1491             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1492                 // Try to turn off auto-focus
1493                 values.clear();
1494                 pCamSecondary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1495                                    [&result, &values](auto status, auto effectiveValues) {
1496                                        result = status;
1497                                        if (status == EvsResult::OK) {
1498                                           for (auto &&v : effectiveValues) {
1499                                               values.push_back(v);
1500                                           }
1501                                        }
1502                                    });
1503                 ASSERT_EQ(EvsResult::OK, result);
1504                 for (auto &&v : values) {
1505                     ASSERT_EQ(v, 0);
1506                 }
1507             }
1508 
1509             // Calculate a parameter value to program; it is rounded down to a multiple of the step.
1510             val0 = minVal + (std::rand() % (maxVal - minVal));
1511             val0 = val0 - (val0 % step);
1512 
1513             // Prepare and start event listeners.
1514             bool listening0 = false;
1515             bool listening1 = false;
1516             std::condition_variable eventCond;
1517             std::thread listener0 = std::thread(
1518                 [&]() {
1519                     listening0 = true;
1520                     if (listening1) {
1521                         eventCond.notify_all();
1522                     }
1523 
1524                     EvsEventDesc aTargetEvent;
1525                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1526                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1527                     aTargetEvent.payload[1] = val0;
1528                     if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1529                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1530                     }
1531                 }
1532             );
1533             std::thread listener1 = std::thread(
1534                 [&]() {
1535                     listening1 = true;
1536                     if (listening0) {
1537                         eventCond.notify_all();
1538                     }
1539 
1540                     EvsEventDesc aTargetEvent;
1541                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1542                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1543                     aTargetEvent.payload[1] = val0;
1544                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1545                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1546                     }
1547                 }
1548             );
1549 
1550             // Wait until both listening threads start.
1551             std::mutex eventLock;
1552             std::unique_lock<std::mutex> lock(eventLock);
1553             auto timer = std::chrono::system_clock::now();
1554             while (!listening0 || !listening1) {
1555                 eventCond.wait_until(lock, timer + 1s);
1556             }
1557             lock.unlock();
1558 
1559             // Try to program a parameter
1560             values.clear();
1561             pCamSecondary->setIntParameter(cmd, val0,
1562                                         [&result, &values](auto status, auto effectiveValues) {
1563                                             result = status;
1564                                             if (status == EvsResult::OK) {
1565                                                 for (auto &&v : effectiveValues) {
1566                                                     values.push_back(v);
1567                                                 }
1568                                             }
1569                                         });
1570             ASSERT_EQ(EvsResult::OK, result);
1571 
1572             // Clients expect to receive a parameter change notification
1573             // whenever the primary client adjusts a parameter.
1574             values.clear();
1575             pCamSecondary->getIntParameter(cmd,
1576                                         [&result, &values](auto status, auto readValues) {
1577                                             result = status;
1578                                             if (status == EvsResult::OK) {
1579                                                 for (auto &&v : readValues) {
1580                                                     values.push_back(v);
1581                                                 }
1582                                             }
1583                                         });
1584             ASSERT_EQ(EvsResult::OK, result);
1585             for (auto &&v : values) {
1586                 ASSERT_EQ(val0, v) << "Values are not matched.";
1587             }
1588 
1589             // Join the listening threads.
1590             if (listener0.joinable()) {
1591                 listener0.join();
1592             }
1593             if (listener1.joinable()) {
1594                 listener1.join();
1595             }
1596 
1597             // Verify a change notification
1598             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1599                       static_cast<EvsEventType>(aNotification0.aType));
1600             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1601                       static_cast<EvsEventType>(aNotification1.aType));
1602             ASSERT_EQ(cmd,
1603                       static_cast<CameraParam>(aNotification0.payload[0]));
1604             ASSERT_EQ(cmd,
1605                       static_cast<CameraParam>(aNotification1.payload[0]));
1606             for (auto &&v : values) {
1607                 ASSERT_EQ(v,
1608                           static_cast<int32_t>(aNotification0.payload[1]));
1609                 ASSERT_EQ(v,
1610                           static_cast<int32_t>(aNotification1.payload[1]));
1611             }
1612         }
1613 
1614         // New primary client retires from the role
1615         result = pCamSecondary->unsetMaster();
1616         ASSERT_EQ(EvsResult::OK, result);
1617 
1618         // Shutdown
1619         frameHandlerPrimary->shutdown();
1620         frameHandlerSecondary->shutdown();
1621 
1622         // Explicitly release the camera
1623         pEnumerator->closeCamera(pCamPrimary);
1624         pEnumerator->closeCamera(pCamSecondary);
1625         activeCameras.clear();
1626     }
1627 }
1628 
1629 
1630 /*
1631  * HighPriorityCameraClient:
1632  * An EVS client that owns the display is prioritized and can therefore take over
1633  * the primary client role from other EVS clients that do not own the display.
1634  */
1635 TEST_P(EvsHidlTest, HighPriorityCameraClient) {
1636     LOG(INFO) << "Starting HighPriorityCameraClient test";
1637 
1638     if (mIsHwModule) {
1639         // This test is not for HW module implementation.
1640         return;
1641     }
1642 
1643     // Get the camera list
1644     loadCameraList();
1645 
1646     // Test each reported camera
1647     for (auto&& cam: cameraInfo) {
1648         // Request exclusive access to the EVS display
1649         sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
1650         ASSERT_NE(pDisplay, nullptr);
1651 
1652         // Read a target resolution from the metadata
1653         Stream targetCfg =
1654             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1655         ASSERT_GT(targetCfg.width, 0);
1656         ASSERT_GT(targetCfg.height, 0);
1657 
1658         // Create two clients
1659         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1660         ASSERT_NE(pCam0, nullptr);
1661 
1662         // Store a camera handle for a clean-up
1663         activeCameras.push_back(pCam0);
1664 
1665         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1666         ASSERT_NE(pCam1, nullptr);
1667 
1668         // Store a camera handle for a clean-up
1669         activeCameras.push_back(pCam1);
1670 
1671         // Get the parameter list; this test will use the first command in both
1672         // lists.
1673         std::vector<CameraParam> cam0Cmds, cam1Cmds;
1674         pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
1675                 cam0Cmds.reserve(cmdList.size());
1676                 for (auto &&cmd : cmdList) {
1677                     cam0Cmds.push_back(cmd);
1678                 }
1679             }
1680         );
1681 
1682         pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
1683                 cam1Cmds.reserve(cmdList.size());
1684                 for (auto &&cmd : cmdList) {
1685                     cam1Cmds.push_back(cmd);
1686                 }
1687             }
1688         );
1689         if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1690             // Cannot execute this test.
1691             return;
1692         }
1693 
1694         // Set up a frame receiver object which will fire up its own thread.
1695         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
1696                                                           pDisplay,
1697                                                           FrameHandler::eAutoReturn);
1698         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
1699                                                           nullptr,
1700                                                           FrameHandler::eAutoReturn);
1701 
1702         // Activate the display
1703         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
1704 
1705         // Start the camera's video stream
1706         ASSERT_TRUE(frameHandler0->startStream());
1707         ASSERT_TRUE(frameHandler1->startStream());
1708 
1709         // Ensure the stream starts
1710         frameHandler0->waitForFrameCount(1);
1711         frameHandler1->waitForFrameCount(1);
1712 
1713         // Client 1 becomes a primary client and programs a parameter.
1714         EvsResult result = EvsResult::OK;
1715         // Get a valid parameter value range
1716         int32_t minVal, maxVal, step;
1717         pCam1->getIntParameterRange(
1718             cam1Cmds[0],
1719             [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1720                 minVal = val0;
1721                 maxVal = val1;
1722                 step   = val2;
1723             }
1724         );
1725 
1726         // Client 1 becomes the primary client
1727         result = pCam1->setMaster();
1728         ASSERT_EQ(EvsResult::OK, result);
1729 
1730         std::vector<int32_t> values;
1731         EvsEventDesc aTargetEvent  = {};
1732         EvsEventDesc aNotification = {};
1733         bool listening = false;
1734         std::mutex eventLock;
1735         std::condition_variable eventCond;
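        // If the first supported command is ABSOLUTE_FOCUS, auto-focus is turned off
        // first so that a manually programmed focus value can take effect.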
1736         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1737             std::thread listener = std::thread(
1738                 [&frameHandler0, &aNotification, &listening, &eventCond] {
1739                     listening = true;
1740                     eventCond.notify_all();
1741 
1742                     EvsEventDesc aTargetEvent;
1743                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1744                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1745                     aTargetEvent.payload[1] = 0;
1746                     if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1747                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1748                     }
1749                 }
1750             );
1751 
1752             // Wait until the listener starts.
1753             std::unique_lock<std::mutex> lock(eventLock);
1754             auto timer = std::chrono::system_clock::now();
1755             while (!listening) {
1756                 eventCond.wait_until(lock, timer + 1s);
1757             }
1758             lock.unlock();
1759 
1760             // Try to turn off auto-focus
1761             pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1762                                [&result, &values](auto status, auto effectiveValues) {
1763                                    result = status;
1764                                    if (status == EvsResult::OK) {
1765                                       for (auto &&v : effectiveValues) {
1766                                           values.push_back(v);
1767                                       }
1768                                    }
1769                                });
1770             ASSERT_EQ(EvsResult::OK, result);
1771             for (auto &&v : values) {
1772                 ASSERT_EQ(v, 0);
1773             }
1774 
1775             // Join a listener
1776             if (listener.joinable()) {
1777                 listener.join();
1778             }
1779 
1780             // Make sure AUTO_FOCUS is off.
1781             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1782                       EvsEventType::PARAMETER_CHANGED);
1783         }
1784 
1785         // Try to program a parameter with a random value [minVal, maxVal] after
1786         // rounding it down.
1787         int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
1788         val0 = val0 - (val0 % step);
1789 
1790         std::thread listener = std::thread(
1791             [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1792                 listening = true;
1793                 eventCond.notify_all();
1794 
1795                 EvsEventDesc aTargetEvent;
1796                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1797                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
1798                 aTargetEvent.payload[1] = val0;
1799                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1800                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1801                 }
1802             }
1803         );
1804 
1805         // Wait until the listener starts.
1806         listening = false;
1807         std::unique_lock<std::mutex> lock(eventLock);
1808         auto timer = std::chrono::system_clock::now();
1809         while (!listening) {
1810             eventCond.wait_until(lock, timer + 1s);
1811         }
1812         lock.unlock();
1813 
1814         values.clear();
1815         pCam1->setIntParameter(cam1Cmds[0], val0,
1816                             [&result, &values](auto status, auto effectiveValues) {
1817                                 result = status;
1818                                 if (status == EvsResult::OK) {
1819                                     for (auto &&v : effectiveValues) {
1820                                         values.push_back(v);
1821                                     }
1822                                 }
1823                             });
1824         ASSERT_EQ(EvsResult::OK, result);
1825         for (auto &&v : values) {
1826             ASSERT_EQ(val0, v);
1827         }
1828 
1829         // Join a listener
1830         if (listener.joinable()) {
1831             listener.join();
1832         }
1833 
1834         // Verify a change notification
1835         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1836                   EvsEventType::PARAMETER_CHANGED);
1837         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1838                   cam1Cmds[0]);
1839         for (auto &&v : values) {
1840             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1841         }
1842 
1843         listener = std::thread(
1844             [&frameHandler1, &aNotification, &listening, &eventCond] {
1845                 listening = true;
1846                 eventCond.notify_all();
1847 
1848                 EvsEventDesc aTargetEvent;
1849                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1850                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1851                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1852                 }
1853             }
1854         );
1855 
1856         // Wait until the listener starts.
1857         listening = false;
1858         lock.lock();
1859         timer = std::chrono::system_clock::now();
1860         while (!listening) {
1861             eventCond.wait_until(lock, timer + 1s);
1862         }
1863         lock.unlock();
1864 
1865         // Client 0 steals a primary client role
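        // Because client 0 holds the display, forceMaster() is expected to succeed,
        // and the previous primary client (client 1) should receive MASTER_RELEASED.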
1866         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
1867 
1868         // Join a listener
1869         if (listener.joinable()) {
1870             listener.join();
1871         }
1872 
1873         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1874                   EvsEventType::MASTER_RELEASED);
1875 
1876         // Client 0 programs a parameter
1877         val0 = minVal + (std::rand() % (maxVal - minVal));
1878 
1879         // Rounding down
1880         val0 = val0 - (val0 % step);
1881 
1882         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1883             std::thread listener = std::thread(
1884                 [&frameHandler1, &aNotification, &listening, &eventCond] {
1885                     listening = true;
1886                     eventCond.notify_all();
1887 
1888                     EvsEventDesc aTargetEvent;
1889                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1890                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1891                     aTargetEvent.payload[1] = 0;
1892                     if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1893                         LOG(WARNING) << "A timer is expired before a target event is fired.";
1894                     }
1895                 }
1896             );
1897 
1898             // Wait until the listener starts.
1899             std::unique_lock<std::mutex> lock(eventLock);
1900             auto timer = std::chrono::system_clock::now();
1901             while (!listening) {
1902                 eventCond.wait_until(lock, timer + 1s);
1903             }
1904             lock.unlock();
1905 
1906             // Try to turn off auto-focus
1907             values.clear();
1908             pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1909                                [&result, &values](auto status, auto effectiveValues) {
1910                                    result = status;
1911                                    if (status == EvsResult::OK) {
1912                                       for (auto &&v : effectiveValues) {
1913                                           values.push_back(v);
1914                                       }
1915                                    }
1916                                });
1917             ASSERT_EQ(EvsResult::OK, result);
1918             for (auto &&v : values) {
1919                 ASSERT_EQ(v, 0);
1920             }
1921 
1922             // Join a listener
1923             if (listener.joinable()) {
1924                 listener.join();
1925             }
1926 
1927             // Make sure AUTO_FOCUS is off.
1928             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1929                       EvsEventType::PARAMETER_CHANGED);
1930         }
1931 
1932         listener = std::thread(
1933             [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1934                 listening = true;
1935                 eventCond.notify_all();
1936 
1937                 EvsEventDesc aTargetEvent;
1938                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1939                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
1940                 aTargetEvent.payload[1] = val0;
1941                 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1942                     LOG(WARNING) << "A timer is expired before a target event is fired.";
1943                 }
1944             }
1945         );
1946 
1947         // Wait until the listener starts.
1948         listening = false;
1949         timer = std::chrono::system_clock::now();
1950         lock.lock();
1951         while (!listening) {
1952             eventCond.wait_until(lock, timer + 1s);
1953         }
1954         lock.unlock();
1955 
1956         values.clear();
1957         pCam0->setIntParameter(cam0Cmds[0], val0,
1958                             [&result, &values](auto status, auto effectiveValues) {
1959                                 result = status;
1960                                 if (status == EvsResult::OK) {
1961                                     for (auto &&v : effectiveValues) {
1962                                         values.push_back(v);
1963                                     }
1964                                 }
1965                             });
1966         ASSERT_EQ(EvsResult::OK, result);
1967 
1968         // Join a listener
1969         if (listener.joinable()) {
1970             listener.join();
1971         }
1972         // Verify a change notification
1973         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1974                   EvsEventType::PARAMETER_CHANGED);
1975         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1976                   cam0Cmds[0]);
1977         for (auto &&v : values) {
1978             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1979         }
1980 
1981         // Turn off the display (yes, before the stream stops -- it should be handled)
1982         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
1983 
1984         // Shut down the streamer
1985         frameHandler0->shutdown();
1986         frameHandler1->shutdown();
1987 
1988         // Explicitly release the camera
1989         pEnumerator->closeCamera(pCam0);
1990         pEnumerator->closeCamera(pCam1);
1991         activeCameras.clear();
1992 
1993         // Explicitly release the display
1994         pEnumerator->closeDisplay(pDisplay);
1995     }
1996 }
1997 
1998 
1999 /*
2000  * CameraUseStreamConfigToDisplay:
2001  * End-to-end test of data flowing from the camera to the display.  Similar to
2002  * the CameraToDisplayRoundTrip test case, but this case retrieves the available
2003  * stream configurations from EVS and uses one of them to start a video stream.
2004  */
2005 TEST_P(EvsHidlTest, CameraUseStreamConfigToDisplay) {
2006     LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
2007 
2008     // Get the camera list
2009     loadCameraList();
2010 
2011     // Test each reported camera
2012     for (auto&& cam: cameraInfo) {
2013         bool isLogicalCam = false;
2014         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2015         if (isLogicalCam) {
2016             LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
2017             continue;
2018         }
2019 
2020         // Request exclusive access to the EVS display
2021         sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
2022         ASSERT_NE(pDisplay, nullptr);
2023 
2024         // Choose a configuration whose frame rate is at least minReqFps.
2025         Stream targetCfg = {};
2026         const int32_t minReqFps = 15;
2027         int32_t maxArea = 0;
2028         camera_metadata_entry_t streamCfgs;
2029         bool foundCfg = false;
2030         if (!find_camera_metadata_entry(
2031                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2032                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2033                  &streamCfgs)) {
2034             // Stream configurations are found in metadata
2035             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
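            // Each configuration occupies kStreamCfgSz consecutive int32 values, so
            // incrementing the RawStreamConfig pointer advances one whole entry.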
2036             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2037                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
2038                     if (ptr->width * ptr->height > maxArea &&
2039                         ptr->framerate >= minReqFps) {
2040                         targetCfg.width = ptr->width;
2041                         targetCfg.height = ptr->height;
2042                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
2043 
2044                         maxArea = ptr->width * ptr->height;
2045                         foundCfg = true;
2046                     }
2047                 }
2048                 ++ptr;
2049             }
2050         }
2051 
2052         if (!foundCfg) {
2053             // Current EVS camera does not provide stream configurations in the
2054             // metadata.
2055             continue;
2056         }
2057 
2058         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2059         ASSERT_NE(pCam, nullptr);
2060 
2061         // Store a camera handle for a clean-up
2062         activeCameras.push_back(pCam);
2063 
2064         // Set up a frame receiver object which will fire up its own thread.
2065         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2066                                                          pDisplay,
2067                                                          FrameHandler::eAutoReturn);
2068 
2069 
2070         // Activate the display
2071         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
2072 
2073         // Start the camera's video stream
2074         bool startResult = frameHandler->startStream();
2075         ASSERT_TRUE(startResult);
2076 
2077         // Wait a while to let the data flow
2078         static const int kSecondsToWait = 5;
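        // Allow kMaxStreamStartMilliseconds for the stream to come up, then require
        // at least kMinimumFramesPerSecond over the remaining observation window.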
2079         const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
2080                                  kMaxStreamStartMilliseconds;
2081         const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
2082                                                kSecondsToMilliseconds;
2083         sleep(kSecondsToWait);
2084         unsigned framesReceived = 0;
2085         unsigned framesDisplayed = 0;
2086         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
2087         EXPECT_EQ(framesReceived, framesDisplayed);
2088         EXPECT_GE(framesDisplayed, minimumFramesExpected);
2089 
2090         // Turn off the display (yes, before the stream stops -- it should be handled)
2091         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
2092 
2093         // Shut down the streamer
2094         frameHandler->shutdown();
2095 
2096         // Explicitly release the camera
2097         pEnumerator->closeCamera(pCam);
2098         activeCameras.clear();
2099 
2100         // Explicitly release the display
2101         pEnumerator->closeDisplay(pDisplay);
2102     }
2103 }
2104 
2105 
2106 /*
2107  * MultiCameraStreamUseConfig:
2108  * Verify that each client can start and stop video streams on the same
2109  * underlying camera with the same configuration.
2110  */
2111 TEST_P(EvsHidlTest, MultiCameraStreamUseConfig) {
2112     LOG(INFO) << "Starting MultiCameraStream test";
2113 
2114     if (mIsHwModule) {
2115         // This test is not for HW module implementation.
2116         return;
2117     }
2118 
2119     // Get the camera list
2120     loadCameraList();
2121 
2122     // Test each reported camera
2123     for (auto&& cam: cameraInfo) {
2124         // Choose a configuration whose frame rate is at least minReqFps.
2125         Stream targetCfg = {};
2126         const int32_t minReqFps = 15;
2127         int32_t maxArea = 0;
2128         camera_metadata_entry_t streamCfgs;
2129         bool foundCfg = false;
2130         if (!find_camera_metadata_entry(
2131                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2132                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2133                  &streamCfgs)) {
2134             // Stream configurations are found in metadata
2135             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
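            // Same selection logic as above: pick the largest output configuration
            // whose reported frame rate is at least minReqFps.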
2136             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2137                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
2138                     if (ptr->width * ptr->height > maxArea &&
2139                         ptr->framerate >= minReqFps) {
2140                         targetCfg.width = ptr->width;
2141                         targetCfg.height = ptr->height;
2142                         targetCfg.format = static_cast<PixelFormat>(ptr->format);
2143 
2144                         maxArea = ptr->width * ptr->height;
2145                         foundCfg = true;
2146                     }
2147                 }
2148                 ++ptr;
2149             }
2150         }
2151 
2152         if (!foundCfg) {
2153             LOG(INFO) << "Device " << cam.v1.cameraId
2154                       << " does not provide a list of supported stream configurations, skipped";
2155             continue;
2156         }
2157 
2158         // Create the first camera client with a selected stream configuration.
2159         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2160         ASSERT_NE(pCam0, nullptr);
2161 
2162         // Store a camera handle for a clean-up
2163         activeCameras.push_back(pCam0);
2164 
2165         // Try to create the second camera client with different stream
2166         // configuration.
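        // This is expected to fail; clients of the same underlying camera must share
        // a single stream configuration, so a mismatching stream id is rejected.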
2167         int32_t id = targetCfg.id;
2168         targetCfg.id += 1;  // EVS manager sees only the stream id.
2169         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2170         ASSERT_EQ(pCam1, nullptr);
2171 
2172         // Try again with the same stream configuration.
2173         targetCfg.id = id;
2174         pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2175         ASSERT_NE(pCam1, nullptr);
2176 
2177         // Store a camera handle for a clean-up
2178         activeCameras.push_back(pCam1);
2179 
2180         // Set up per-client frame receiver objects, each of which will fire up its own thread
2181         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
2182                                                           nullptr,
2183                                                           FrameHandler::eAutoReturn);
2184         ASSERT_NE(frameHandler0, nullptr);
2185 
2186         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
2187                                                           nullptr,
2188                                                           FrameHandler::eAutoReturn);
2189         ASSERT_NE(frameHandler1, nullptr);
2190 
2191         // Start the camera's video stream via both clients
2192         bool startResult = false;
2193         startResult = frameHandler0->startStream() &&
2194                       frameHandler1->startStream();
2195         ASSERT_TRUE(startResult);
2196 
2197         // Ensure the stream starts
2198         frameHandler0->waitForFrameCount(1);
2199         frameHandler1->waitForFrameCount(1);
2200 
2201         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
2202 
2203         // Wait a bit, then ensure both clients get at least the required minimum number of frames
2204         sleep(5);
2205         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
2206         unsigned framesReceived0 = 0, framesReceived1 = 0;
2207         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2208         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2209         framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
2210         framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
2211         nsecs_t runTime = end - firstFrame;
2212         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
2213         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
2214         LOG(INFO) << "Measured camera rate "
2215                   << std::scientific << framesPerSecond0 << " fps and "
2216                   << framesPerSecond1 << " fps";
2217         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
2218         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
2219 
2220         // Shutdown one client
2221         frameHandler0->shutdown();
2222 
2223         // Read frame counters again
2224         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2225         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2226 
2227         // Wait a bit again
2228         sleep(5);
2229         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
2230         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
2231         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
2232         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
2233         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
2234 
2235         // Shutdown another
2236         frameHandler1->shutdown();
2237 
2238         // Explicitly release the camera
2239         pEnumerator->closeCamera(pCam0);
2240         pEnumerator->closeCamera(pCam1);
2241         activeCameras.clear();
2242     }
2243 }
2244 
2245 
2246 /*
2247  * LogicalCameraMetadata:
2248  * Opens each logical camera reported by the enumerator and validates its
2249  * metadata by checking its capabilities and locating the identifiers of the
2250  * physical camera devices that back it.
2251  */
2252 TEST_P(EvsHidlTest, LogicalCameraMetadata) {
2253     LOG(INFO) << "Starting LogicalCameraMetadata test";
2254 
2255     // Get the camera list
2256     loadCameraList();
2257 
2258     // Open and close each camera twice
2259     for (auto&& cam: cameraInfo) {
2260         bool isLogicalCam = false;
2261         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2262         if (isLogicalCam) {
2263             ASSERT_GE(devices.size(), 1) <<
2264                 "Logical camera device must have at least one physical camera device ID in its metadata.";
2265         }
2266     }
2267 }
2268 
2269 
2270 /*
2271  * CameraStreamExternalBuffering:
2272  * This is the same as CameraStreamBuffering except that frame buffers are
2273  * allocated by the test client and then imported by the EVS framework.
2274  */
2275 TEST_P(EvsHidlTest, CameraStreamExternalBuffering) {
2276     LOG(INFO) << "Starting CameraStreamExternalBuffering test";
2277 
2278     // Arbitrary constant (should be > 1 and not too big)
2279     static const unsigned int kBuffersToHold = 3;
2280 
2281     // Get the camera list
2282     loadCameraList();
2283 
2284     // Acquire the graphics buffer allocator
2285     android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
2286     const auto usage =
2287             GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
2288 
2289     // Test each reported camera
2290     for (auto&& cam : cameraInfo) {
2291         // Read a target resolution from the metadata
2292         Stream targetCfg =
2293             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
2294         ASSERT_GT(targetCfg.width, 0);
2295         ASSERT_GT(targetCfg.height, 0);
2296 
2297         // Allocate buffers to use
2298         hidl_vec<BufferDesc> buffers;
2299         buffers.resize(kBuffersToHold);
2300         for (auto i = 0; i < kBuffersToHold; ++i) {
2301             unsigned pixelsPerLine;
2302             buffer_handle_t memHandle = nullptr;
2303             android::status_t result =
2304                     alloc.allocate(targetCfg.width, targetCfg.height,
2305                                    (android::PixelFormat)targetCfg.format,
2306                                    /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2307                                    /* graphicBufferId = */ 0,
2308                                    /* requestorName = */ "CameraStreamExternalBufferingTest");
2309             if (result != android::NO_ERROR) {
2310                 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2311                 // Release previously allocated buffers
2312                 for (auto j = 0; j < i; j++) {
2313                     alloc.free(buffers[j].buffer.nativeHandle);
2314                 }
2315                 return;
2316             } else {
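                // Describe the allocation by writing an AHardwareBuffer_Desc into
                // BufferDesc::buffer::description, matching the cast used below.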
2317                 BufferDesc buf;
2318                 AHardwareBuffer_Desc* pDesc =
2319                         reinterpret_cast<AHardwareBuffer_Desc*>(&buf.buffer.description);
2320                 pDesc->width = targetCfg.width;
2321                 pDesc->height = targetCfg.height;
2322                 pDesc->layers = 1;
2323                 pDesc->format = static_cast<uint32_t>(targetCfg.format);
2324                 pDesc->usage = usage;
2325                 pDesc->stride = pixelsPerLine;
2326                 buf.buffer.nativeHandle = memHandle;
2327                 buf.bufferId = i;  // Unique number to identify this buffer
2328                 buffers[i] = buf;
2329             }
2330         }
2331 
2332         bool isLogicalCam = false;
2333         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2334 
2335         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2336         ASSERT_NE(pCam, nullptr);
2337 
2338         // Store a camera handle for a clean-up
2339         activeCameras.push_back(pCam);
2340 
2341         // Request to import buffers
2342         EvsResult result = EvsResult::OK;
2343         int delta = 0;
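        // importExternalBuffers() reports through "delta" how much the camera's
        // buffer pool changed; it should grow by at least the number of buffers
        // imported here.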
2344         pCam->importExternalBuffers(buffers,
2345                                     [&] (auto _result, auto _delta) {
2346                                         result = _result;
2347                                         delta = _delta;
2348                                     });
2349         if (isLogicalCam) {
2350             EXPECT_EQ(result, EvsResult::UNDERLYING_SERVICE_ERROR);
2351             continue;
2352         }
2353 
2354         EXPECT_EQ(result, EvsResult::OK);
2355         EXPECT_GE(delta, kBuffersToHold);
2356 
2357         // Set up a frame receiver object which will fire up its own thread.
2358         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2359                                                          nullptr,
2360                                                          FrameHandler::eNoAutoReturn);
2361 
2362         // Start the camera's video stream
2363         bool startResult = frameHandler->startStream();
2364         ASSERT_TRUE(startResult);
2365 
2366         // Check that the video stream stalls once we've gotten exactly the number of buffers
2367         // we requested since we told the frameHandler not to return them.
2368         sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
2369         unsigned framesReceived = 0;
2370         frameHandler->getFramesCounters(&framesReceived, nullptr);
2371         ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2372 
2373 
2374         // Give back one buffer
2375         bool didReturnBuffer = frameHandler->returnHeldBuffer();
2376         EXPECT_TRUE(didReturnBuffer);
2377 
2378         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2379         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2380         unsigned framesReceivedAfter = 0;
2381         usleep(110 * kMillisecondsToMicroseconds);
2382         frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2383         EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2384 
2385         // Even when the camera pointer goes out of scope, the FrameHandler object will
2386         // keep the stream alive unless we tell it to shutdown.
2387         // Also note that the FrameHandler and the Camera have a mutual circular reference, so
2388         // we have to break that cycle in order for either of them to get cleaned up.
2389         frameHandler->shutdown();
2390 
2391         // Explicitly release the camera
2392         pEnumerator->closeCamera(pCam);
2393         activeCameras.clear();
2394         // Release buffers
2395         for (auto& b : buffers) {
2396             alloc.free(b.buffer.nativeHandle);
2397         }
2398         buffers.resize(0);
2399     }
2400 }
2401 
2402 
2403 /*
2404  * UltrasonicsArrayOpenClean:
2405  * Opens each ultrasonics arrays reported by the enumerator and then explicitly closes it via a
2406  * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2407  * can be reopened.
2408  */
2409 TEST_P(EvsHidlTest, UltrasonicsArrayOpenClean) {
2410     LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2411 
2412     // Get the ultrasonics array list
2413     loadUltrasonicsArrayList();
2414 
2415     // Open and close each ultrasonics array twice
2416     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2417         for (int pass = 0; pass < 2; pass++) {
2418             sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2419                     pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2420             ASSERT_NE(pUltrasonicsArray, nullptr);
2421 
2422             // Verify that this ultrasonics array self-identifies correctly
2423             pUltrasonicsArray->getUltrasonicArrayInfo([&ultraInfo](UltrasonicsArrayDesc desc) {
2424                 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2425                 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2426             });
2427 
2428             // Explicitly close the ultrasonics array so resources are released right away
2429             pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2430         }
2431     }
2432 }
2433 
2434 
2435 // Starts a stream and verifies all data received is valid.
2436 TEST_P(EvsHidlTest, UltrasonicsVerifyStreamData) {
2437     LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2438 
2439     // Get the ultrasonics array list
2440     loadUltrasonicsArrayList();
2441 
2442     // For each ultrasonics array.
2443     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2444         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2445 
2446         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2447                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2448         ASSERT_NE(pUltrasonicsArray, nullptr);
2449 
2450         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2451 
2452         // Start stream.
2453         EvsResult result = pUltrasonicsArray->startStream(frameHandler);
2454         ASSERT_EQ(result, EvsResult::OK);
2455 
2456         // Wait 5 seconds to receive frames.
2457         sleep(5);
2458 
2459         // Stop stream.
2460         pUltrasonicsArray->stopStream();
2461 
2462         EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2463         EXPECT_TRUE(frameHandler->areAllFramesValid());
2464 
2465         // Explicitly close the ultrasonics array so resources are released right away
2466         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2467     }
2468 }
2469 
2470 
2471 // Sets the maximum number of frames in flight before and after the start of a stream and verifies success.
2472 TEST_P(EvsHidlTest, UltrasonicsSetFramesInFlight) {
2473     LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2474 
2475     // Get the ultrasonics array list
2476     loadUltrasonicsArrayList();
2477 
2478     // For each ultrasonics array.
2479     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2480         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2481 
2482         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2483                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2484         ASSERT_NE(pUltrasonicsArray, nullptr);
2485 
2486         EvsResult result = pUltrasonicsArray->setMaxFramesInFlight(10);
2487         EXPECT_EQ(result, EvsResult::OK);
2488 
2489         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2490 
2491         // Start stream.
2492         result = pUltrasonicsArray->startStream(frameHandler);
2493         ASSERT_EQ(result, EvsResult::OK);
2494 
2495         result = pUltrasonicsArray->setMaxFramesInFlight(5);
2496         EXPECT_EQ(result, EvsResult::OK);
2497 
2498         // Stop stream.
2499         pUltrasonicsArray->stopStream();
2500 
2501         // Explicitly close the ultrasonics array so resources are released right away
2502         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2503     }
2504 }
2505 
2506 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsHidlTest);
2507 INSTANTIATE_TEST_SUITE_P(
2508     PerInstance,
2509     EvsHidlTest,
2510     testing::ValuesIn(android::hardware::getAllHalInstanceNames(IEvsEnumerator::descriptor)),
2511     android::hardware::PrintInstanceNameToString);
2512 
2513