1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "VirtualCamera.h"
18 #include "HalCamera.h"
19 #include "Enumerator.h"
20
21 #include <android/hardware_buffer.h>
22 #include <android-base/file.h>
23 #include <android-base/logging.h>
24 #include <android-base/stringprintf.h>
25
26 using ::android::base::StringAppendF;
27 using ::android::base::StringPrintf;
28 using ::android::base::WriteStringToFd;
29 using ::android::hardware::automotive::evs::V1_0::DisplayState;
30
31
32 namespace android {
33 namespace automotive {
34 namespace evs {
35 namespace V1_1 {
36 namespace implementation {
37
38
VirtualCamera(const std::vector<sp<HalCamera>> & halCameras)39 VirtualCamera::VirtualCamera(const std::vector<sp<HalCamera>>& halCameras) :
40 mStreamState(STOPPED) {
41 for (auto&& cam : halCameras) {
42 mHalCamera.try_emplace(cam->getId(), cam);
43 }
44 }
45
46
// Releases any frames the client still holds and detaches from the backing
// hardware cameras via shutdown().
VirtualCamera::~VirtualCamera() {
    shutdown();
}
50
51
// Tears down an active stream: stops frame delivery, returns any buffers the
// (presumably dead) client was still holding to the hardware cameras,
// retires this client as master, joins the capture thread, and drops all
// camera references.  A no-op unless the stream is currently RUNNING.
void VirtualCamera::shutdown() {
    // In normal operation, the stream should already be stopped by the time we get here
    if (mStreamState == RUNNING) {
        // Note that if we hit this case, no terminating frame will be sent to the client,
        // but they're probably already dead anyway.
        LOG(WARNING) << "Virtual camera being shutdown while stream is running";

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        for (auto&& [key, hwCamera] : mHalCamera) {
            // Entries are weak references; the underlying device may be gone already
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera == nullptr) {
                LOG(WARNING) << "Camera device " << key << " is not alive.";
                continue;
            }

            // NOTE: operator[] default-constructs an empty queue for keys that
            // never received a frame, which is harmless here.
            if (mFramesHeld[key].size() > 0) {
                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";

                // Return to the underlying hardware camera any buffers the client was holding
                for (auto&& heldBuffer : mFramesHeld[key]) {
                    // Tell our parent that we're done with this buffer
                    pHwCamera->doneWithFrame(heldBuffer);
                }
                mFramesHeld[key].clear();
            }

            // Retire from a master client
            pHwCamera->unsetMaster(this);

            // Give the underlying hardware camera the heads up that it might be time to stop
            pHwCamera->clientStreamEnding(this);
        }

        // Join a capture thread
        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }

        mFramesHeld.clear();

        // Drop our reference to our associated hardware camera
        mHalCamera.clear();
    }
}
98
99
getHalCameras()100 std::vector<sp<HalCamera>> VirtualCamera::getHalCameras() {
101 std::vector<sp<HalCamera>> cameras;
102 for (auto&& [key, cam] : mHalCamera) {
103 auto ptr = cam.promote();
104 if (ptr != nullptr) {
105 cameras.emplace_back(ptr);
106 }
107 }
108
109 return cameras;
110 }
111
112
// Accepts a new frame from one of the backing hardware cameras and forwards
// it to the client.  A frame is declined (returns false) when the stream is
// stopped or the client already holds mFramesAllowed frames for that device;
// in the latter case a FRAME_DROPPED event is sent to v1.1 clients.
// Returns true iff the frame was recorded (and, for v1.0 clients or when no
// capture thread is running, delivered).
bool VirtualCamera::deliverFrame(const BufferDesc_1_1& bufDesc) {
    if (mStreamState == STOPPED) {
        // A stopped stream gets no frames
        LOG(ERROR) << "A stopped stream should not get any frames";
        return false;
    } else if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
        // Indicate that we declined to send the frame to the client because they're at quota
        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
                  << " of " << mFramesAllowed;

        if (mStream_1_1 != nullptr) {
            // Report a frame drop to v1.1 client.
            EvsEventDesc event;
            event.deviceId = bufDesc.deviceId;
            event.aType = EvsEventType::FRAME_DROPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream event";
            }
        }

        return false;
    } else {
        // Keep a record of this frame so we can clean up if we have to in case of client death
        mFramesHeld[bufDesc.deviceId].emplace_back(bufDesc);

        // v1.0 client uses an old frame-delivery mechanism.
        if (mStream_1_1 == nullptr) {
            // Forward a frame to v1.0 client: repackage the v1.1 descriptor's
            // AHardwareBuffer description into the flat v1.0 buffer fields.
            BufferDesc_1_0 frame_1_0 = {};
            const AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
            frame_1_0.width = pDesc->width;
            frame_1_0.height = pDesc->height;
            frame_1_0.format = pDesc->format;
            frame_1_0.usage = pDesc->usage;
            frame_1_0.stride = pDesc->stride;
            frame_1_0.memHandle = bufDesc.buffer.nativeHandle;
            frame_1_0.pixelSize = bufDesc.pixelSize;
            frame_1_0.bufferId = bufDesc.bufferId;

            mStream->deliverFrame(frame_1_0);
        } else if (!mCaptureThread.joinable()) {
            // The capture thread is absent only when creating a sw sync
            // timeline failed (see startVideoStream), so deliver this frame
            // to the v1.1 client directly instead of via that thread.
            if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
                // Pass this buffer through to our client
                hardware::hidl_vec<BufferDesc_1_1> frames;
                frames.resize(1);
                auto pHwCamera = mHalCamera.begin()->second.promote();
                if (pHwCamera != nullptr) {
                    frames[0] = mFramesHeld[mHalCamera.begin()->first].back();
                }

                mStream_1_1->deliverFrame_1_1(frames);
            }
        }

        return true;
    }
}
174
175
// Handles a stream event coming up from a backing hardware camera.  On
// STREAM_STOPPED the local state machine moves to STOPPED and a v1.0 client
// receives its end-of-stream null frame.  Every event is then forwarded
// verbatim to a v1.1 client (v1.0 clients only understand the null frame).
// Returns false only if forwarding to the v1.1 client fails.
bool VirtualCamera::notify(const EvsEventDesc& event) {
    switch(event.aType) {
        case EvsEventType::STREAM_STOPPED:
            if (mStreamState != STOPPING) {
                // Warn if we got an unexpected stream termination
                LOG(WARNING) << "Stream unexpectedly stopped, current status "
                             << mStreamState;
            }

            // Mark the stream as stopped.
            mStreamState = STOPPED;

            if (mStream_1_1 == nullptr) {
                // Send a null frame instead, for v1.0 client
                auto result = mStream->deliverFrame({});
                if (!result.isOk()) {
                    LOG(ERROR) << "Error delivering end of stream marker";
                }
            }
            break;

        // v1.0 client will ignore all other events.
        case EvsEventType::PARAMETER_CHANGED:
            LOG(DEBUG) << "A camera parameter " << event.payload[0]
                       << " is set to " << event.payload[1];
            break;

        case EvsEventType::MASTER_RELEASED:
            LOG(DEBUG) << "The master client has been released";
            break;

        default:
            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
            break;
    }

    if (mStream_1_1 != nullptr) {
        // Forward a received event to the v1.1 client
        auto result = mStream_1_1->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Failed to forward an event";
            return false;
        }
    }

    return true;
}
223
224
225 // Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
getCameraInfo(getCameraInfo_cb info_cb)226 Return<void> VirtualCamera::getCameraInfo(getCameraInfo_cb info_cb) {
227 // Straight pass through to hardware layer
228 if (mHalCamera.size() > 1) {
229 LOG(ERROR) << __FUNCTION__
230 << " must NOT be called on a logical camera object.";
231 info_cb({});
232 return Void();
233 }
234
235 auto halCamera = mHalCamera.begin()->second.promote();
236 if (halCamera != nullptr) {
237 return halCamera->getHwCamera()->getCameraInfo(info_cb);
238 } else {
239 info_cb({});
240 return Void();
241 }
242 }
243
244
// Resizes this client's frame quota.  Every backing hardware camera is asked
// to grow (or shrink) its buffer pool by the delta between the requested
// count and the current allowance; if any camera refuses, the cameras that
// already changed are rolled back and BUFFER_NOT_AVAILABLE is returned.
Return<EvsResult> VirtualCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    // How many buffers are we trying to add (or remove if negative)
    // NOTE(review): unsigned subtraction narrowed to int -- relies on the
    // usual two's-complement conversion to produce a negative delta when
    // bufferCount < mFramesAllowed; confirm this matches the intended range.
    int bufferCountChange = bufferCount - mFramesAllowed;

    // Ask our parent for more buffers
    bool result = true;
    std::vector<sp<HalCamera>> changedCameras;
    for (auto&& [key, hwCamera] : mHalCamera) {
        // Weak reference; skip devices that have gone away
        auto pHwCam = hwCamera.promote();
        if (pHwCam == nullptr) {
            continue;
        }

        result = pHwCam->changeFramesInFlight(bufferCountChange);
        if (!result) {
            LOG(ERROR) << key
                       << ": Failed to change buffer count by " << bufferCountChange
                       << " to " << bufferCount;
            break;
        }

        // Remember which cameras succeeded so we can undo them on failure
        changedCameras.emplace_back(pHwCam);
    }

    // Update our notion of how many frames we're allowed
    mFramesAllowed = bufferCount;

    if (!result) {
        // Rollback changes because we failed to update all cameras
        for (auto&& hwCamera : changedCameras) {
            LOG(WARNING) << "Rollback a change on " << hwCamera->getId();
            hwCamera->changeFramesInFlight(-bufferCountChange);
        }

        // Restore the original buffer count
        mFramesAllowed -= bufferCountChange;
        return EvsResult::BUFFER_NOT_AVAILABLE;
    } else {
        return EvsResult::OK;
    }
}
286
287
startVideoStream(const::android::sp<IEvsCameraStream_1_0> & stream)288 Return<EvsResult> VirtualCamera::startVideoStream(const ::android::sp<IEvsCameraStream_1_0>& stream) {
289 // We only support a single stream at a time
290 if (mStreamState != STOPPED) {
291 LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
292 return EvsResult::STREAM_ALREADY_RUNNING;
293 }
294
295 // Validate our held frame count is starting out at zero as we expect
296 assert(mFramesHeld.size() == 0);
297
298 // Record the user's callback for use when we have a frame ready
299 mStream = stream;
300 mStream_1_1 = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
301 if (mStream_1_1 == nullptr) {
302 LOG(INFO) << "Start video stream for v1.0 client.";
303 } else {
304 LOG(INFO) << "Start video stream for v1.1 client.";
305 }
306
307 mStreamState = RUNNING;
308
309 // Tell the underlying camera hardware that we want to stream
310 auto iter = mHalCamera.begin();
311 while (iter != mHalCamera.end()) {
312 auto pHwCamera = iter->second.promote();
313 if (pHwCamera == nullptr) {
314 LOG(ERROR) << "Failed to start a video stream on " << iter->first;
315 continue;
316 }
317
318 LOG(INFO) << __FUNCTION__
319 << " starts a video stream on " << iter->first;
320 Return<EvsResult> result = pHwCamera->clientStreamStarting();
321 if ((!result.isOk()) || (result != EvsResult::OK)) {
322 // If we failed to start the underlying stream, then we're not actually running
323 mStream = mStream_1_1 = nullptr;
324 mStreamState = STOPPED;
325
326 // Request to stop streams started by this client.
327 auto rb = mHalCamera.begin();
328 while (rb != iter) {
329 auto ptr = rb->second.promote();
330 if (ptr != nullptr) {
331 ptr->clientStreamEnding(this);
332 }
333 ++rb;
334 }
335 return EvsResult::UNDERLYING_SERVICE_ERROR;
336 }
337 ++iter;
338 }
339
340 // Start a thread that waits on the fence and forwards collected frames
341 // to the v1.1 client.
342 // If the system does not support a sw sync, EVS does not support a logical
343 // camera device and, therefore, VirtualCamera will subscribe only to a
344 // single hw camera.
345 auto pHwCamera = mHalCamera.begin()->second.promote();
346 if (mStream_1_1 != nullptr && pHwCamera != nullptr && pHwCamera->isSyncSupported()) {
347 mCaptureThread = std::thread([this]() {
348 // TODO(b/145466570): With a proper camera hang handler, we may want
349 // to reduce an amount of timeout.
350 constexpr int kFrameTimeoutMs = 5000; // timeout in ms.
351 int64_t lastFrameTimestamp = -1;
352 while (mStreamState == RUNNING) {
353 UniqueFence fence;
354 unsigned count = 0;
355 for (auto&& [key, hwCamera] : mHalCamera) {
356 auto pHwCamera = hwCamera.promote();
357 if (pHwCamera == nullptr) {
358 LOG(WARNING) << "Invalid camera " << key << " is ignored.";
359 continue;
360 }
361
362 UniqueFence another = pHwCamera->requestNewFrame(this, lastFrameTimestamp);
363 if (!another) {
364 LOG(WARNING) << key << " returned an invalid fence.";
365 continue;
366 }
367
368 fence = UniqueFence::Merge("MergedFrameFence",
369 fence,
370 another);
371 ++count;
372 }
373
374 if (fence.Wait(kFrameTimeoutMs) < 0) {
375 // TODO(b/145466570): Replace this temporarily camera hang
376 // handler.
377 PLOG(ERROR) << this << ": Camera hangs?";
378 break;
379 } else if (mStreamState == RUNNING) {
380 // Fetch frames and forward to the client
381 if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
382 // Pass this buffer through to our client
383 hardware::hidl_vec<BufferDesc_1_1> frames;
384 frames.resize(count);
385 unsigned i = 0;
386 for (auto&& [key, hwCamera] : mHalCamera) {
387 auto pHwCamera = hwCamera.promote();
388 if (pHwCamera == nullptr) {
389 continue;
390 }
391
392 const auto frame = mFramesHeld[key].back();
393 if (frame.timestamp > lastFrameTimestamp) {
394 lastFrameTimestamp = frame.timestamp;
395 }
396 frames[i++] = frame;
397 }
398 mStream_1_1->deliverFrame_1_1(frames);
399 }
400 }
401 }
402 });
403 }
404
405 // TODO(changyeon):
406 // Detect and exit if we encounter a stalled stream or unresponsive driver?
407 // Consider using a timer and watching for frame arrival?
408
409 return EvsResult::OK;
410 }
411
412
doneWithFrame(const BufferDesc_1_0 & buffer)413 Return<void> VirtualCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
414 if (buffer.memHandle == nullptr) {
415 LOG(ERROR) << "Ignoring doneWithFrame called with invalid handle";
416 } else if (mFramesHeld.size() > 1) {
417 LOG(ERROR) << __FUNCTION__
418 << " must NOT be called on a logical camera object.";
419 } else {
420 // Find this buffer in our "held" list
421 auto& frameQueue = mFramesHeld.begin()->second;
422 auto it = frameQueue.begin();
423 while (it != frameQueue.end()) {
424 if (it->bufferId == buffer.bufferId) {
425 // found it!
426 break;
427 }
428 ++it;
429 }
430 if (it == frameQueue.end()) {
431 // We should always find the frame in our "held" list
432 LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
433 << buffer.bufferId;
434 } else {
435 // Take this frame out of our "held" list
436 frameQueue.erase(it);
437
438 // Tell our parent that we're done with this buffer
439 auto pHwCamera = mHalCamera.begin()->second.promote();
440 if (pHwCamera != nullptr) {
441 pHwCamera->doneWithFrame(buffer);
442 } else {
443 LOG(WARNING) << "Possible memory leak because a device "
444 << mHalCamera.begin()->first
445 << " is not valid.";
446 }
447 }
448 }
449
450 return Void();
451 }
452
453
// Stops an active stream: signals the client (STREAM_STOPPED event for v1.1,
// null frame for v1.0), transitions to STOPPED, notifies every backing
// hardware camera that this client's stream is ending, and joins the capture
// thread.  A no-op if the stream is not RUNNING.
Return<void> VirtualCamera::stopVideoStream()  {
    if (mStreamState == RUNNING) {
        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Deliver an empty frame to close out the frame stream
        if (mStream_1_1 != nullptr) {
            // v1.1 client waits for a stream stopped event
            EvsEventDesc event;
            event.aType = EvsEventType::STREAM_STOPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream event";
            }
        } else {
            // v1.0 client expects a null frame at the end of the stream
            auto result = mStream->deliverFrame({});
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream marker";
            }
        }

        // Since we are single threaded, no frame can be delivered while this function is running,
        // so we can go directly to the STOPPED state here on the server.
        // Note, however, that there still might be frames already queued that client will see
        // after returning from the client side of this call.
        mStreamState = STOPPED;

        // Give the underlying hardware camera the heads up that it might be time to stop
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera != nullptr) {
                pHwCamera->clientStreamEnding(this);
            }
        }

        // Join a thread
        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }

    }

    return Void();
}
499
500
getExtendedInfo(uint32_t opaqueIdentifier)501 Return<int32_t> VirtualCamera::getExtendedInfo(uint32_t opaqueIdentifier) {
502 if (mHalCamera.size() > 1) {
503 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
504 return 0;
505 }
506
507 // Pass straight through to the hardware device
508 auto pHwCamera = mHalCamera.begin()->second.promote();
509 if (pHwCamera != nullptr) {
510 return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier);
511 } else {
512 LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
513 return 0;
514 }
515 }
516
517
setExtendedInfo(uint32_t opaqueIdentifier,int32_t opaqueValue)518 Return<EvsResult> VirtualCamera::setExtendedInfo(uint32_t opaqueIdentifier, int32_t opaqueValue) {
519 if (mHalCamera.size() > 1) {
520 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
521 return EvsResult::INVALID_ARG;
522 }
523
524 // Pass straight through to the hardware device
525 auto pHwCamera = mHalCamera.begin()->second.promote();
526 if (pHwCamera != nullptr) {
527 return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
528 } else {
529 LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
530 return EvsResult::INVALID_ARG;
531 }
532 }
533
534
535 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
getCameraInfo_1_1(getCameraInfo_1_1_cb info_cb)536 Return<void> VirtualCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb info_cb) {
537 if (mHalCamera.size() > 1) {
538 // Logical camera description is stored in VirtualCamera object.
539 info_cb(*mDesc);
540 return Void();
541 }
542
543 // Straight pass through to hardware layer
544 auto pHwCamera = mHalCamera.begin()->second.promote();
545 if (pHwCamera == nullptr) {
546 // Return an empty list
547 info_cb({});
548 return Void();
549 }
550
551 auto hwCamera_1_1 =
552 IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
553 if (hwCamera_1_1 != nullptr) {
554 return hwCamera_1_1->getCameraInfo_1_1(info_cb);
555 } else {
556 // Return an empty list
557 info_cb({});
558 return Void();
559 }
560 }
561
562
getPhysicalCameraInfo(const hidl_string & deviceId,getPhysicalCameraInfo_cb info_cb)563 Return<void> VirtualCamera::getPhysicalCameraInfo(const hidl_string& deviceId,
564 getPhysicalCameraInfo_cb info_cb) {
565 auto device = mHalCamera.find(deviceId);
566 if (device != mHalCamera.end()) {
567 // Straight pass through to hardware layer
568 auto pHwCamera = device->second.promote();
569 if (pHwCamera != nullptr) {
570 auto hwCamera_1_1 =
571 IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
572 if (hwCamera_1_1 != nullptr) {
573 return hwCamera_1_1->getCameraInfo_1_1(info_cb);
574 } else {
575 LOG(WARNING) << "Failed to promote HW camera to v1.1.";
576 }
577 } else {
578 LOG(WARNING) << "Camera device " << deviceId << " is not alive.";
579 }
580 } else {
581 LOG(WARNING) << " Requested device " << deviceId
582 << " does not back this device.";
583 }
584
585 // Return an empty list
586 info_cb({});
587 return Void();
588 }
589
590
doneWithFrame_1_1(const hardware::hidl_vec<BufferDesc_1_1> & buffers)591 Return<EvsResult> VirtualCamera::doneWithFrame_1_1(
592 const hardware::hidl_vec<BufferDesc_1_1>& buffers) {
593
594 for (auto&& buffer : buffers) {
595 if (buffer.buffer.nativeHandle == nullptr) {
596 LOG(WARNING) << "Ignoring doneWithFrame called with invalid handle";
597 } else {
598 // Find this buffer in our "held" list
599 auto it = mFramesHeld[buffer.deviceId].begin();
600 while (it != mFramesHeld[buffer.deviceId].end()) {
601 if (it->bufferId == buffer.bufferId) {
602 // found it!
603 break;
604 }
605 ++it;
606 }
607 if (it == mFramesHeld[buffer.deviceId].end()) {
608 // We should always find the frame in our "held" list
609 LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
610 << buffer.bufferId;
611 } else {
612 // Take this frame out of our "held" list
613 mFramesHeld[buffer.deviceId].erase(it);
614
615 // Tell our parent that we're done with this buffer
616 auto pHwCamera = mHalCamera[buffer.deviceId].promote();
617 if (pHwCamera != nullptr) {
618 pHwCamera->doneWithFrame(buffer);
619 } else {
620 LOG(WARNING) << "Possible memory leak; "
621 << buffer.deviceId << " is not valid.";
622 }
623 }
624 }
625 }
626
627 return EvsResult::OK;
628 }
629
630
setMaster()631 Return<EvsResult> VirtualCamera::setMaster() {
632 if (mHalCamera.size() > 1) {
633 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
634 return EvsResult::INVALID_ARG;
635 }
636
637 auto pHwCamera = mHalCamera.begin()->second.promote();
638 if (pHwCamera != nullptr) {
639 return pHwCamera->setMaster(this);
640 } else {
641 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
642 return EvsResult::INVALID_ARG;
643 }
644 }
645
646
forceMaster(const sp<IEvsDisplay_1_0> & display)647 Return<EvsResult> VirtualCamera::forceMaster(const sp<IEvsDisplay_1_0>& display) {
648 if (mHalCamera.size() > 1) {
649 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
650 return EvsResult::INVALID_ARG;
651 }
652
653 if (display.get() == nullptr) {
654 LOG(ERROR) << __FUNCTION__
655 << ": Passed display is invalid";
656 return EvsResult::INVALID_ARG;
657 }
658
659 DisplayState state = display->getDisplayState();
660 if (state == DisplayState::NOT_OPEN ||
661 state == DisplayState::DEAD ||
662 state >= DisplayState::NUM_STATES) {
663 LOG(ERROR) << __FUNCTION__
664 << ": Passed display is in invalid state";
665 return EvsResult::INVALID_ARG;
666 }
667
668 auto pHwCamera = mHalCamera.begin()->second.promote();
669 if (pHwCamera != nullptr) {
670 return pHwCamera->forceMaster(this);
671 } else {
672 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
673 return EvsResult::INVALID_ARG;
674 }
675 }
676
677
unsetMaster()678 Return<EvsResult> VirtualCamera::unsetMaster() {
679 if (mHalCamera.size() > 1) {
680 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
681 return EvsResult::INVALID_ARG;
682 }
683
684 auto pHwCamera = mHalCamera.begin()->second.promote();
685 if (pHwCamera != nullptr) {
686 return pHwCamera->unsetMaster(this);
687 } else {
688 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
689 return EvsResult::INVALID_ARG;
690 }
691 }
692
693
getParameterList(getParameterList_cb _hidl_cb)694 Return<void> VirtualCamera::getParameterList(getParameterList_cb _hidl_cb) {
695 if (mHalCamera.size() > 1) {
696 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
697
698 // Return an empty list
699 _hidl_cb({});
700 return Void();
701 }
702
703 // Straight pass through to hardware layer
704 auto pHwCamera = mHalCamera.begin()->second.promote();
705 if (pHwCamera == nullptr) {
706 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
707
708 // Return an empty list
709 _hidl_cb({});
710 return Void();
711 }
712
713 auto hwCamera_1_1 =
714 IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
715 if (hwCamera_1_1 != nullptr) {
716 return hwCamera_1_1->getParameterList(_hidl_cb);
717 } else {
718 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
719 << " does not support a parameter programming.";
720
721 // Return an empty list
722 _hidl_cb({});
723 return Void();
724 }
725 }
726
727
getIntParameterRange(CameraParam id,getIntParameterRange_cb _hidl_cb)728 Return<void> VirtualCamera::getIntParameterRange(CameraParam id,
729 getIntParameterRange_cb _hidl_cb) {
730 if (mHalCamera.size() > 1) {
731 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
732
733 // Return [0, 0, 0]
734 _hidl_cb(0, 0, 0);
735 return Void();
736 }
737
738 // Straight pass through to hardware layer
739 auto pHwCamera = mHalCamera.begin()->second.promote();
740 if (pHwCamera == nullptr) {
741 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
742
743 // Return [0, 0, 0]
744 _hidl_cb(0, 0, 0);
745 return Void();
746 }
747
748 auto hwCamera_1_1 =
749 IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
750 if (hwCamera_1_1 != nullptr) {
751 return hwCamera_1_1->getIntParameterRange(id, _hidl_cb);
752 } else {
753 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
754 << " does not support a parameter programming.";
755
756 // Return [0, 0, 0]
757 _hidl_cb(0, 0, 0);
758 return Void();
759 }
760 return Void();
761 }
762
763
setIntParameter(CameraParam id,int32_t value,setIntParameter_cb _hidl_cb)764 Return<void> VirtualCamera::setIntParameter(CameraParam id,
765 int32_t value,
766 setIntParameter_cb _hidl_cb) {
767 hardware::hidl_vec<int32_t> values;
768 EvsResult status = EvsResult::INVALID_ARG;
769 if (mHalCamera.size() > 1) {
770 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
771 _hidl_cb(status, values);
772 return Void();
773 }
774
775 auto pHwCamera = mHalCamera.begin()->second.promote();
776 if (pHwCamera == nullptr) {
777 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
778 _hidl_cb(status, values);
779 return Void();
780 }
781
782 status = pHwCamera->setParameter(this, id, value);
783
784 values.resize(1);
785 values[0] = value;
786 _hidl_cb(status, values);
787
788 return Void();
789 }
790
791
getIntParameter(CameraParam id,getIntParameter_cb _hidl_cb)792 Return<void> VirtualCamera::getIntParameter(CameraParam id,
793 getIntParameter_cb _hidl_cb) {
794 hardware::hidl_vec<int32_t> values;
795 EvsResult status = EvsResult::INVALID_ARG;
796 if (mHalCamera.size() > 1) {
797 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
798 _hidl_cb(status, values);
799 return Void();
800 }
801
802 auto pHwCamera = mHalCamera.begin()->second.promote();
803 if (pHwCamera == nullptr) {
804 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
805 _hidl_cb(status, values);
806 return Void();
807 }
808
809 int32_t value;
810 status = pHwCamera->getParameter(id, value);
811
812 values.resize(1);
813 values[0] = value;
814 _hidl_cb(status, values);
815
816 return Void();
817 }
818
819
setExtendedInfo_1_1(uint32_t opaqueIdentifier,const hidl_vec<uint8_t> & opaqueValue)820 Return<EvsResult> VirtualCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
821 const hidl_vec<uint8_t>& opaqueValue) {
822 hardware::hidl_vec<int32_t> values;
823 if (mHalCamera.size() > 1) {
824 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
825 return EvsResult::INVALID_ARG;
826 }
827
828 auto pHwCamera = mHalCamera.begin()->second.promote();
829 if (pHwCamera == nullptr) {
830 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
831 return EvsResult::INVALID_ARG;
832 } else {
833 auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
834 if (hwCamera != nullptr) {
835 return hwCamera->setExtendedInfo_1_1(opaqueIdentifier, opaqueValue);
836 } else {
837 LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
838 return EvsResult::INVALID_ARG;
839 }
840 }
841 }
842
843
getExtendedInfo_1_1(uint32_t opaqueIdentifier,getExtendedInfo_1_1_cb _hidl_cb)844 Return<void> VirtualCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
845 getExtendedInfo_1_1_cb _hidl_cb) {
846 hardware::hidl_vec<uint8_t> values;
847 EvsResult status = EvsResult::INVALID_ARG;
848 if (mHalCamera.size() > 1) {
849 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
850 _hidl_cb(status, values);
851 return Void();
852 }
853
854 auto pHwCamera = mHalCamera.begin()->second.promote();
855 if (pHwCamera == nullptr) {
856 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
857 _hidl_cb(status, values);
858 } else {
859 auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
860 if (hwCamera != nullptr) {
861 hwCamera->getExtendedInfo_1_1(opaqueIdentifier, _hidl_cb);
862 } else {
863 LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
864 _hidl_cb(status, values);
865 }
866 }
867
868 return Void();
869 }
870
871
872 Return<void>
importExternalBuffers(const hidl_vec<BufferDesc_1_1> & buffers,importExternalBuffers_cb _hidl_cb)873 VirtualCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
874 importExternalBuffers_cb _hidl_cb) {
875 if (mHalCamera.size() > 1) {
876 LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
877 _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
878 return {};
879 }
880
881 auto pHwCamera = mHalCamera.begin()->second.promote();
882 if (pHwCamera == nullptr) {
883 LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
884 _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
885 return {};
886 }
887
888 int delta = 0;
889 if (!pHwCamera->changeFramesInFlight(buffers, &delta)) {
890 LOG(ERROR) << "Failed to add extenral capture buffers.";
891 _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
892 return {};
893 }
894
895 mFramesAllowed += delta;
896 _hidl_cb(EvsResult::OK, delta);
897 return {};
898 }
899
900
toString(const char * indent) const901 std::string VirtualCamera::toString(const char* indent) const {
902 std::string buffer;
903 StringAppendF(&buffer, "%sLogical camera device: %s\n"
904 "%sFramesAllowed: %u\n"
905 "%sFrames in use:\n",
906 indent, mHalCamera.size() > 1 ? "T" : "F",
907 indent, mFramesAllowed,
908 indent);
909
910 std::string next_indent(indent);
911 next_indent += "\t";
912 for (auto&& [id, queue] : mFramesHeld) {
913 StringAppendF(&buffer, "%s%s: %d\n",
914 next_indent.c_str(),
915 id.c_str(),
916 static_cast<int>(queue.size()));
917 }
918 StringAppendF(&buffer, "%sCurrent stream state: %d\n",
919 indent, mStreamState);
920
921 return buffer;
922 }
923
924
925 } // namespace implementation
926 } // namespace V1_1
927 } // namespace evs
928 } // namespace automotive
929 } // namespace android
930