/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VirtualCamera.h"

#include "Enumerator.h"
#include "HalCamera.h"
#include "ScopedTrace.h"
#include "utils/include/Utils.h"

#include <android-base/file.h>
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
#include <android/hardware_buffer.h>

#include <assert.h>

#include <chrono>

namespace aidl::android::automotive::evs::implementation {

using ::aidl::android::hardware::automotive::evs::BufferDesc;
using ::aidl::android::hardware::automotive::evs::CameraDesc;
using ::aidl::android::hardware::automotive::evs::CameraParam;
using ::aidl::android::hardware::automotive::evs::DisplayState;
using ::aidl::android::hardware::automotive::evs::EvsEventDesc;
using ::aidl::android::hardware::automotive::evs::EvsEventType;
using ::aidl::android::hardware::automotive::evs::EvsResult;
using ::aidl::android::hardware::automotive::evs::IEvsCameraStream;
using ::aidl::android::hardware::automotive::evs::IEvsDisplay;
using ::aidl::android::hardware::automotive::evs::ParameterRange;
using ::aidl::android::hardware::common::NativeHandle;
using ::aidl::android::hardware::graphics::common::HardwareBuffer;
using ::android::base::StringAppendF;
using ::ndk::ScopedAStatus;
using ::std::chrono_literals::operator""s;

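// VirtualCamera is the per-client facade over one or more HalCamera devices.  When it
// is backed by more than one hardware camera, it behaves as a logical camera group.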
VirtualCamera::VirtualCamera(const std::vector<std::shared_ptr<HalCamera>>& halCameras) :
        mStreamState(STOPPED) {
    for (auto&& cam : halCameras) {
        mHalCamera.insert_or_assign(cam->getId(), cam);
    }
}

VirtualCamera::~VirtualCamera() {
    shutdown();
}

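// Accepts buffers the client has finished with, moves them from the per-device "held"
// list to the "used" list, and wakes the buffer-return thread so they can be handed
// back to the originating hardware cameras.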
ScopedAStatus VirtualCamera::doneWithFrame(const std::vector<BufferDesc>& buffers) {
    ScopedTrace trace(__PRETTY_FUNCTION__,
                      buffers.empty() ? std::numeric_limits<int>::min() : buffers[0].bufferId);
    std::lock_guard lock(mMutex);

    for (auto&& buffer : buffers) {
        // Find this buffer in our "held" list
        auto it = std::find_if(mFramesHeld[buffer.deviceId].begin(),
                               mFramesHeld[buffer.deviceId].end(),
                               [id = buffer.bufferId](const BufferDesc& buffer) {
                                   return id == buffer.bufferId;
                               });
        if (it == mFramesHeld[buffer.deviceId].end()) {
            // We should always find the frame in our "held" list
            LOG(WARNING) << "Ignoring doneWithFrame called with unrecognized frame id "
                         << buffer.bufferId;
            continue;
        }

        // Move this frame out of our "held" list
        mFramesUsed[buffer.deviceId].push_back(std::move(*it));
        mFramesHeld[buffer.deviceId].erase(it);
    }

    mReturnFramesSignal.notify_all();
    return ScopedAStatus::ok();
}

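// Forcibly claims the primary-client role on the backing physical camera.  A valid,
// open EVS display handle must be presented as evidence of the caller's privilege;
// logical cameras do not support this operation.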
ScopedAStatus VirtualCamera::forcePrimaryClient(const std::shared_ptr<IEvsDisplay>& display) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    if (display == nullptr) {
        LOG(ERROR) << __FUNCTION__ << ": Passed display is invalid";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    DisplayState state = DisplayState::DEAD;
    auto status = display->getDisplayState(&state);
    if (!status.isOk()) {
        LOG(ERROR) << "Failed to read current display state";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
    }

    auto displayStateRange = ::ndk::enum_range<DisplayState>();
    if (state == DisplayState::NOT_OPEN || state == DisplayState::DEAD ||
        std::find(displayStateRange.begin(), displayStateRange.end(), state) ==
                displayStateRange.end()) {
        LOG(ERROR) << __FUNCTION__ << ": Passed display is in invalid state";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // mHalCamera is guaranteed to have at least one element.
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->forcePrimaryClient(ref<VirtualCamera>());
}

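// Reports the descriptor of this camera.  A logical camera answers from the locally
// stored descriptor; a physical camera passes the request through to the hardware.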
ScopedAStatus VirtualCamera::getCameraInfo(CameraDesc* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        // Logical camera description is stored in VirtualCamera object.
        *_aidl_return = *mDesc;
        return ScopedAStatus::ok();
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        // The hardware camera is no longer alive.
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getCameraInfo(_aidl_return);
}

ScopedAStatus VirtualCamera::getExtendedInfo(int32_t opaqueIdentifier,
                                             std::vector<uint8_t>* value) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier, value);
}

ScopedAStatus VirtualCamera::getIntParameter(CameraParam id, std::vector<int32_t>* value) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getIntParameter(id, value);
}

ScopedAStatus VirtualCamera::getIntParameterRange(CameraParam id, ParameterRange* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getIntParameterRange(id, _aidl_return);
}

ScopedAStatus VirtualCamera::getParameterList(std::vector<CameraParam>* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getParameterList(_aidl_return);
}

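// Reports the descriptor of a single physical camera backing this device; the given
// deviceId must identify one of the cameras this object was created from.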
ScopedAStatus VirtualCamera::getPhysicalCameraInfo(const std::string& deviceId,
                                                   CameraDesc* _aidl_return) {
    auto device = mHalCamera.find(deviceId);
    if (device == mHalCamera.end()) {
        LOG(ERROR) << "Requested device " << deviceId << " does not back this device.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // Straight pass through to hardware layer
    auto pHwCamera = device->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << deviceId << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->getCameraInfo(_aidl_return);
}

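// Registers client-supplied graphics buffers with the backing hardware camera and
// grows this client's frames-in-flight budget by the number of buffers accepted.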
ScopedAStatus VirtualCamera::importExternalBuffers(const std::vector<BufferDesc>& buffers,
                                                   int32_t* _aidl_return) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    int delta = 0;
    if (!pHwCamera->changeFramesInFlight(buffers, &delta)) {
        LOG(ERROR) << "Failed to add external capture buffers.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
    }

    mFramesAllowed += delta;
    *_aidl_return = delta;
    return ScopedAStatus::ok();
}

ScopedAStatus VirtualCamera::pauseVideoStream() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->pauseVideoStream();
}

ScopedAStatus VirtualCamera::resumeVideoStream() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->resumeVideoStream();
}

ScopedAStatus VirtualCamera::setExtendedInfo(int32_t opaqueIdentifier,
                                             const std::vector<uint8_t>& opaqueValue) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
}

ScopedAStatus VirtualCamera::setIntParameter(CameraParam id, int32_t value,
                                             std::vector<int32_t>* effectiveValue) {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    auto status = pHwCamera->setParameter(ref<VirtualCamera>(), id, &value);
    if (status.isOk()) {
        effectiveValue->push_back(value);
    }
    return status;
}

ScopedAStatus VirtualCamera::setPrimaryClient() {
    if (!isValid()) {
        LOG(ERROR) << "No hardware camera is available.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    } else if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (pHwCamera == nullptr) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->setPrimaryClient(ref<VirtualCamera>());
}

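// Adjusts how many frames this client may hold at once.  The change is applied to
// every backing hardware camera; if any of them rejects it, the partial changes are
// rolled back and the previous budget is restored.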
ScopedAStatus VirtualCamera::setMaxFramesInFlight(int32_t bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Given bufferCount = " << bufferCount
                   << " is invalid; it must be greater than zero.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // How many buffers are we trying to add (or remove if negative)
    int bufferCountChange = bufferCount - mFramesAllowed;

    // Ask our parent for more buffers
    bool result = true;
    std::vector<std::shared_ptr<HalCamera>> changedCameras;
    for (auto&& [key, hwCamera] : mHalCamera) {
        auto pHwCamera = hwCamera.lock();
        if (!pHwCamera) {
            continue;
        }

        result = pHwCamera->changeFramesInFlight(bufferCountChange);
        if (!result) {
            LOG(ERROR) << key << ": Failed to change buffer count by " << bufferCountChange
                       << " to " << bufferCount;
            break;
        }

        changedCameras.push_back(std::move(pHwCamera));
    }

    // Update our notion of how many frames we're allowed
    mFramesAllowed = bufferCount;

    if (!result) {
        // Roll back the changes because we failed to update all the cameras
        for (auto&& hwCamera : changedCameras) {
            LOG(WARNING) << "Rolling back a change on " << hwCamera->getId();
            hwCamera->changeFramesInFlight(-bufferCountChange);
        }

        // Restore the original buffer count
        mFramesAllowed -= bufferCountChange;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::BUFFER_NOT_AVAILABLE);
    }

    return ScopedAStatus::ok();
}

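// Begins frame delivery to the given receiver.  Only one stream per VirtualCamera is
// allowed at a time; this call also spawns the capture and buffer-return worker
// threads defined below.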
ScopedAStatus VirtualCamera::startVideoStream(const std::shared_ptr<IEvsCameraStream>& receiver) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    std::lock_guard lock(mMutex);

    if (!receiver) {
        LOG(ERROR) << "Given IEvsCameraStream object is invalid.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::INVALID_ARG);
    }

    // We only support a single stream at a time
    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::STREAM_ALREADY_RUNNING);
    }

    // Validate our held frame count is starting out at zero as we expect
    assert(mFramesHeld.empty());

    // Record the user's callback for use when we have a frame ready
    mStream = receiver;
    mStreamState = RUNNING;

    // Tell the underlying camera hardware that we want to stream
    bool cleanUpAndReturn = true;
    auto iter = mHalCamera.begin();
    while (iter != mHalCamera.end()) {
        std::shared_ptr<HalCamera> pHwCamera = iter->second.lock();
        if (!pHwCamera) {
            LOG(WARNING) << "Camera device " << iter->first << " is not alive; skipping it.";
            ++iter;
            continue;
        }

        LOG(INFO) << __FUNCTION__ << " is starting a video stream on " << iter->first;
        if (!pHwCamera->clientStreamStarting().isOk()) {
            LOG(ERROR) << "Failed to start a video stream on " << iter->first;
            cleanUpAndReturn = true;
            break;
        }

        cleanUpAndReturn = false;
        ++iter;
    }

    if (cleanUpAndReturn) {
        // If we failed to start the underlying stream, then we're not actually running
        mStream = nullptr;
        mStreamState = STOPPED;

        // Request to stop streams started by this client.
        auto rb = mHalCamera.begin();
        while (rb != iter) {
            auto ptr = rb->second.lock();
            if (ptr) {
                ptr->clientStreamEnding(this);
            }
            ++rb;
        }

        return Utils::buildScopedAStatusFromEvsResult(EvsResult::UNDERLYING_SERVICE_ERROR);
    }

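    // The capture thread requests one new frame from every backing hardware camera,
    // waits until they have all arrived (or a timeout expires), and then forwards the
    // bundled frames to the client in a single deliverFrame() call.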
    mCaptureThread = std::thread([this]() {
        // TODO(b/145466570): With a proper camera hang handler, we may want
        // to reduce the timeout.
        constexpr auto kFrameTimeout = 5s;
        int64_t lastFrameTimestamp = -1;
        EvsResult status = EvsResult::OK;
        while (true) {
            ScopedTrace trace("Processing a frame buffer", lastFrameTimestamp);
            std::unique_lock lock(mMutex);
            ::android::base::ScopedLockAssertion assume_lock(mMutex);

            if (mStreamState != RUNNING) {
                // A video stream is stopped while a capture thread is acquiring
                // a lock.
                LOG(DEBUG) << "Requested to stop capturing frames";
                break;
            }

            unsigned count = 0;
            for (auto&& [key, hwCamera] : mHalCamera) {
                std::shared_ptr<HalCamera> pHwCamera = hwCamera.lock();
                if (!pHwCamera) {
                    LOG(WARNING) << "Invalid camera " << key << " is ignored.";
                    continue;
                }

                pHwCamera->requestNewFrame(ref<VirtualCamera>(), lastFrameTimestamp);
                mSourceCameras.insert(pHwCamera->getId());
                ++count;
            }

            if (count < 1) {
                LOG(ERROR) << "No camera is available.";
                status = EvsResult::RESOURCE_NOT_AVAILABLE;
                break;
            }

            if (!mFramesReadySignal.wait_for(lock, kFrameTimeout, [this]() REQUIRES(mMutex) {
                    // Stop waiting if 1) we've been requested to stop capturing new
                    // frames, or 2) we've received frames from every source camera.
                    return mStreamState != RUNNING || mSourceCameras.empty();
                })) {
                // A new frame did not arrive from every source camera before the
                // timer expired.
                LOG(DEBUG) << "Timed out while waiting for new frames";
                status = EvsResult::UNDERLYING_SERVICE_ERROR;
                break;
            }

            if (mStreamState != RUNNING || !mStream) {
                // A video stream is stopped while a capture thread is waiting
                // for a new frame or we have lost a client.
                LOG(DEBUG) << "Requested to stop capturing frames or lost a client";
                break;
            }

            // Fetch frames and forward to the client
            if (mFramesHeld.empty()) {
                // We do not have any frame to forward.
                continue;
            }

            // Pass this buffer through to our client
            std::vector<BufferDesc> frames;
            frames.resize(count);
            unsigned i = 0;
            for (auto&& [key, hwCamera] : mHalCamera) {
                std::shared_ptr<HalCamera> pHwCamera = hwCamera.lock();
                if (!pHwCamera || mFramesHeld[key].empty()) {
                    continue;
                }

                // Duplicate the latest buffer and forward it to the client
                auto frame = Utils::dupBufferDesc(mFramesHeld[key].back(),
                                                  /* doDup= */ true);
                if (frame.timestamp > lastFrameTimestamp) {
                    lastFrameTimestamp = frame.timestamp;
                }
                frames[i++] = std::move(frame);
            }

            if (!mStream->deliverFrame(frames).isOk()) {
                LOG(WARNING) << "Failed to forward frames";
            }
        }

        LOG(DEBUG) << "Exiting a capture thread";
        if (status != EvsResult::OK && mStream) {
            EvsEventDesc event{
                    .aType = status == EvsResult::RESOURCE_NOT_AVAILABLE
                            ? EvsEventType::STREAM_ERROR
                            : EvsEventType::TIMEOUT,
                    .payload = {static_cast<int32_t>(status)},
            };
            if (!mStream->notify(event).isOk()) {
                LOG(WARNING) << "Error delivering a stream event "
                             << static_cast<int32_t>(event.aType);
            }
        }
    });

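    // The buffer-return thread sleeps until doneWithFrame() queues used buffers (or the
    // stream stops) and then hands those buffers back to the originating hardware
    // cameras outside of the lock.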
    mReturnThread = std::thread([this]() {
        while (true) {
            ScopedTrace trace("Returning frame buffers");
            std::unordered_map<std::string, std::vector<BufferDesc>> framesUsed;
            {
                std::unique_lock lock(mMutex);
                ::android::base::ScopedLockAssertion assume_lock(mMutex);

                mReturnFramesSignal.wait(lock, [this]() REQUIRES(mMutex) {
                    return mStreamState != RUNNING || !mFramesUsed.empty();
                });

                if (mStreamState != RUNNING) {
                    // The video stream has been stopped, so this thread is done.
                    LOG(DEBUG) << "Requested to stop returning frame buffers";
                    break;
                }

                for (auto&& [hwCameraId, buffers] : mFramesUsed) {
                    std::vector<BufferDesc> bufferToReturn(std::make_move_iterator(buffers.begin()),
                                                           std::make_move_iterator(buffers.end()));
                    framesUsed.insert_or_assign(hwCameraId, std::move(bufferToReturn));
                }

                mFramesUsed.clear();
            }

            // Tell our parent that we're done with these buffers
            for (auto&& [hwCameraId, buffers] : framesUsed) {
                std::shared_ptr<HalCamera> pHwCamera = mHalCamera[hwCameraId].lock();
                if (!pHwCamera) {
                    LOG(WARNING) << "Possible memory leak; " << hwCameraId << " is not valid.";
                    continue;
                }

                for (auto&& buffer : buffers) {
                    if (!pHwCamera->doneWithFrame(std::move(buffer)).isOk()) {
                        LOG(WARNING) << "Failed to return a buffer " << buffer.bufferId << " to "
                                     << hwCameraId;
                    }
                }
            }
        }

        LOG(DEBUG) << "Exiting a return thread";
    });

    // TODO(b/213108625):
    // Detect and exit if we encounter a stalled stream or unresponsive driver?
    // Consider using a timer and watching for frame arrival?

    return ScopedAStatus::ok();
}

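// Stops the active stream: notifies the client with STREAM_STOPPED, wakes and joins
// the worker threads, and tells every backing hardware camera that this client's
// stream is ending.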
ScopedAStatus VirtualCamera::stopVideoStream() {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    {
        std::lock_guard lock(mMutex);
        if (mStreamState != RUNNING) {
            // No action is required.
            return ScopedAStatus::ok();
        }

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake the capture and buffer-return threads; they will terminate.
        mSourceCameras.clear();
        mFramesReadySignal.notify_all();
        mReturnFramesSignal.notify_all();

        // Deliver the stream-ending notification
        EvsEventDesc event{
                .aType = EvsEventType::STREAM_STOPPED,
        };
        if (mStream && !mStream->notify(event).isOk()) {
            LOG(WARNING) << "Error delivering end of stream event";
        }

        // Since we are single threaded, no frame can be delivered while this function is running,
        // so we can go directly to the STOPPED state here on the server.
        // Note, however, that there still might be frames already queued that the client will see
        // after returning from the client side of this call.
        mStreamState = STOPPED;
    }

    // Give the underlying hardware cameras the heads up that it might be time to stop
    for (auto&& [_, hwCamera] : mHalCamera) {
        auto pHwCamera = hwCamera.lock();
        if (pHwCamera) {
            pHwCamera->clientStreamEnding(this);
        }
    }

    // Join the capture and buffer-return threads.
    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }

    if (mReturnThread.joinable()) {
        mReturnThread.join();
    }

    return ScopedAStatus::ok();
}

ScopedAStatus VirtualCamera::unsetPrimaryClient() {
    if (!isValid()) {
        // Safely ignore a request if no hardware camera is active.
        return ScopedAStatus::ok();
    }

    if (isLogicalCamera()) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::NOT_SUPPORTED);
    }

    auto pHwCamera = mHalCamera.begin()->second.lock();
    if (!pHwCamera) {
        LOG(ERROR) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return Utils::buildScopedAStatusFromEvsResult(EvsResult::RESOURCE_NOT_AVAILABLE);
    }

    return pHwCamera->unsetPrimaryClient(this);
}

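// Tears down this client's state.  Called from the destructor; if a stream is still
// running at this point, any held buffers are returned to the hardware cameras and
// this client is detached from them before the worker threads are joined.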
void VirtualCamera::shutdown() {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    {
        std::lock_guard lock(mMutex);

        // In normal operation, the stream should already be stopped by the time we get here
        if (mStreamState != RUNNING) {
            return;
        }

        // Note that if we hit this case, no terminating frame will be sent to the client,
        // but they're probably already dead anyway.
        LOG(WARNING) << "Virtual camera being shut down while stream is running";

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Return buffers held by this client
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.lock();
            if (!pHwCamera) {
                LOG(WARNING) << "Camera device " << key << " is not alive.";
                continue;
            }

            if (!mFramesHeld[key].empty()) {
                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";

                // Return to the underlying hardware camera any buffers the client was holding
                while (!mFramesHeld[key].empty()) {
                    auto it = mFramesHeld[key].begin();
                    pHwCamera->doneWithFrame(std::move(*it));
                    mFramesHeld[key].erase(it);
                }
            }

            // Retire from the primary client role
            pHwCamera->unsetPrimaryClient(this);

            // Give the underlying hardware camera the heads up that it might be time to stop
            pHwCamera->clientStreamEnding(this);

            // Retire from the participating HW camera's client list
            pHwCamera->disownVirtualCamera(this);
        }

        mFramesHeld.clear();
        mFramesUsed.clear();

        // Wake the capture and buffer-return threads; they will terminate.
        mFramesReadySignal.notify_all();
        mReturnFramesSignal.notify_all();
    }

    // Join the capture and buffer-return threads.
    if (mCaptureThread.joinable()) {
        mCaptureThread.join();
    }

    if (mReturnThread.joinable()) {
        mReturnThread.join();
    }

    // Drop our references to the associated hardware cameras
    mHalCamera.clear();
}

std::vector<std::shared_ptr<HalCamera>> VirtualCamera::getHalCameras() {
    std::vector<std::shared_ptr<HalCamera>> cameras;
    for (auto&& [key, cam] : mHalCamera) {
        auto ptr = cam.lock();
        if (ptr) {
            cameras.push_back(std::move(ptr));
        }
    }

    return cameras;
}

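// Called by a backing HalCamera when a new frame is available.  Returns false if the
// frame is rejected, either because the stream is stopped or because this client is
// already holding its full quota of frames.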
bool VirtualCamera::deliverFrame(const BufferDesc& bufDesc) {
    ScopedTrace trace(__PRETTY_FUNCTION__, bufDesc.bufferId);
    std::lock_guard lock(mMutex);

    if (mStreamState == STOPPED) {
        // A stopped stream gets no frames
        LOG(ERROR) << "A stopped stream should not get any frames";
        return false;
    }

    if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
        // Indicate that we declined to send the frame to the client because they're at quota
        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
                  << " of " << mFramesAllowed;

        if (mStream) {
            // Report a frame drop to the client.
            EvsEventDesc event;
            event.deviceId = bufDesc.deviceId;
            event.aType = EvsEventType::FRAME_DROPPED;
            if (!mStream->notify(event).isOk()) {
                LOG(WARNING) << "Error delivering a frame-drop event";
            }
        }

        // Mark that a new frame has arrived even though it was not accepted
        mSourceCameras.erase(bufDesc.deviceId);
        mFramesReadySignal.notify_all();

        return false;
    }

    // Keep a record of this frame so we can clean up if we have to in case of client death
    mFramesHeld[bufDesc.deviceId].push_back(Utils::dupBufferDesc(bufDesc, /* doDup= */ true));

    // v1.0 client uses an old frame-delivery mechanism.
    if (mCaptureThread.joinable()) {
        // Keep forwarding frames as long as a capture thread is alive
        // Notify a new frame receipt
        mSourceCameras.erase(bufDesc.deviceId);
        mFramesReadySignal.notify_all();
    }

    return true;
}

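// Called by a backing HalCamera to forward stream events.  An unexpected
// STREAM_STOPPED triggers local cleanup via stopVideoStream(); other events are
// passed through to the client's stream callback.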
bool VirtualCamera::notify(const EvsEventDesc& event) {
    ScopedTrace trace(__PRETTY_FUNCTION__, static_cast<int>(event.aType));
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED: {
            {
                std::lock_guard lock(mMutex);
                if (mStreamState != RUNNING) {
                    // We're not actively consuming a video stream, or we're already in
                    // the process of stopping one.
                    return true;
                }

                // Warn if we got an unexpected stream termination
                LOG(WARNING) << "Stream unexpectedly stopped, current status " << mStreamState;
            }

            // Clean up the resources and forward an event to the client
            stopVideoStream();
            return true;
        }

        // v1.0 client will ignore all other events.
        case EvsEventType::PARAMETER_CHANGED:
            LOG(DEBUG) << "A camera parameter " << event.payload[0] << " is set to "
                       << event.payload[1];
            break;

        case EvsEventType::MASTER_RELEASED:
            LOG(DEBUG) << "The primary client has been released";
            break;

        default:
            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
            break;
    }

    // Forward a received event to the v1.1 client
    if (mStream && !mStream->notify(event).isOk()) {
        LOG(ERROR) << "Failed to forward an event";
        return false;
    }

    return true;
}

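// Builds a human-readable summary of this client's state for debug dumps.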
std::string VirtualCamera::toString(const char* indent) const {
    std::string buffer;
    StringAppendF(&buffer,
                  "%sLogical camera device: %s\n"
                  "%sFramesAllowed: %u\n"
                  "%sFrames in use:\n",
                  indent, mHalCamera.size() > 1 ? "T" : "F", indent, mFramesAllowed, indent);

    std::string next_indent(indent);
    next_indent += "\t";
    for (auto&& [id, queue] : mFramesHeld) {
        StringAppendF(&buffer, "%s%s: %d\n", next_indent.data(), id.data(),
                      static_cast<int>(queue.size()));
    }
    StringAppendF(&buffer, "%sCurrent stream state: %d\n", indent, mStreamState);

    return buffer;
}

}  // namespace aidl::android::automotive::evs::implementation