/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "SurroundView3dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <array>
#include <set>
#include <thread>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "CameraUtils.h"
#include "sv_3d_params.h"

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::hidl_memory;
using ::android::hidl::memory::V1_0::IMemory;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

// Raw layout of a single stream configuration entry in the camera metadata;
// each entry consists of six consecutive int32 values.
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

// Number of int32 elements per stream configuration entry, used to step
// through the metadata array.
static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
static const uint8_t kGrayColor = 128;
static const int kNumFrames = 4;
static const int kNumChannels = 4;

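// FramesHandler subscribes to the EVS camera stream and copies each
// synchronized set of camera frames into the session's input buffers, then
// signals the processing thread that a new set is ready.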
SurroundView3dSession::FramesHandler::FramesHandler(
    sp<IEvsCamera> pCamera, sp<SurroundView3dSession> pSession)
    : mCamera(pCamera),
      mSession(pSession) {}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame(
    const BufferDesc_1_0& bufDesc_1_0) {
    LOG(INFO) << "Ignoring a frame delivered from the v1.0 EVS service.";
    mCamera->doneWithFrame(bufDesc_1_0);

    return {};
}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->mSequenceId++;

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        if (mSession->mProcessingEvsFrames) {
            LOG(WARNING) << "EVS frames are still being processed. Skipping frames: "
                         << mSession->mSequenceId;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        }
    }

    if (buffers.size() != kNumFrames) {
        LOG(ERROR) << "The number of incoming frames is " << buffers.size()
                   << ", which differs from the number " << kNumFrames
                   << " specified in the config file";
        // Return the buffers to EVS so they are not leaked.
        mCamera->doneWithFrame_1_1(buffers);
        return {};
    }

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        for (int i = 0; i < kNumFrames; i++) {
            LOG(DEBUG) << "Copying buffer No." << i
                       << " to Surround View Service";
            mSession->copyFromBufferToPointers(buffers[i],
                                               mSession->mInputPointers[i]);
        }
    }

    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready.
    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        mSession->mProcessingEvsFrames = true;
    }
    mSession->mFramesSignal.notify_all();

    return {};
}

Return<void> SurroundView3dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED:
            LOG(INFO) << "Received a STREAM_STOPPED event from EVS.";

            // TODO(b/158339680): There is currently an issue in the EVS
            // reference implementation that prevents the STREAM_STOPPED event
            // from being delivered properly. When the bug is fixed, we should
            // handle this event properly in case the EVS stream is stopped
            // unexpectedly.
            break;

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // The events below are ignored in the reference implementation.
        case EvsEventType::STREAM_STARTED:
            [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
            [[fallthrough]];
        case EvsEventType::TIMEOUT:
            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return {};
}

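// Wraps the RGBA frame received from EVS in a GraphicBuffer, locks it for CPU
// access, and copies the pixel data into the RGB CPU buffer that the Surround
// View core library reads from.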
bool SurroundView3dSession::copyFromBufferToPointers(
    BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {

    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc*>(&buffer.buffer.description);

    // Create a GraphicBuffer from the existing handle.
    sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
        buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
        pDesc->height, pDesc->format, pDesc->layers,
        GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);

    if (inputBuffer == nullptr) {
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
        // Returning false tells the caller that the input buffers were not
        // updated for this frame.
        return false;
    } else {
        LOG(INFO) << "Managed to allocate GraphicBuffer with"
                  << " width: " << pDesc->width
                  << " height: " << pDesc->height
                  << " format: " << pDesc->format
                  << " stride: " << pDesc->stride;
    }

    // Lock the input GraphicBuffer and map it to a pointer. If we failed to
    // lock, return false.
    void* inputDataPtr;
    inputBuffer->lock(
        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
        &inputDataPtr);
    if (!inputDataPtr) {
        LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
        inputBuffer->unlock();
        return false;
    } else {
        LOG(INFO) << "Managed to get read access to GraphicBuffer";
    }

    int stride = pDesc->stride;

    // readPtr comes from EVS and has 4 channels (RGBA).
    uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);

    // writePtr has 3 channels (RGB), since that is what the SV core lib expects.
    uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);

    for (int i = 0; i < pDesc->width; i++) {
        for (int j = 0; j < pDesc->height; j++) {
            writePtr[(i + j * stride) * 3 + 0] =
                readPtr[(i + j * stride) * 4 + 0];
            writePtr[(i + j * stride) * 3 + 1] =
                readPtr[(i + j * stride) * 4 + 1];
            writePtr[(i + j * stride) * 3 + 2] =
                readPtr[(i + j * stride) * 4 + 2];
        }
    }
    LOG(INFO) << "Brute force copying finished";

    // Release the CPU mapping of the input buffer.
    inputBuffer->unlock();

    return true;
}

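// Runs on the dedicated processing thread. Waits until FramesHandler signals
// that a new set of EVS frames has been copied (mProcessingEvsFrames), renders
// one surround view frame, then clears the flag so the next set can be
// accepted.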
void SurroundView3dSession::processFrames() {
    if (mSurroundView->Start3dPipeline()) {
        LOG(INFO) << "Start3dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start3dPipeline failed";
        return;
    }

    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
        }

        handleFrames(mSequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mProcessingEvsFrames = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView3dSession::SurroundView3dSession(sp<IEvsEnumerator> pEvs,
                                             VhalHandler* vhalHandler,
                                             AnimationModule* animationModule,
                                             IOModuleConfig* pConfig) :
      mEvs(pEvs),
      mStreamState(STOPPED),
      mVhalHandler(vhalHandler),
      mAnimationModule(animationModule),
      mIOModuleConfig(pConfig) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView3dSession::~SurroundView3dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Wait for the process thread to finish the buffered frames.
    mProcessThread.join();

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession.
Return<SvResult> SurroundView3dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There is an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is "
                   << "already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (mViews.empty()) {
        LOG(ERROR) << "No views have been set for the current Surround View "
                   << "3d session. Please call setViews before starting "
                   << "the stream.";
        return SvResult::VIEW_NOT_SET;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    mSequenceId = 0;
    startEvs();

    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->startPropertiesUpdate()) {
            LOG(WARNING) << "VhalHandler cannot be started properly";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in the
    // EVS reference implementation yet. Once implemented, this logic should
    // be moved to the EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);
    mProcessingEvsFrames = false;

    // Start the frame generation thread.
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView3dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mVhalHandler != nullptr) {
        mVhalHandler->stopPropertiesUpdate();
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames.
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously.
        mCamera->stopVideoStream();
    }

    return {};
}

Return<void> SurroundView3dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mFramesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView3dSession follow.
Return<SvResult> SurroundView3dSession::setViews(
    const hidl_vec<View3d>& views) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mViews.resize(views.size());
    for (int i = 0; i < views.size(); i++) {
        mViews[i] = views[i];
    }

    return SvResult::OK;
}

Return<SvResult> SurroundView3dSession::set3dConfig(const Sv3dConfig& sv3dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv3dConfig.width <= 0 || sv3dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    if (sv3dConfig.height <= 0 || sv3dConfig.height > 4096) {
        LOG(WARNING) << "The height of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv3dConfig.width;
    mConfig.height = sv3dConfig.height;
    mConfig.carDetails = sv3dConfig.carDetails;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::get3dConfig(get3dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

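// Validates the overlay data passed by the client. The shared memory is
// expected to contain, for each entry in overlaysMemoryDesc, a 2-byte overlay
// id followed by verticesCount vertices of 16 bytes each, in the same order
// as the descriptors.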
bool VerifyOverlayData(const OverlaysData& overlaysData) {
    // Check that the size of the shared memory matches overlaysMemoryDesc.
    const int kVertexSize = 16;
    const int kIdSize = 2;
    int memDescSize = 0;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        memDescSize += kIdSize + kVertexSize * overlayMemDesc.verticesCount;
    }
    if (memDescSize != overlaysData.overlaysMemory.size()) {
        LOG(ERROR) << "Shared memory and overlaysMemoryDesc size mismatch.";
        return false;
    }

    // Map the memory.
    sp<IMemory> pSharedMemory = mapMemory(overlaysData.overlaysMemory);
    if (pSharedMemory == nullptr) {
        LOG(ERROR) << "mapMemory failed.";
        return false;
    }

    // Get the data pointer.
    uint8_t* pData = static_cast<uint8_t*>(
        static_cast<void*>(pSharedMemory->getPointer()));
    if (pData == nullptr) {
        LOG(ERROR) << "Shared memory getPointer() failed.";
        return false;
    }

    int idOffset = 0;
    set<uint16_t> overlayIdSet;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {

        if (overlayIdSet.find(overlayMemDesc.id) != overlayIdSet.end()) {
            LOG(ERROR) << "Duplicate id within memory descriptor.";
            return false;
        }
        overlayIdSet.insert(overlayMemDesc.id);

        if (overlayMemDesc.verticesCount < 3) {
            LOG(ERROR) << "Less than 3 vertices.";
            return false;
        }

        if (overlayMemDesc.overlayPrimitive == OverlayPrimitive::TRIANGLES &&
            overlayMemDesc.verticesCount % 3 != 0) {
            LOG(ERROR) << "Triangles primitive does not have a vertex count "
                       << "that is a multiple of 3.";
            return false;
        }

        const uint16_t overlayId = *((uint16_t*)(pData + idOffset));

        if (overlayId != overlayMemDesc.id) {
            LOG(ERROR) << "Overlay id mismatch "
                       << overlayId
                       << ", "
                       << overlayMemDesc.id;
            return false;
        }

        idOffset += kIdSize + (kVertexSize * overlayMemDesc.verticesCount);
    }

    return true;
}

// TODO(b/150412555): the overlay related methods are incomplete.
Return<SvResult> SurroundView3dSession::updateOverlays(
    const OverlaysData& overlaysData) {

    if (!VerifyOverlayData(overlaysData)) {
        LOG(ERROR) << "VerifyOverlayData failed.";
        return SvResult::INVALID_ARG;
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::projectCameraPointsTo3dSurface(
    const hidl_vec<Point2dInt>& cameraPoints, const hidl_string& cameraId,
    projectCameraPointsTo3dSurface_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    bool cameraIdFound = false;
    int cameraIndex = 0;
    std::vector<Point3dFloat> points3d;

    // Note: mEvsCameraIds must be in the order front, right, rear, left.
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(DEBUG) << "Camera id found for projection: " << cameraId;
            break;
        }
        cameraIndex++;
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found for projection: " << cameraId;
        _hidl_cb(points3d);
        return {};
    }

    for (const auto& cameraPoint : cameraPoints) {
        Point3dFloat point3d = {false, 0.0, 0.0, 0.0};

        // Verify that the camera point is within the camera resolution bounds.
        point3d.isValid = (cameraPoint.x >= 0 && cameraPoint.x < mConfig.width &&
                           cameraPoint.y >= 0 && cameraPoint.y < mConfig.height);
        if (!point3d.isValid) {
            LOG(WARNING) << "Camera point (" << cameraPoint.x << ", " << cameraPoint.y
                         << ") is out of camera resolution bounds.";
            points3d.push_back(point3d);
            continue;
        }

        // Project the point using the mSurroundView core library.
        const Coordinate2dInteger camCoord(cameraPoint.x, cameraPoint.y);
        Coordinate3dFloat projPoint3d(0.0, 0.0, 0.0);
        point3d.isValid =
            mSurroundView->GetProjectionPointFromRawCameraToSurroundView3d(camCoord,
                                                                           cameraIndex,
                                                                           &projPoint3d);
        // Convert projPoint3d in meters to point3d, which is in millimeters.
        point3d.x = projPoint3d.x * 1000.0;
        point3d.y = projPoint3d.y * 1000.0;
        point3d.z = projPoint3d.z * 1000.0;
        points3d.push_back(point3d);
    }
    _hidl_cb(points3d);
    return {};
}

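// Renders a single 3d surround view frame for the given sequence id into
// mSvTexture and delivers it to the registered client stream.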
bool SurroundView3dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Currently only one set of EVS input frames and one
    // SV output frame is supported. Implement a buffer queue for both of them.
    {
        scoped_lock<mutex> lock(mAccessLock);

        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    // If the width/height was changed, re-allocate the data pointer.
    if (mOutputWidth != mConfig.width
        || mOutputHeight != mConfig.height) {
        LOG(DEBUG) << "Config changed. Re-allocate memory. "
                   << "Old width: "
                   << mOutputWidth
                   << ", old height: "
                   << mOutputHeight
                   << "; new width: "
                   << mConfig.width
                   << ", new height: "
                   << mConfig.height;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mConfig.height;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGBA;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update3dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGBA_8888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    // TODO(b/150412555): do not use setViews for frame generation, since
    // there is a discrepancy between the HIDL APIs and core lib APIs.
    array<array<float, 4>, 4> matrix;

    // TODO(b/150412555): use hard-coded views for now. Change the view every
    // frame.
    int recViewId = sequenceId % 16;
    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
            matrix[i][j] = kRecViews[recViewId][i * 4 + j];
        }
    }

    // Get the latest VHAL property values.
    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->getPropertyValues(&mPropertyValues)) {
            LOG(ERROR) << "Failed to get property values";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    vector<AnimationParam> params;
    if (mAnimationModule != nullptr) {
        params = mAnimationModule->getUpdatedAnimationParams(mPropertyValues);
    } else {
        LOG(WARNING) << "AnimationModule is null. Ignored";
    }

    if (!params.empty()) {
        mSurroundView->SetAnimations(params);
    } else {
        LOG(INFO) << "AnimationParams is empty. Ignored";
    }

    if (mSurroundView->Get3dSurroundView(
            mInputPointers, matrix, &mOutputPointer)) {
        LOG(INFO) << "Get3dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get3dSurroundView failed. "
                   << "Using memset to initialize to gray.";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the
    // same as the width. For example, when the output width is 1920, the
    // stride may be 2048. So we'd better copy the data line by line instead
    // of using a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(INFO) << "memcpy finished!";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: " << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = 0;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc*>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGBA_8888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}

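// Performs one-time setup for the session: opens the EVS camera, creates the
// core library instance with the static configuration, and allocates the CPU
// input/output buffers and the GPU texture used for rendering.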
bool SurroundView3dSession::initialize() {
    lock_guard<mutex> lock(mAccessLock, adopt_lock);

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for the 3d session";
        return false;
    }

    // TODO(b/150412555): ask the core-lib team to add an API description for
    // the "Create" method in the .h file.
    // The Create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(
            mCameraParams,
            mIOModuleConfig->sv2dConfig.sv2dParams,
            mIOModuleConfig->sv3dConfig.sv3dParams,
            GetUndistortionScales(),
            mIOModuleConfig->sv2dConfig.carBoundingBox,
            mIOModuleConfig->carModelConfig.carModel.texturesMap,
            mIOModuleConfig->carModelConfig.carModel.partsMap);
    mSurroundView->SetStaticData(params);

    mInputPointers.resize(kNumFrames);
    for (int i = 0; i < kNumFrames; i++) {
        mInputPointers[i].width = mCameraParams[i].size.width;
        mInputPointers[i].height = mCameraParams[i].size.height;
        mInputPointers[i].format = Format::RGB;
        mInputPointers[i].cpu_data_pointer =
            (void*)new uint8_t[mInputPointers[i].width *
                               mInputPointers[i].height *
                               kNumChannels];
    }
    LOG(INFO) << "Allocated " << kNumFrames << " input pointers";

    mOutputWidth = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.width;
    mOutputHeight = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.height = mOutputHeight;
    mConfig.carDetails = SvQuality::HIGH;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = Format::RGBA;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGBA_8888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    mIsInitialized = true;
    return true;
}

bool SurroundView3dSession::setupEvs() {
    // Read the camera related information from the config object.
    const string evsGroupId = mIOModuleConfig->cameraConfig.evsGroupId;

    // Setup for EVS.
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1(
        [this, evsGroupId](hidl_vec<CameraDesc> cameraList) {
            LOG(INFO) << "Camera list callback received " << cameraList.size();
            for (auto&& cam : cameraList) {
                LOG(INFO) << "Found camera " << cam.v1.cameraId;
                if (cam.v1.cameraId == evsGroupId) {
                    mCameraDesc = cam;
                }
            }
        });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // the RGBA8888 format.
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
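    // Each available stream configuration is stored in the metadata as a run
    // of int32 values matching RawStreamConfig (id, width, height, format,
    // direction, framerate), so the loop below advances by kStreamCfgSz
    // elements per configuration.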
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t*>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in the metadata.
        RawStreamConfig* ptr = reinterpret_cast<RawStreamConfig*>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants the input data in the format
                    // below.
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    map<string, AndroidCameraParams> cameraIdToAndroidParameters;
    for (const auto& id : mIOModuleConfig->cameraConfig.evsCameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    mCameraParams =
        convertToSurroundViewCameraParams(cameraIdToAndroidParameters);

    for (auto& camera : mCameraParams) {
        camera.size.width = targetCfg->width;
        camera.size.height = targetCfg->height;
        camera.circular_fov = 179;
    }

    return true;
}

bool SurroundView3dSession::startEvs() {
    mFramesHandler = new FramesHandler(mCamera, this);
    Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
    if (result != EvsResult::OK) {
        LOG(ERROR) << "Failed to start video stream";
        return false;
    } else {
        LOG(INFO) << "Video stream was started successfully";
    }

    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android