1 /*
2 * Copyright 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
#include "SurroundView2dSession.h"

#include <android-base/logging.h>
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <android/hardware_buffer.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <chrono>
#include <thread>

#include "CameraUtils.h"
29
30 using ::android::hardware::automotive::evs::V1_0::EvsResult;
31 using ::android::hardware::camera::device::V3_2::Stream;
32
33 using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;
34
35 namespace android {
36 namespace hardware {
37 namespace automotive {
38 namespace sv {
39 namespace V1_0 {
40 namespace implementation {
41
// TODO(b/158479099): There are a lot of redundant code between 2d and 3d.
// Decrease the degree of redundancy.
// Layout of one stream configuration inside the flat int32 metadata array
// returned for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

// Number of int32_t entries per RawStreamConfig. The parsing loop in
// setupEvs() walks the metadata's int32 array in units of this count, so it
// must be the element count, not the byte size: using sizeof alone made the
// loop index advance 4x too fast and skip 3 out of every 4 configurations.
static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
static const uint8_t kGrayColor = 128;   // Fallback fill for failed renders.
static const int kNumChannels = 3;       // Output frames are packed RGB.
static const int kNumFrames = 4;         // One frame per surround camera.
static const int kSv2dViewId = 0;        // The single 2d view id.
58
FramesHandler(sp<IEvsCamera> pCamera,sp<SurroundView2dSession> pSession)59 SurroundView2dSession::FramesHandler::FramesHandler(
60 sp<IEvsCamera> pCamera, sp<SurroundView2dSession> pSession)
61 : mCamera(pCamera),
62 mSession(pSession) {}
63
deliverFrame(const BufferDesc_1_0 & bufDesc_1_0)64 Return<void> SurroundView2dSession::FramesHandler::deliverFrame(
65 const BufferDesc_1_0& bufDesc_1_0) {
66 LOG(INFO) << "Ignores a frame delivered from v1.0 EVS service.";
67 mCamera->doneWithFrame(bufDesc_1_0);
68
69 return {};
70 }
71
deliverFrame_1_1(const hidl_vec<BufferDesc_1_1> & buffers)72 Return<void> SurroundView2dSession::FramesHandler::deliverFrame_1_1(
73 const hidl_vec<BufferDesc_1_1>& buffers) {
74 LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
75 mSession->mSequenceId++;
76
77 {
78 scoped_lock<mutex> lock(mSession->mAccessLock);
79 if (mSession->mProcessingEvsFrames) {
80 LOG(WARNING) << "EVS frames are being processed. Skip frames:" << mSession->mSequenceId;
81 mCamera->doneWithFrame_1_1(buffers);
82 return {};
83 }
84 }
85
86 if (buffers.size() != kNumFrames) {
87 LOG(ERROR) << "The number of incoming frames is " << buffers.size()
88 << ", which is different from the number " << kNumFrames
89 << ", specified in config file";
90 return {};
91 }
92
93 {
94 scoped_lock<mutex> lock(mSession->mAccessLock);
95 for (int i = 0; i < kNumFrames; i++) {
96 LOG(DEBUG) << "Copying buffer No." << i
97 << " to Surround View Service";
98 mSession->copyFromBufferToPointers(buffers[i],
99 mSession->mInputPointers[i]);
100 }
101 }
102
103 mCamera->doneWithFrame_1_1(buffers);
104
105 // Notify the session that a new set of frames is ready
106 {
107 scoped_lock<mutex> lock(mSession->mAccessLock);
108 mSession->mProcessingEvsFrames = true;
109 }
110 mSession->mFramesSignal.notify_all();
111
112 return {};
113 }
114
notify(const EvsEventDesc & event)115 Return<void> SurroundView2dSession::FramesHandler::notify(const EvsEventDesc& event) {
116 switch(event.aType) {
117 case EvsEventType::STREAM_STOPPED:
118 {
119 LOG(INFO) << "Received a STREAM_STOPPED event from Evs.";
120
121 // TODO(b/158339680): There is currently an issue in EVS reference
122 // implementation that causes STREAM_STOPPED event to be delivered
123 // properly. When the bug is fixed, we should deal with this event
124 // properly in case the EVS stream is stopped unexpectly.
125 break;
126 }
127
128 case EvsEventType::PARAMETER_CHANGED:
129 LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
130 << " is set to " << event.payload[1];
131 break;
132
133 // Below events are ignored in reference implementation.
134 case EvsEventType::STREAM_STARTED:
135 [[fallthrough]];
136 case EvsEventType::FRAME_DROPPED:
137 [[fallthrough]];
138 case EvsEventType::TIMEOUT:
139 LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
140 << "is received but ignored.";
141 break;
142 default:
143 LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
144 break;
145 }
146
147 return {};
148 }
149
copyFromBufferToPointers(BufferDesc_1_1 buffer,SurroundViewInputBufferPointers pointers)150 bool SurroundView2dSession::copyFromBufferToPointers(
151 BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {
152
153 AHardwareBuffer_Desc* pDesc =
154 reinterpret_cast<AHardwareBuffer_Desc *>(&buffer.buffer.description);
155
156 // create a GraphicBuffer from the existing handle
157 sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
158 buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
159 pDesc->height, pDesc->format, pDesc->layers,
160 GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);
161
162 if (inputBuffer == nullptr) {
163 LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
164 // Returning "true" in this error condition because we already released the
165 // previous image (if any) and so the texture may change in unpredictable
166 // ways now!
167 return false;
168 } else {
169 LOG(INFO) << "Managed to allocate GraphicBuffer with "
170 << " width: " << pDesc->width
171 << " height: " << pDesc->height
172 << " format: " << pDesc->format
173 << " stride: " << pDesc->stride;
174 }
175
176 // Lock the input GraphicBuffer and map it to a pointer. If we failed to
177 // lock, return false.
178 void* inputDataPtr;
179 inputBuffer->lock(
180 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
181 &inputDataPtr);
182 if (!inputDataPtr) {
183 LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
184 inputBuffer->unlock();
185 return false;
186 } else {
187 LOG(INFO) << "Managed to get read access to GraphicBuffer";
188 }
189
190 int stride = pDesc->stride;
191
192 // readPtr comes from EVS, and it is with 4 channels
193 uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);
194
195 // writePtr comes from CV imread, and it is with 3 channels
196 uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);
197
198 for (int i=0; i<pDesc->width; i++)
199 for (int j=0; j<pDesc->height; j++) {
200 writePtr[(i + j * stride) * 3 + 0] =
201 readPtr[(i + j * stride) * 4 + 0];
202 writePtr[(i + j * stride) * 3 + 1] =
203 readPtr[(i + j * stride) * 4 + 1];
204 writePtr[(i + j * stride) * 3 + 2] =
205 readPtr[(i + j * stride) * 4 + 2];
206 }
207 LOG(INFO) << "Brute force copying finished";
208
209 return true;
210 }
211
processFrames()212 void SurroundView2dSession::processFrames() {
213 while (true) {
214 {
215 unique_lock<mutex> lock(mAccessLock);
216
217 if (mStreamState != RUNNING) {
218 break;
219 }
220
221 mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
222 }
223
224 handleFrames(mSequenceId);
225
226 {
227 // Set the boolean to false to receive the next set of frames.
228 scoped_lock<mutex> lock(mAccessLock);
229 mProcessingEvsFrames = false;
230 }
231 }
232
233 // Notify the SV client that no new results will be delivered.
234 LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
235 mStream->notify(SvEvent::STREAM_STOPPED);
236
237 {
238 scoped_lock<mutex> lock(mAccessLock);
239 mStreamState = STOPPED;
240 mStream = nullptr;
241 LOG(DEBUG) << "Stream marked STOPPED.";
242 }
243 }
244
SurroundView2dSession(sp<IEvsEnumerator> pEvs,IOModuleConfig * pConfig)245 SurroundView2dSession::SurroundView2dSession(sp<IEvsEnumerator> pEvs,
246 IOModuleConfig* pConfig)
247 : mEvs(pEvs),
248 mIOModuleConfig(pConfig),
249 mStreamState(STOPPED) {
250 mEvsCameraIds = {"0", "1", "2", "3"};
251 }
252
~SurroundView2dSession()253 SurroundView2dSession::~SurroundView2dSession() {
254 // In case the client did not call stopStream properly, we should stop the
255 // stream explicitly. Otherwise the process thread will take forever to
256 // join.
257 stopStream();
258
259 // Waiting for the process thread to finish the buffered frames.
260 if (mProcessThread.joinable()) {
261 mProcessThread.join();
262 }
263
264 mEvs->closeCamera(mCamera);
265 }
266
267 // Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession
startStream(const sp<ISurroundViewStream> & stream)268 Return<SvResult> SurroundView2dSession::startStream(
269 const sp<ISurroundViewStream>& stream) {
270 LOG(DEBUG) << __FUNCTION__;
271 scoped_lock<mutex> lock(mAccessLock);
272
273 if (!mIsInitialized && !initialize()) {
274 LOG(ERROR) << "There is an error while initializing the use case. "
275 << "Exiting";
276 return SvResult::INTERNAL_ERROR;
277 }
278
279 if (mStreamState != STOPPED) {
280 LOG(ERROR) << "Ignoring startVideoStream call"
281 << "when a stream is already running.";
282 return SvResult::INTERNAL_ERROR;
283 }
284
285 if (stream == nullptr) {
286 LOG(ERROR) << "The input stream is invalid";
287 return SvResult::INTERNAL_ERROR;
288 }
289 mStream = stream;
290
291 mSequenceId = 0;
292 startEvs();
293
294 // TODO(b/158131080): the STREAM_STARTED event is not implemented in EVS
295 // reference implementation yet. Once implemented, this logic should be
296 // moved to EVS notify callback.
297 LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
298 mStream->notify(SvEvent::STREAM_STARTED);
299 mProcessingEvsFrames = false;
300
301 // Start the frame generation thread
302 mStreamState = RUNNING;
303
304 mProcessThread = thread([this]() {
305 processFrames();
306 });
307
308 return SvResult::OK;
309 }
310
stopStream()311 Return<void> SurroundView2dSession::stopStream() {
312 LOG(DEBUG) << __FUNCTION__;
313 unique_lock<mutex> lock(mAccessLock);
314
315 if (mStreamState == RUNNING) {
316 // Tell the processFrames loop to stop processing frames
317 mStreamState = STOPPING;
318
319 // Stop the EVS stream asynchronizely
320 mCamera->stopVideoStream();
321 mFramesHandler = nullptr;
322 }
323
324 return {};
325 }
326
doneWithFrames(const SvFramesDesc & svFramesDesc)327 Return<void> SurroundView2dSession::doneWithFrames(
328 const SvFramesDesc& svFramesDesc){
329 LOG(DEBUG) << __FUNCTION__;
330 scoped_lock <mutex> lock(mAccessLock);
331
332 mFramesRecord.inUse = false;
333
334 (void)svFramesDesc;
335 return {};
336 }
337
338 // Methods from ISurroundView2dSession follow.
get2dMappingInfo(get2dMappingInfo_cb _hidl_cb)339 Return<void> SurroundView2dSession::get2dMappingInfo(
340 get2dMappingInfo_cb _hidl_cb) {
341 LOG(DEBUG) << __FUNCTION__;
342
343 _hidl_cb(mInfo);
344 return {};
345 }
346
set2dConfig(const Sv2dConfig & sv2dConfig)347 Return<SvResult> SurroundView2dSession::set2dConfig(
348 const Sv2dConfig& sv2dConfig) {
349 LOG(DEBUG) << __FUNCTION__;
350 scoped_lock <mutex> lock(mAccessLock);
351
352 if (sv2dConfig.width <=0 || sv2dConfig.width > 4096) {
353 LOG(WARNING) << "The width of 2d config is out of the range (0, 4096]"
354 << "Ignored!";
355 return SvResult::INVALID_ARG;
356 }
357
358 mConfig.width = sv2dConfig.width;
359 mConfig.blending = sv2dConfig.blending;
360 mHeight = mConfig.width * mInfo.height / mInfo.width;
361
362 if (mStream != nullptr) {
363 LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
364 mStream->notify(SvEvent::CONFIG_UPDATED);
365 }
366
367 return SvResult::OK;
368 }
369
get2dConfig(get2dConfig_cb _hidl_cb)370 Return<void> SurroundView2dSession::get2dConfig(get2dConfig_cb _hidl_cb) {
371 LOG(DEBUG) << __FUNCTION__;
372
373 _hidl_cb(mConfig);
374 return {};
375 }
376
projectCameraPoints(const hidl_vec<Point2dInt> & points2dCamera,const hidl_string & cameraId,projectCameraPoints_cb _hidl_cb)377 Return<void> SurroundView2dSession::projectCameraPoints(const hidl_vec<Point2dInt>& points2dCamera,
378 const hidl_string& cameraId,
379 projectCameraPoints_cb _hidl_cb) {
380 LOG(DEBUG) << __FUNCTION__;
381 std::vector<Point2dFloat> outPoints;
382 bool cameraIdFound = false;
383 int cameraIndex = 0;
384 // Note: mEvsCameraIds must be in the order front, right, rear, left.
385 for (auto& evsCameraId : mEvsCameraIds) {
386 if (cameraId == evsCameraId) {
387 cameraIdFound = true;
388 LOG(DEBUG) << "Camera id found for projection: " << cameraId;
389 break;
390 }
391 cameraIndex++;
392 }
393
394 if (!cameraIdFound) {
395 LOG(ERROR) << "Camera id not found for projection: " << cameraId;
396 _hidl_cb(outPoints);
397 return {};
398 }
399
400 int width = mConfig.width;
401 int height = mHeight;
402 for (const auto& cameraPoint : points2dCamera) {
403 Point2dFloat outPoint = {false, 0.0, 0.0};
404 // Check of the camear point is within the camera resolution bounds.
405 if (cameraPoint.x < 0 || cameraPoint.x > width - 1 || cameraPoint.y < 0 ||
406 cameraPoint.y > height - 1) {
407 LOG(WARNING) << "Camera point (" << cameraPoint.x << ", " << cameraPoint.y
408 << ") is out of camera resolution bounds.";
409 outPoint.isValid = false;
410 outPoints.push_back(outPoint);
411 continue;
412 }
413
414 // Project points using mSurroundView function.
415 const Coordinate2dInteger camPoint(cameraPoint.x, cameraPoint.y);
416 Coordinate2dFloat projPoint2d(0.0, 0.0);
417
418 outPoint.isValid =
419 mSurroundView->GetProjectionPointFromRawCameraToSurroundView2d(camPoint,
420 cameraIndex,
421 &projPoint2d);
422 outPoint.x = projPoint2d.x;
423 outPoint.y = projPoint2d.y;
424 outPoints.push_back(outPoint);
425 }
426
427 _hidl_cb(outPoints);
428 return {};
429 }
430
// Renders one 2d surround view frame from the most recently copied set of
// camera inputs and forwards it to the registered SV client stream.
// Returns false only on unrecoverable errors (allocation or buffer-lock
// failures); a deliberately dropped frame still returns true.
bool SurroundView2dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << "Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Now only one sets of EVS input frames and one SV
    // output frame is supported. Implement buffer queue for both of them.
    {
        scoped_lock<mutex> lock(mAccessLock);

        // The single output record is still held by the client; drop this
        // frame rather than overwrite a buffer that is in use.
        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    // Re-allocate the output CPU buffer and the GPU texture if set2dConfig()
    // changed the requested output resolution since the last frame.
    if (mOutputWidth != mConfig.width || mOutputHeight != mHeight) {
        LOG(DEBUG) << "Config changed. Re-allocate memory."
                   << " Old width: "
                   << mOutputWidth
                   << " Old height: "
                   << mOutputHeight
                   << " New width: "
                   << mConfig.width
                   << " New height: "
                   << mHeight;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mHeight;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGB;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        // NOTE(review): operator new throws on failure by default, so this
        // null check is likely dead code -- confirm whether a nothrow
        // allocation was intended here.
        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        // Keep the core-lib pipeline in sync with the new output size.
        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update2dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGB_888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    // Run the core-lib stitching. On failure, fall back to a solid gray
    // image so the client still receives a frame of the expected size.
    if (mSurroundView->Get2dSurroundView(mInputPointers, &mOutputPointer)) {
        LOG(INFO) << "Get2dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get2dSurroundView failed. "
                   << "Using memset to initialize to gray";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    // Map the texture for CPU writing and copy the rendered image into it.
    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the same
    // as the width. For example, when the input frame is 1920 * 1080, the
    // width is 1080, but the stride is 2048. So we'd better copy the data line
    // by line, instead of single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        // Row pitches match, so both images are contiguous: one bulk copy.
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        // Pitches differ; copy row by row, advancing each pointer by its
        // own stride.
        for (int i=0; i<mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(DEBUG) << "memcpy finished";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: "
               << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        // Publish the rendered texture to the client. The record stays
        // marked inUse until the client calls doneWithFrames().
        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = kSv2dViewId;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc*>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGB_888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}
554
initialize()555 bool SurroundView2dSession::initialize() {
556 lock_guard<mutex> lock(mAccessLock, adopt_lock);
557
558 if (!setupEvs()) {
559 LOG(ERROR) << "Failed to setup EVS components for 2d session";
560 return false;
561 }
562
563 // TODO(b/150412555): ask core-lib team to add API description for "create"
564 // method in the .h file.
565 // The create method will never return a null pointer based the API
566 // description.
567 mSurroundView = unique_ptr<SurroundView>(Create());
568
569 SurroundViewStaticDataParams params =
570 SurroundViewStaticDataParams(
571 mCameraParams,
572 mIOModuleConfig->sv2dConfig.sv2dParams,
573 mIOModuleConfig->sv3dConfig.sv3dParams,
574 GetUndistortionScales(),
575 mIOModuleConfig->sv2dConfig.carBoundingBox,
576 mIOModuleConfig->carModelConfig.carModel.texturesMap,
577 mIOModuleConfig->carModelConfig.carModel.partsMap);
578 mSurroundView->SetStaticData(params);
579 if (mSurroundView->Start2dPipeline()) {
580 LOG(INFO) << "Start2dPipeline succeeded";
581 } else {
582 LOG(ERROR) << "Start2dPipeline failed";
583 return false;
584 }
585
586 mInputPointers.resize(kNumFrames);
587 for (int i = 0; i < kNumFrames; i++) {
588 mInputPointers[i].width = mCameraParams[i].size.width;
589 mInputPointers[i].height = mCameraParams[i].size.height;
590 mInputPointers[i].format = Format::RGB;
591 mInputPointers[i].cpu_data_pointer =
592 (void*)new uint8_t[mInputPointers[i].width *
593 mInputPointers[i].height *
594 kNumChannels];
595 }
596 LOG(INFO) << "Allocated " << kNumFrames << " input pointers";
597
598 mOutputWidth = mIOModuleConfig->sv2dConfig.sv2dParams.resolution.width;
599 mOutputHeight = mIOModuleConfig->sv2dConfig.sv2dParams.resolution.height;
600
601 mConfig.width = mOutputWidth;
602 mConfig.blending = SvQuality::HIGH;
603 mHeight = mOutputHeight;
604
605 mOutputPointer.height = mOutputHeight;
606 mOutputPointer.width = mOutputWidth;
607 mOutputPointer.format = mInputPointers[0].format;
608 mOutputPointer.data_pointer = new char[
609 mOutputHeight * mOutputWidth * kNumChannels];
610
611 if (!mOutputPointer.data_pointer) {
612 LOG(ERROR) << "Memory allocation failed. Exiting.";
613 return false;
614 }
615
616 mSvTexture = new GraphicBuffer(mOutputWidth,
617 mOutputHeight,
618 HAL_PIXEL_FORMAT_RGB_888,
619 1,
620 GRALLOC_USAGE_HW_TEXTURE,
621 "SvTexture");
622
623 // Note: sv2dParams is in meters while mInfo must be in milli-meters.
624 mInfo.width = mIOModuleConfig->sv2dConfig.sv2dParams.physical_size.width * 1000.0;
625 mInfo.height = mIOModuleConfig->sv2dConfig.sv2dParams.physical_size.height * 1000.0;
626 mInfo.center.isValid = true;
627 mInfo.center.x = mIOModuleConfig->sv2dConfig.sv2dParams.physical_center.x * 1000.0;
628 mInfo.center.y = mIOModuleConfig->sv2dConfig.sv2dParams.physical_center.y * 1000.0;
629
630 if (mSvTexture->initCheck() == OK) {
631 LOG(INFO) << "Successfully allocated Graphic Buffer";
632 } else {
633 LOG(ERROR) << "Failed to allocate Graphic Buffer";
634 return false;
635 }
636
637 mIsInitialized = true;
638 return true;
639 }
640
// Discovers the EVS camera group named in the IO-module config, selects the
// largest RGBA8888 stream configuration from its metadata, opens the camera,
// and fetches per-physical-camera calibration parameters.
// Returns true on success.
bool SurroundView2dSession::setupEvs() {
    // Reads the camera related information from the config object
    const string evsGroupId = mIOModuleConfig->cameraConfig.evsGroupId;

    // Setup for EVS
    // The HIDL callback runs synchronously, so mCameraDesc is populated (if
    // a matching id exists) before getCameraList_1_1 returns.
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1(
        [this, evsGroupId] (hidl_vec<CameraDesc> cameraList) {
            LOG(INFO) << "Camera list callback received " << cameraList.size();
            for (auto&& cam : cameraList) {
                LOG(INFO) << "Found camera " << cam.v1.cameraId;
                if (cam.v1.cameraId == evsGroupId) {
                    mCameraDesc = cam;
                }
            }
        });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // RGBA8888 format
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in metadata
        // The entry is a flat int32 array; each consecutive group of
        // RawStreamConfig fields describes one stream configuration.
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        // NOTE(review): streamCfgs.count is the number of int32 entries, so
        // kStreamCfgSz must be the per-configuration int32 count (not a byte
        // size) for idx and ptr to stay in step and visit every
        // configuration -- confirm the constant's definition.
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants below input data format
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    // Open the camera group with the selected stream configuration.
    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    // Fetch intrinsic/extrinsic parameters for each physical camera in the
    // group; all of them are required, so any failure aborts the setup.
    map<string, AndroidCameraParams> cameraIdToAndroidParameters;
    for (const auto& id : mIOModuleConfig->cameraConfig.evsCameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    mCameraParams =
        convertToSurroundViewCameraParams(cameraIdToAndroidParameters);

    // Stamp the chosen stream size and a fixed fisheye field of view onto
    // every camera's parameters for the core-lib.
    for (auto& camera : mCameraParams) {
        camera.size.width = targetCfg->width;
        camera.size.height = targetCfg->height;
        camera.circular_fov = 179;
    }

    return true;
}
739
startEvs()740 bool SurroundView2dSession::startEvs() {
741 mFramesHandler = new FramesHandler(mCamera, this);
742 Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
743 if (result != EvsResult::OK) {
744 LOG(ERROR) << "Failed to start video stream";
745 return false;
746 } else {
747 LOG(INFO) << "Video stream was started successfully";
748 }
749
750 return true;
751 }
752
753 } // namespace implementation
754 } // namespace V1_0
755 } // namespace sv
756 } // namespace automotive
757 } // namespace hardware
758 } // namespace android
759