1 /*
2  * Copyright (C) 2019 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "EmulatedRequestProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 
20 #include "EmulatedRequestProcessor.h"
21 
22 #include <HandleImporter.h>
23 #include <hardware/gralloc.h>
24 #include <log/log.h>
25 #include <sync/sync.h>
26 #include <utils/Timers.h>
27 #include <utils/Trace.h>
28 
29 namespace android {
30 
31 using android::hardware::camera::common::V1_0::helper::HandleImporter;
32 using google_camera_hal::ErrorCode;
33 using google_camera_hal::HwlPipelineResult;
34 using google_camera_hal::MessageType;
35 using google_camera_hal::NotifyMessage;
36 
EmulatedRequestProcessor(uint32_t camera_id,sp<EmulatedSensor> sensor,const HwlSessionCallback & session_callback)37 EmulatedRequestProcessor::EmulatedRequestProcessor(
38     uint32_t camera_id, sp<EmulatedSensor> sensor,
39     const HwlSessionCallback& session_callback)
40     : camera_id_(camera_id),
41       sensor_(sensor),
42       session_callback_(session_callback),
43       request_state_(std::make_unique<EmulatedLogicalRequestState>(camera_id)) {
44   ATRACE_CALL();
45   request_thread_ = std::thread([this] { this->RequestProcessorLoop(); });
46   importer_ = std::make_shared<HandleImporter>();
47 }
48 
// Stops the request thread and shuts down the sensor.
EmulatedRequestProcessor::~EmulatedRequestProcessor() {
  ATRACE_CALL();
  // Signal the worker loop to exit; the loop re-checks this flag after every
  // vsync wait, so join() completes within roughly one frame duration.
  // NOTE(review): assumes processor_done_ is declared std::atomic<bool> in
  // the header — confirm, otherwise this unsynchronized store is a data race
  // with RequestProcessorLoop().
  processor_done_ = true;
  request_thread_.join();

  // Shut the sensor down only after the worker thread is gone, since the
  // loop calls into sensor_ (WaitForVSync/SetCurrentRequest).
  auto ret = sensor_->ShutDown();
  if (ret != OK) {
    ALOGE("%s: Failed during sensor shutdown %s (%d)", __FUNCTION__,
          strerror(-ret), ret);
  }
}
60 
// Enqueues a batch of capture requests for asynchronous processing by the
// request thread.
//
// For each request this method:
//  1. Validates the pipeline id against the supplied pipeline list.
//  2. Applies back-pressure: waits until the pending queue drains below the
//     sensor pipeline depth, bounded by the maximum frame duration.
//  3. Updates the request for dynamic streams.
//  4. Wraps the output (and optional input) stream buffers in SensorBuffers.
//
// Returns BAD_VALUE on an invalid pipeline id, TIMED_OUT if no queue slot
// frees up in time, NO_MEMORY if the output buffers cannot be wrapped, the
// dynamic-stream update error code on failure, or OK on success.
status_t EmulatedRequestProcessor::ProcessPipelineRequests(
    uint32_t frame_number, std::vector<HwlPipelineRequest>& requests,
    const std::vector<EmulatedPipeline>& pipelines,
    const DynamicStreamIdMapType& dynamic_stream_id_map,
    bool use_default_physical_camera) {
  ATRACE_CALL();
  status_t res = OK;

  // unique_lock (not lock_guard) because wait_for() below must be able to
  // release and re-acquire it.
  std::unique_lock<std::mutex> lock(process_mutex_);

  for (auto& request : requests) {
    if (request.pipeline_id >= pipelines.size()) {
      ALOGE("%s: Pipeline request with invalid pipeline id: %u", __FUNCTION__,
            request.pipeline_id);
      return BAD_VALUE;
    }

    // Block while the queue is over the pipeline depth; each wait attempt is
    // bounded by the longest supported frame duration so a stalled consumer
    // cannot hold the caller forever.
    while (pending_requests_.size() > EmulatedSensor::kPipelineDepth) {
      auto result = request_condition_.wait_for(
          lock, std::chrono::nanoseconds(
                    EmulatedSensor::kSupportedFrameDurationRange[1]));
      if (result == std::cv_status::timeout) {
        ALOGE("%s: Timed out waiting for a pending request slot", __FUNCTION__);
        return TIMED_OUT;
      }
    }

    res = request_state_->UpdateRequestForDynamicStreams(
        &request, pipelines, dynamic_stream_id_map, use_default_physical_camera);
    if (res != OK) {
      ALOGE("%s: Failed to update request for dynamic streams: %s(%d)",
            __FUNCTION__, strerror(-res), res);
      return res;
    }

    // Output buffers are mandatory; a null result aborts the whole request.
    auto output_buffers = CreateSensorBuffers(
        frame_number, request.output_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, /*override_width*/ 0,
        /*override_height*/ 0);
    if (output_buffers == nullptr) {
      return NO_MEMORY;
    }

    // Input buffers are optional; null simply means a non-reprocess request.
    auto input_buffers = CreateSensorBuffers(
        frame_number, request.input_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb, request.input_width,
        request.input_height);

    // Clone the settings so the queued entry stays valid after the caller
    // releases its metadata.
    pending_requests_.push(
        {.settings = HalCameraMetadata::Clone(request.settings.get()),
         .input_buffers = std::move(input_buffers),
         .output_buffers = std::move(output_buffers)});
  }

  return OK;
}
119 
CreateSensorBuffers(uint32_t frame_number,const std::vector<StreamBuffer> & buffers,const std::unordered_map<uint32_t,EmulatedStream> & streams,uint32_t pipeline_id,HwlPipelineCallback cb,int32_t override_width,int32_t override_height)120 std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers(
121     uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
122     const std::unordered_map<uint32_t, EmulatedStream>& streams,
123     uint32_t pipeline_id, HwlPipelineCallback cb, int32_t override_width,
124     int32_t override_height) {
125   if (buffers.empty()) {
126     return nullptr;
127   }
128 
129   std::vector<StreamBuffer> requested_buffers;
130   for (auto& buffer : buffers) {
131     if (buffer.buffer != nullptr) {
132       requested_buffers.push_back(buffer);
133       continue;
134     }
135 
136     if (session_callback_.request_stream_buffers != nullptr) {
137       std::vector<StreamBuffer> one_requested_buffer;
138       status_t res = session_callback_.request_stream_buffers(
139           buffer.stream_id, 1, &one_requested_buffer, frame_number);
140       if (res != OK) {
141         ALOGE("%s: request_stream_buffers failed: %s(%d)", __FUNCTION__,
142               strerror(-res), res);
143         continue;
144       }
145       if (one_requested_buffer.size() != 1 ||
146           one_requested_buffer[0].buffer == nullptr) {
147         ALOGE("%s: request_stream_buffers failed to return a valid buffer",
148               __FUNCTION__);
149         continue;
150       }
151       requested_buffers.push_back(one_requested_buffer[0]);
152     }
153   }
154 
155   if (requested_buffers.size() < buffers.size()) {
156     ALOGE(
157         "%s: Failed to acquire all sensor buffers: %zu acquired, %zu requested",
158         __FUNCTION__, requested_buffers.size(), buffers.size());
159     // This only happens for HAL buffer manager use case.
160     if (session_callback_.return_stream_buffers != nullptr) {
161       session_callback_.return_stream_buffers(requested_buffers);
162     }
163     return nullptr;
164   }
165 
166   auto sensor_buffers = std::make_unique<Buffers>();
167   sensor_buffers->reserve(requested_buffers.size());
168   for (auto& buffer : requested_buffers) {
169     auto sensor_buffer = CreateSensorBuffer(
170         frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer,
171         override_width, override_height);
172     if (sensor_buffer.get() != nullptr) {
173       sensor_buffers->push_back(std::move(sensor_buffer));
174     }
175   }
176 
177   return sensor_buffers;
178 }
179 
NotifyFailedRequest(const PendingRequest & request)180 void EmulatedRequestProcessor::NotifyFailedRequest(const PendingRequest& request) {
181   if (request.output_buffers->at(0)->callback.notify != nullptr) {
182     // Mark all output buffers for this request in order not to send
183     // ERROR_BUFFER for them.
184     for (auto& output_buffer : *(request.output_buffers)) {
185       output_buffer->is_failed_request = true;
186     }
187 
188     auto output_buffer = std::move(request.output_buffers->at(0));
189     NotifyMessage msg = {
190         .type = MessageType::kError,
191         .message.error = {.frame_number = output_buffer->frame_number,
192                           .error_stream_id = -1,
193                           .error_code = ErrorCode::kErrorRequest}};
194     output_buffer->callback.notify(output_buffer->pipeline_id, msg);
195   }
196 }
197 
Flush()198 status_t EmulatedRequestProcessor::Flush() {
199   std::lock_guard<std::mutex> lock(process_mutex_);
200   // First flush in-flight requests
201   auto ret = sensor_->Flush();
202 
203   // Then the rest of the pending requests
204   while (!pending_requests_.empty()) {
205     const auto& request = pending_requests_.front();
206     NotifyFailedRequest(request);
207     pending_requests_.pop();
208   }
209 
210   return ret;
211 }
212 
GetBufferSizeAndStride(const EmulatedStream & stream,buffer_handle_t buffer,uint32_t * size,uint32_t * stride)213 status_t EmulatedRequestProcessor::GetBufferSizeAndStride(
214     const EmulatedStream& stream, buffer_handle_t buffer,
215     uint32_t* size /*out*/, uint32_t* stride /*out*/) {
216   if (size == nullptr) {
217     return BAD_VALUE;
218   }
219 
220   switch (stream.override_format) {
221     case HAL_PIXEL_FORMAT_RGB_888:
222       *stride = stream.width * 3;
223       *size = (*stride) * stream.height;
224       break;
225     case HAL_PIXEL_FORMAT_RGBA_8888:
226       *stride = stream.width * 4;
227       *size = (*stride) * stream.height;
228       break;
229     case HAL_PIXEL_FORMAT_Y16:
230       if (stream.override_data_space == HAL_DATASPACE_DEPTH) {
231         *stride = AlignTo(AlignTo(stream.width, 2) * 2, 16);
232         *size = (*stride) * AlignTo(stream.height, 2);
233       } else {
234         return BAD_VALUE;
235       }
236       break;
237     case HAL_PIXEL_FORMAT_BLOB:
238       if (stream.override_data_space == HAL_DATASPACE_V0_JFIF) {
239         *size = stream.buffer_size;
240         *stride = *size;
241       } else {
242         return BAD_VALUE;
243       }
244       break;
245     case HAL_PIXEL_FORMAT_RAW16:
246       if (importer_->getMonoPlanarStrideBytes(buffer, stride) != NO_ERROR) {
247         *stride = stream.width * 2;
248       }
249       *size = (*stride) * stream.height;
250       break;
251     default:
252       return BAD_VALUE;
253   }
254 
255   return OK;
256 }
257 
// Locks the gralloc buffer for CPU access and fills in the plane pointers of
// 'sensor_buffer'.
//
// YCbCr 420 (8-bit and P010) buffers are locked via lockYCbCr and populate
// the img_y_crcb planes; all other supported formats are locked as a single
// plane whose size/stride comes from GetBufferSizeAndStride. Returns
// BAD_VALUE on null output, lock failure, or an unsupported layout/format.
status_t EmulatedRequestProcessor::LockSensorBuffer(
    const EmulatedStream& stream, buffer_handle_t buffer, int32_t width,
    int32_t height, SensorBuffer* sensor_buffer /*out*/) {
  if (sensor_buffer == nullptr) {
    return BAD_VALUE;
  }

  auto usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
  bool isYUV_420_888 = stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888;
  bool isP010 = static_cast<android_pixel_format_v1_1_t>(
                    stream.override_format) == HAL_PIXEL_FORMAT_YCBCR_P010;
  if ((isYUV_420_888) || (isP010)) {
    IMapper::Rect map_rect = {0, 0, width, height};
    auto yuv_layout = importer_->lockYCbCr(buffer, usage, map_rect);
    if ((yuv_layout.y != nullptr) && (yuv_layout.cb != nullptr) &&
        (yuv_layout.cr != nullptr)) {
      sensor_buffer->plane.img_y_crcb.img_y =
          static_cast<uint8_t*>(yuv_layout.y);
      sensor_buffer->plane.img_y_crcb.img_cb =
          static_cast<uint8_t*>(yuv_layout.cb);
      sensor_buffer->plane.img_y_crcb.img_cr =
          static_cast<uint8_t*>(yuv_layout.cr);
      sensor_buffer->plane.img_y_crcb.y_stride = yuv_layout.yStride;
      sensor_buffer->plane.img_y_crcb.cbcr_stride = yuv_layout.cStride;
      sensor_buffer->plane.img_y_crcb.cbcr_step = yuv_layout.chromaStep;
      // For 8-bit YUV with chroma step 2 the only supported layout is
      // semi-planar with interleaved Cb/Cr planes exactly one byte apart
      // (NV12/NV21); reject anything else.
      if (isYUV_420_888 && (yuv_layout.chromaStep == 2) &&
          std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                   sensor_buffer->plane.img_y_crcb.img_cr) != 1) {
        ALOGE("%s: Unsupported YUV layout, chroma step: %u U/V plane delta: %u",
              __FUNCTION__, yuv_layout.chromaStep,
              static_cast<unsigned>(
                  std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                           sensor_buffer->plane.img_y_crcb.img_cr)));
        return BAD_VALUE;
      }
      sensor_buffer->plane.img_y_crcb.bytesPerPixel = isP010 ? 2 : 1;
    } else {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
  } else {
    uint32_t buffer_size = 0, stride = 0;
    auto ret = GetBufferSizeAndStride(stream, buffer, &buffer_size, &stride);
    if (ret != OK) {
      ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__,
            stream.override_format);
      return BAD_VALUE;
    }
    // BLOB buffers are locked by byte size; everything else by pixel region.
    if (stream.override_format == HAL_PIXEL_FORMAT_BLOB) {
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, buffer_size));
    } else {
      IMapper::Rect region{0, 0, width, height};
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer_->lock(buffer, usage, region));
    }
    if (sensor_buffer->plane.img.img == nullptr) {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_buffer->plane.img.stride_in_bytes = stride;
    sensor_buffer->plane.img.buffer_size = buffer_size;
  }

  return OK;
}
324 
// Builds a single SensorBuffer wrapper for 'stream_buffer': resolves the
// effective stream format/dimensions, locks the gralloc buffer for CPU
// access, and imports the acquire fence. Returns null on lock or fence
// import failure.
std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer(
    uint32_t frame_number, const EmulatedStream& emulated_stream,
    uint32_t pipeline_id, HwlPipelineCallback callback,
    StreamBuffer stream_buffer, int32_t override_width,
    int32_t override_height) {
  auto buffer = std::make_unique<SensorBuffer>(importer_);

  auto stream = emulated_stream;
  // Make sure input stream formats are correctly mapped here
  if (stream.is_input) {
    stream.override_format =
        EmulatedSensor::OverrideFormat(stream.override_format);
  }
  // Reprocess inputs may use dimensions different from the stream config.
  if (override_width > 0 && override_height > 0) {
    buffer->width = override_width;
    buffer->height = override_height;
  } else {
    buffer->width = stream.width;
    buffer->height = stream.height;
  }
  buffer->format = static_cast<PixelFormat>(stream.override_format);
  buffer->dataSpace = stream.override_data_space;
  buffer->stream_buffer = stream_buffer;
  buffer->pipeline_id = pipeline_id;
  buffer->callback = callback;
  buffer->frame_number = frame_number;
  // Physical streams are attributed to their physical camera id.
  buffer->camera_id = emulated_stream.is_physical_camera_stream
                          ? emulated_stream.physical_camera_id
                          : camera_id_;
  buffer->is_input = stream.is_input;
  // In case buffer processing is successful, flip this flag accordingly
  buffer->stream_buffer.status = BufferStatus::kError;

  if (buffer->stream_buffer.buffer != nullptr) {
    auto ret = LockSensorBuffer(stream, buffer->stream_buffer.buffer,
                                buffer->width, buffer->height, buffer.get());
    if (ret != OK) {
      // NOTE(review): release() abandons the SensorBuffer without running its
      // destructor — presumably to avoid destructor-side cleanup (e.g.
      // unlocking a buffer that was never locked), but as written the
      // SensorBuffer object itself is leaked. Confirm intent; reset()/plain
      // assignment would destroy it instead.
      buffer.release();
      buffer = nullptr;
    }
  }

  if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) {
    auto fence_status = importer_->importFence(stream_buffer.acquire_fence,
                                               buffer->acquire_fence_fd);
    if (!fence_status) {
      ALOGE("%s: Failed importing acquire fence!", __FUNCTION__);
      // NOTE(review): same release()-then-null pattern as above — see note.
      buffer.release();
      buffer = nullptr;
    }
  }

  return buffer;
}
379 
AcquireBuffers(Buffers * buffers)380 std::unique_ptr<Buffers> EmulatedRequestProcessor::AcquireBuffers(
381     Buffers* buffers) {
382   if ((buffers == nullptr) || (buffers->empty())) {
383     return nullptr;
384   }
385 
386   auto acquired_buffers = std::make_unique<Buffers>();
387   acquired_buffers->reserve(buffers->size());
388   auto output_buffer = buffers->begin();
389   while (output_buffer != buffers->end()) {
390     status_t ret = OK;
391     if ((*output_buffer)->acquire_fence_fd >= 0) {
392       ret = sync_wait((*output_buffer)->acquire_fence_fd,
393                       ns2ms(EmulatedSensor::kSupportedFrameDurationRange[1]));
394       if (ret != OK) {
395         ALOGE("%s: Fence sync failed: %s, (%d)", __FUNCTION__, strerror(-ret),
396               ret);
397       }
398     }
399 
400     if (ret == OK) {
401       acquired_buffers->push_back(std::move(*output_buffer));
402     }
403 
404     output_buffer = buffers->erase(output_buffer);
405   }
406 
407   return acquired_buffers;
408 }
409 
// Worker thread body: dequeues pending requests once per vsync, resolves
// their settings, and hands them to the sensor for rendering. Exits when the
// destructor sets processor_done_ or when the sensor stops delivering vsync.
void EmulatedRequestProcessor::RequestProcessorLoop() {
  ATRACE_CALL();

  bool vsync_status_ = true;
  while (!processor_done_ && vsync_status_) {
    {
      std::lock_guard<std::mutex> lock(process_mutex_);
      if (!pending_requests_.empty()) {
        status_t ret;
        const auto& request = pending_requests_.front();
        // Cache notification context from the first output buffer before the
        // buffers are moved out below.
        auto frame_number = request.output_buffers->at(0)->frame_number;
        auto notify_callback = request.output_buffers->at(0)->callback;
        auto pipeline_id = request.output_buffers->at(0)->pipeline_id;

        // Wait on acquire fences; buffers whose fences fail are dropped.
        auto output_buffers = AcquireBuffers(request.output_buffers.get());
        auto input_buffers = AcquireBuffers(request.input_buffers.get());
        if (!output_buffers->empty()) {
          std::unique_ptr<EmulatedSensor::LogicalCameraSettings> logical_settings =
              std::make_unique<EmulatedSensor::LogicalCameraSettings>();

          // Collect the physical camera ids targeted by this request.
          std::unique_ptr<std::set<uint32_t>> physical_camera_output_ids =
              std::make_unique<std::set<uint32_t>>();
          for (const auto& it : *output_buffers) {
            if (it->camera_id != camera_id_) {
              physical_camera_output_ids->emplace(it->camera_id);
            }
          }

          // Repeating requests usually include valid settings only during the
          // initial call. Afterwards an invalid settings pointer means that
          // there are no changes in the parameters and Hal should re-use the
          // last valid values.
          // TODO: Add support for individual physical camera requests.
          if (request.settings.get() != nullptr) {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(request.settings.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
            last_settings_ = HalCameraMetadata::Clone(request.settings.get());
          } else {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(last_settings_.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
          }

          if (ret == OK) {
            // Hand the request over; the sensor owns the buffers from here.
            auto result = request_state_->InitializeLogicalResult(pipeline_id,
                                                                  frame_number);
            sensor_->SetCurrentRequest(
                std::move(logical_settings), std::move(result),
                std::move(input_buffers), std::move(output_buffers));
          } else {
            // Settings initialization failed: report an ERROR_RESULT.
            NotifyMessage msg{.type = MessageType::kError,
                              .message.error = {
                                  .frame_number = frame_number,
                                  .error_stream_id = -1,
                                  .error_code = ErrorCode::kErrorResult,
                              }};

            notify_callback.notify(pipeline_id, msg);
          }
        } else {
          // No further processing is needed, just fail the result which will
          // complete this request.
          NotifyMessage msg{.type = MessageType::kError,
                            .message.error = {
                                .frame_number = frame_number,
                                .error_stream_id = -1,
                                .error_code = ErrorCode::kErrorResult,
                            }};

          notify_callback.notify(pipeline_id, msg);
        }

        pending_requests_.pop();
        // Wake a producer that may be blocked on a full queue in
        // ProcessPipelineRequests().
        request_condition_.notify_one();
      }
    }

    // Throttle the loop to the sensor's vsync; a false return ends the loop.
    vsync_status_ =
        sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]);
  }
}
492 
Initialize(std::unique_ptr<HalCameraMetadata> static_meta,PhysicalDeviceMapPtr physical_devices)493 status_t EmulatedRequestProcessor::Initialize(
494     std::unique_ptr<HalCameraMetadata> static_meta,
495     PhysicalDeviceMapPtr physical_devices) {
496   std::lock_guard<std::mutex> lock(process_mutex_);
497   return request_state_->Initialize(std::move(static_meta),
498                                     std::move(physical_devices));
499 }
500 
SetSessionCallback(const HwlSessionCallback & hwl_session_callback)501 void EmulatedRequestProcessor::SetSessionCallback(
502     const HwlSessionCallback& hwl_session_callback) {
503   std::lock_guard<std::mutex> lock(process_mutex_);
504   session_callback_ = hwl_session_callback;
505 }
506 
GetDefaultRequest(RequestTemplate type,std::unique_ptr<HalCameraMetadata> * default_settings)507 status_t EmulatedRequestProcessor::GetDefaultRequest(
508     RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
509   std::lock_guard<std::mutex> lock(process_mutex_);
510   return request_state_->GetDefaultRequest(type, default_settings);
511 }
512 
513 }  // namespace android
514