/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "GCH_HalUtils"
#include "hal_utils.h"

#include <cutils/properties.h>
#include <inttypes.h>
#include <log/log.h>

#include <string>

#include "vendor_tag_defs.h"

namespace android {
namespace google_camera_hal {
namespace hal_utils {

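// Builds an HWL pipeline request for the given pipeline ID by cloning the
// settings and per-buffer metadata of a capture request and copying its
// input/output buffers and input dimensions.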
status_t CreateHwlPipelineRequest(HwlPipelineRequest* hwl_request,
                                  uint32_t pipeline_id,
                                  const CaptureRequest& request) {
  if (hwl_request == nullptr) {
    ALOGE("%s: hwl_request is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  hwl_request->pipeline_id = pipeline_id;
  hwl_request->settings = HalCameraMetadata::Clone(request.settings.get());
  hwl_request->input_buffers = request.input_buffers;
  hwl_request->output_buffers = request.output_buffers;
  hwl_request->input_width = request.input_width;
  hwl_request->input_height = request.input_height;

  for (auto& metadata : request.input_buffer_metadata) {
    hwl_request->input_buffer_metadata.push_back(
        HalCameraMetadata::Clone(metadata.get()));
  }

  for (auto& [camera_id, physical_metadata] : request.physical_camera_settings) {
    hwl_request->physical_camera_settings.emplace(
        camera_id, HalCameraMetadata::Clone(physical_metadata.get()));
  }

  return OK;
}

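// Creates one HWL pipeline request per process block request, pairing each
// request with the pipeline ID at the same index.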
status_t CreateHwlPipelineRequests(
    std::vector<HwlPipelineRequest>* hwl_requests,
    const std::vector<uint32_t>& pipeline_ids,
    const std::vector<ProcessBlockRequest>& requests) {
  if (hwl_requests == nullptr) {
    ALOGE("%s: hwl_requests is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  if (pipeline_ids.size() != requests.size()) {
    ALOGE("%s: There are %zu pipeline IDs but %zu requests", __FUNCTION__,
          pipeline_ids.size(), requests.size());
    return BAD_VALUE;
  }

  status_t res;
  for (size_t i = 0; i < pipeline_ids.size(); i++) {
    HwlPipelineRequest hwl_request;
    res = CreateHwlPipelineRequest(&hwl_request, pipeline_ids[i],
                                   requests[i].request);
    if (res != OK) {
      ALOGE("%s: Creating a HWL pipeline request failed: %s(%d)", __FUNCTION__,
            strerror(-res), res);
      return res;
    }

    hwl_requests->push_back(std::move(hwl_request));
  }

  return OK;
}

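// Converts an HWL pipeline result into a framework-facing CaptureResult,
// moving the buffers and result metadata and cloning the per-physical-camera
// metadata.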
std::unique_ptr<CaptureResult> ConvertToCaptureResult(
    std::unique_ptr<HwlPipelineResult> hwl_result) {
  if (hwl_result == nullptr) {
    ALOGE("%s: hwl_result is nullptr", __FUNCTION__);
    return nullptr;
  }

  auto capture_result = std::make_unique<CaptureResult>();
  if (capture_result == nullptr) {
    ALOGE("%s: Creating capture_result failed.", __FUNCTION__);
    return nullptr;
  }

  capture_result->frame_number = hwl_result->frame_number;
  capture_result->result_metadata = std::move(hwl_result->result_metadata);
  capture_result->output_buffers = std::move(hwl_result->output_buffers);
  capture_result->input_buffers = std::move(hwl_result->input_buffers);
  capture_result->partial_result = hwl_result->partial_result;

  capture_result->physical_metadata.reserve(
      hwl_result->physical_camera_results.size());
  for (const auto& [camera_id, metadata] : hwl_result->physical_camera_results) {
    capture_result->physical_metadata.push_back(PhysicalCameraMetadata(
        {camera_id, HalCameraMetadata::Clone(metadata.get())}));
  }

  return capture_result;
}

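// Returns true if the capture request contains the given output buffer,
// matching either by buffer handle or, when the framework passed a null
// buffer (HAL-allocated), by stream ID.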
bool ContainsOutputBuffer(const CaptureRequest& request,
                          const StreamBuffer& buffer) {
  for (auto& request_buffer : request.output_buffers) {
    if (request_buffer.buffer == buffer.buffer) {
      return true;
    } else if (buffer.buffer == nullptr &&
               request_buffer.stream_id == buffer.stream_id) {
      // Framework passed in an empty buffer and HAL allocated the buffer.
      return true;
    }
  }

  return false;
}

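// Verifies that every output buffer in the remaining session request is
// covered by at least one of the process block requests.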
bool AreAllRemainingBuffersRequested(
    const std::vector<ProcessBlockRequest>& process_block_requests,
    const CaptureRequest& remaining_session_request) {
  for (auto& buffer : remaining_session_request.output_buffers) {
    bool found = false;

    for (auto& block_request : process_block_requests) {
      if (ContainsOutputBuffer(block_request.request, buffer)) {
        found = true;
        break;
      }
    }

    if (!found) {
      ALOGE("%s: A buffer %" PRIu64 " of stream %d is not requested.",
            __FUNCTION__, buffer.buffer_id, buffer.stream_id);
      return false;
    }
  }

  return true;
}

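// Reads the sensor color filter arrangement from the static characteristics.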
static status_t GetColorFilterArrangement(
    const HalCameraMetadata* characteristics, uint8_t* cfa) {
  if (characteristics == nullptr || cfa == nullptr) {
    ALOGE("%s: characteristics (%p) or cfa (%p) is nullptr", __FUNCTION__,
          characteristics, cfa);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res = characteristics->Get(
      ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &entry);
  if (res != OK || entry.count != 1) {
    ALOGE("%s: Getting COLOR_FILTER_ARRANGEMENT failed: %s(%d) count: %zu",
          __FUNCTION__, strerror(-res), res, entry.count);
    return res;
  }

  *cfa = entry.data.u8[0];
  return OK;
}

bool IsIrCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  return cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR;
}

bool IsMonoCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  return cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO;
}

bool IsBayerCamera(const HalCameraMetadata* characteristics) {
  uint8_t cfa;
  status_t res = GetColorFilterArrangement(characteristics, &cfa);
  if (res != OK) {
    ALOGE("%s: Getting color filter arrangement failed: %s(%d)", __FUNCTION__,
          strerror(-res), res);
    return false;
  }

  if (cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG ||
      cfa == ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR) {
    return true;
  }

  return false;
}

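// A camera is considered fixed-focus when its minimum focus distance is
// reported as 0.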
bool IsFixedFocusCamera(const HalCameraMetadata* characteristics) {
  if (characteristics == nullptr) {
    ALOGE("%s: characteristics (%p) is nullptr", __FUNCTION__, characteristics);
    return false;
  }

  camera_metadata_ro_entry entry = {};
  status_t res =
      characteristics->Get(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, &entry);
  if (res != OK || entry.count != 1) {
    ALOGE("%s: Getting ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE failed: %s(%d)",
          __FUNCTION__, strerror(-res), res);
    return false;
  }

  return entry.data.f[0] == 0.0f;
}

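// A capture request is HDR+ compatible only when it is a plain still-capture
// request: STILL_CAPTURE intent, ZSL enabled, high-quality post-processing
// modes, auto 3A, flash off, AF trigger idle, and not a preview-only request.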
bool IsRequestHdrplusCompatible(const CaptureRequest& request,
                                int32_t preview_stream_id) {
  if (request.settings == nullptr) {
    return false;
  }

  camera_metadata_ro_entry entry;
  if (request.settings->Get(ANDROID_CONTROL_CAPTURE_INTENT, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
    ALOGV("%s: ANDROID_CONTROL_CAPTURE_INTENT is not STILL_CAPTURE",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_ENABLE_ZSL, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
    ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_NOISE_REDUCTION_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_EDGE_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_EDGE_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_EDGE_MODE is not HQ", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &entry) !=
          OK ||
      *entry.data.u8 != ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_AE_MODE, &entry) != OK ||
      (*entry.data.u8 != ANDROID_CONTROL_AE_MODE_ON &&
       *entry.data.u8 != ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
    ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_AWB_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_AWB_MODE_AUTO) {
    ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_EFFECT_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_EFFECT_MODE_OFF) {
    ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_CONTROL_MODE, &entry) != OK ||
      (*entry.data.u8 != ANDROID_CONTROL_MODE_AUTO &&
       *entry.data.u8 != ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
    ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE",
          __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_FLASH_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_FLASH_MODE_OFF) {
    ALOGV("%s: ANDROID_FLASH_MODE is not OFF", __FUNCTION__);
    return false;
  }

  if (request.settings->Get(ANDROID_TONEMAP_MODE, &entry) != OK ||
      *entry.data.u8 != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
    ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ", __FUNCTION__);
    return false;
  }

  // For b/129798167 - AOSP camera app can't trigger the snapshot
  if (request.settings->Get(ANDROID_CONTROL_AF_TRIGGER, &entry) != OK ||
      *entry.data.u8 != ANDROID_CONTROL_AF_TRIGGER_IDLE) {
    ALOGI("%s: (%d)ANDROID_CONTROL_AF_TRIGGER is not IDLE", __FUNCTION__,
          request.frame_number);
    return false;
  }

  // For b/130768200, treat the request as a non-HDR+ request
  // if it only requests one preview frame output.
  if (preview_stream_id != -1 && request.output_buffers.size() == 1 &&
      request.output_buffers[0].stream_id == preview_stream_id) {
    ALOGI("%s: (%d)Only request preview frame", __FUNCTION__,
          request.frame_number);
    return false;
  }

  return true;
}

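// A stream configuration is HDR+ compatible when HDR+ is not disabled by
// property or session parameters, the operation mode is normal, the max AE
// target FPS does not exceed the HDR+ sensor limit, and the streams consist
// of a preview stream plus JPEG and/or YUV snapshot streams from a single
// (logical or physical) camera.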
bool IsStreamHdrplusCompatible(const StreamConfiguration& stream_config,
                               const HalCameraMetadata* characteristics) {
  static const uint32_t kHdrplusSensorMaxFps = 30;
  if (characteristics == nullptr) {
    ALOGE("%s: characteristics is nullptr", __FUNCTION__);
    return false;
  }

  if (property_get_bool("persist.vendor.camera.hdrplus.disable", false)) {
    ALOGI("%s: HDR+ is disabled by property", __FUNCTION__);
    return false;
  }

  camera_metadata_ro_entry entry;
  status_t res =
      characteristics->Get(VendorTagIds::kHdrplusPayloadFrames, &entry);
  if (res != OK || entry.data.i32[0] <= 0) {
    ALOGW("%s: Getting kHdrplusPayloadFrames failed or number <= 0",
          __FUNCTION__);
    return false;
  }

  if (stream_config.operation_mode != StreamConfigurationMode::kNormal) {
    ALOGI("%s: Only support normal mode. operation_mode = %d", __FUNCTION__,
          stream_config.operation_mode);
    return false;
  }

  if (property_get_bool("persist.vendor.camera.fatp.enable", false)) {
    ALOGI("%s: Do not use HDR+ for FATP mode", __FUNCTION__);
    return false;
  }

  if (stream_config.session_params != nullptr &&
      stream_config.session_params->Get(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
                                        &entry) == OK) {
    uint32_t max_fps = entry.data.i32[1];
    if (max_fps > kHdrplusSensorMaxFps) {
      ALOGI("%s: the fps (%u) is over HDR+ support.", __FUNCTION__, max_fps);
      return false;
    }
  }

  if (stream_config.session_params != nullptr) {
    camera_metadata_ro_entry entry;
    status_t result = stream_config.session_params->Get(
        VendorTagIds::kHdrPlusDisabled, &entry);

    if ((result == OK) && (entry.data.u8[0] == 1)) {
      ALOGI("%s: request.disable_hdrplus true", __FUNCTION__);
      return false;
    }
  }

  bool preview_stream = false;
  bool jpeg_stream = false;
  bool has_logical_stream = false;
  bool has_physical_stream = false;
  uint32_t yuv_num = 0;
  uint32_t last_physical_cam_id = 0;

  for (auto stream : stream_config.streams) {
    if (utils::IsPreviewStream(stream)) {
      preview_stream = true;
    } else if (utils::IsJPEGSnapshotStream(stream)) {
      jpeg_stream = true;
    } else if (utils::IsDepthStream(stream)) {
      ALOGI("%s: Don't support depth stream", __FUNCTION__);
      return false;
    } else if (utils::IsVideoStream(stream)) {
      ALOGI("%s: Don't support video stream", __FUNCTION__);
      return false;
    } else if (utils::IsArbitraryDataSpaceRawStream(stream)) {
      ALOGI("%s: Don't support raw stream", __FUNCTION__);
      return false;
    } else if (utils::IsYUVSnapshotStream(stream)) {
      yuv_num++;
    } else {
      ALOGE("%s: Unknown stream type %d, res %ux%u, format %d, usage %" PRIu64,
            __FUNCTION__, stream.stream_type, stream.width, stream.height,
            stream.format, stream.usage);
      return false;
    }

    if (stream.is_physical_camera_stream) {
      if (has_physical_stream &&
          stream.physical_camera_id != last_physical_cam_id) {
        // b/137721824, we don't support HDR+ if stream configuration contains
        // different physical camera id streams.
        ALOGI("%s: Don't support different physical camera id streams",
              __FUNCTION__);
        return false;
      }
      has_physical_stream = true;
      last_physical_cam_id = stream.physical_camera_id;
    } else {
      has_logical_stream = true;
    }
  }


  // Only preview is configured.
  if (preview_stream == true && jpeg_stream == false && yuv_num == 0) {
    ALOGI("%s: Only preview is configured.", __FUNCTION__);
    return false;
  }

  // No preview is configured.
  if (preview_stream == false) {
    ALOGI("%s: no preview is configured.", __FUNCTION__);
    return false;
  }

  // b/137721824, we don't support HDR+ if stream configuration contains
  // logical and physical streams.
  if (has_logical_stream == true && has_physical_stream == true) {
    ALOGI("%s: Don't support logical and physical combination", __FUNCTION__);
    return false;
  }

  // TODO(b/128633958): remove this after depth block is in place
  if (property_get_bool("persist.vendor.camera.rgbird.forceinternal", false)) {
    return false;
  }

  return true;
}

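// Writes ANDROID_CONTROL_ENABLE_ZSL into the request metadata.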
status_t SetEnableZslMetadata(HalCameraMetadata* metadata, bool enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  uint8_t enable_zsl = enable ? 1 : 0;
  status_t res = metadata->Set(ANDROID_CONTROL_ENABLE_ZSL, &enable_zsl, 1);
  if (res != OK) {
    ALOGE("%s: set %d fail", __FUNCTION__, enable_zsl);
    return res;
  }

  return OK;
}

status_t SetHybridAeMetadata(HalCameraMetadata* metadata, bool enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  status_t res;
  int32_t enable_hybrid_ae = enable ? 1 : 0;
  res = metadata->Set(VendorTagIds::kHybridAeEnabled, &enable_hybrid_ae,
                      /*data_count=*/1);
  if (res != OK) {
    ALOGE("%s: enable_hybrid_ae(%d) fail", __FUNCTION__, enable_hybrid_ae);
    return res;
  }

  return OK;
}

status_t ForceLensShadingMapModeOn(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  if (metadata->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry) == OK &&
      *entry.data.u8 == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
    // Force enabling LENS_SHADING_MAP_MODE_ON.
    uint8_t mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
    status_t result =
        metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &mode, 1);
    if (result != OK) {
      ALOGE("%s: Set LENS_SHADING_MAP_MODE on fail", __FUNCTION__);
      return result;
    }
  }

  return OK;
}

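// Modifies a realtime request for HDR+: sets the hybrid AE vendor tag,
// switches face detection from OFF to SIMPLE, and forces the lens shading
// map mode to ON.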
status_t ModifyRealtimeRequestForHdrplus(HalCameraMetadata* metadata,
                                         const bool hybrid_ae_enable) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  // Update hybrid AE
  status_t result = SetHybridAeMetadata(metadata, hybrid_ae_enable);
  if (result != OK) {
    ALOGE("%s: SetHybridAeMetadata fail", __FUNCTION__);
    return result;
  }

  // Update FD mode
  camera_metadata_ro_entry entry;
  if (metadata->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry) == OK &&
      *entry.data.u8 == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
    // Force enabling face detect mode to simple.
    uint8_t mode = ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
    result = metadata->Set(ANDROID_STATISTICS_FACE_DETECT_MODE, &mode, 1);
    if (result != OK) {
      ALOGE("%s: update FD simple mode fail", __FUNCTION__);
      return result;
    }
  }

  // Force lens shading mode to on
  result = ForceLensShadingMapModeOn(metadata);
  if (result != OK) {
    ALOGE("%s: ForceLensShadingMapModeOn fail", __FUNCTION__);
    return result;
  }

  return OK;
}

status_t GetLensShadingMapMode(const CaptureRequest& request,
                               uint8_t* lens_shading_mode) {
  if (request.settings == nullptr || lens_shading_mode == nullptr) {
    ALOGE("%s: request.settings or lens_shading_mode is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t result =
      request.settings->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry);
  if (result != OK) {
    ALOGV("%s: Get LENS_SHADING_MAP_MODE fail", __FUNCTION__);
    return result;
  }
  *lens_shading_mode = *entry.data.u8;

  return OK;
}

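// Strips lens shading information from a result: resets the map mode to OFF
// when present and erases the lens shading map entry.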
status_t RemoveLsInfoFromResult(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res;
  if (metadata->Get(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &entry) == OK) {
    // Change lens shading map mode to OFF.
    uint8_t mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    res = metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &mode, 1);
    if (res != OK) {
      ALOGE("%s: Set LENS_SHADING_MAP_MODE off fail", __FUNCTION__);
      return res;
    }
  }

  // Erase lens shading map.
  res = metadata->Erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
  if (res != OK) {
    ALOGE("%s: erase LENS_SHADING_MAP fail", __FUNCTION__);
    return res;
  }

  return OK;
}

status_t GetFdMode(const CaptureRequest& request, uint8_t* face_detect_mode) {
  if (request.settings == nullptr || face_detect_mode == nullptr) {
    ALOGE("%s: request.settings or face_detect_mode is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t result =
      request.settings->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry);
  if (result != OK) {
    ALOGV("%s: Get FACE_DETECT_MODE fail", __FUNCTION__);
    return result;
  }
  *face_detect_mode = *entry.data.u8;

  return OK;
}

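// Strips face detect information from a result: resets the face detect mode
// to OFF when present and erases the face rectangle and face score entries.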
status_t RemoveFdInfoFromResult(HalCameraMetadata* metadata) {
  if (metadata == nullptr) {
    ALOGE("%s: metadata is nullptr", __FUNCTION__);
    return BAD_VALUE;
  }

  camera_metadata_ro_entry entry;
  status_t res;
  if (metadata->Get(ANDROID_STATISTICS_FACE_DETECT_MODE, &entry) == OK) {
    uint8_t mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    res = metadata->Set(ANDROID_STATISTICS_FACE_DETECT_MODE, &mode, 1);
    if (res != OK) {
      ALOGE("%s: update FD off mode fail", __FUNCTION__);
      return res;
    }
  }

  res = metadata->Erase(ANDROID_STATISTICS_FACE_RECTANGLES);
  if (res != OK) {
    ALOGE("%s: erase face rectangles fail", __FUNCTION__);
    return res;
  }

  res = metadata->Erase(ANDROID_STATISTICS_FACE_SCORES);
  if (res != OK) {
    ALOGE("%s: erase face scores fail", __FUNCTION__);
    return res;
  }

  return OK;
}

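// The following functions dump stream configurations, requests, results,
// notify messages, and buffers to the log for debugging.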
void DumpStreamConfiguration(const StreamConfiguration& stream_configuration,
                             const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream num: %zu, operation_mode:%d",
        stream_configuration.streams.size(),
        stream_configuration.operation_mode);
  for (uint32_t i = 0; i < stream_configuration.streams.size(); i++) {
    auto& stream = stream_configuration.streams[i];
    ALOGI("==== [%u]stream_id %d, type %d, format %d, res %ux%u, usage %" PRIu64
          ", is_phy %d, phy_cam_id %u, group_id %d",
          i, stream.id, stream.stream_type, stream.format, stream.width,
          stream.height, stream.usage, stream.is_physical_camera_stream,
          stream.physical_camera_id, stream.group_id);
  }
  ALOGI("%s", str.c_str());
}

void DumpHalConfiguredStreams(
    const std::vector<HalStream>& hal_configured_streams,
    const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream num: %zu", hal_configured_streams.size());
  for (uint32_t i = 0; i < hal_configured_streams.size(); i++) {
    auto& stream = hal_configured_streams[i];
    ALOGI("==== [%u]stream_id:%5d override_format:%8x p_usage:%" PRIu64
          " c_usage:%" PRIu64 " max_buf:%u is_phy:%d",
          i, stream.id, stream.override_format, stream.producer_usage,
          stream.consumer_usage, stream.max_buffers,
          stream.is_physical_camera_stream);
  }
  ALOGI("%s", str.c_str());
}

void DumpCaptureRequest(const CaptureRequest& request,
                        const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", request.frame_number);
  ALOGI("== settings:%p", request.settings.get());
  ALOGI("== num_output_buffers:%zu", request.output_buffers.size());
  for (uint32_t i = 0; i < request.output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf:%p", i,
          request.output_buffers[i].stream_id, request.output_buffers[i].buffer);
  }
  ALOGI("== num_input_buffers:%zu", request.input_buffers.size());
  for (uint32_t i = 0; i < request.input_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf:%p", i,
          request.input_buffers[i].stream_id, request.input_buffers[i].buffer);
  }
  ALOGI("%s", str.c_str());
}

void DumpCaptureResult(const ProcessBlockResult& result,
                       const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", result.result->frame_number);
  ALOGI("== num_output_buffers:%zu", result.result->output_buffers.size());
  for (uint32_t i = 0; i < result.result->output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf_id:%" PRIu64 " handle: %p status: %d",
          i, result.result->output_buffers[i].stream_id,
          result.result->output_buffers[i].buffer_id,
          result.result->output_buffers[i].buffer,
          result.result->output_buffers[i].status);
  }
  ALOGI("== has_metadata:%d", result.result->result_metadata != nullptr);
  ALOGI("== request_id:%d", result.request_id);
  ALOGI("%s", str.c_str());
}

void DumpCaptureResult(const CaptureResult& result, const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== frame_number:%u", result.frame_number);
  ALOGI("== num_output_buffers:%zu", result.output_buffers.size());
  for (uint32_t i = 0; i < result.output_buffers.size(); i++) {
    ALOGI("==== buf[%d] stream_id:%d buf_id:%" PRIu64 " handle: %p status: %d",
          i, result.output_buffers[i].stream_id,
          result.output_buffers[i].buffer_id, result.output_buffers[i].buffer,
          result.output_buffers[i].status);
  }
  ALOGI("== has_metadata:%d", result.result_metadata != nullptr);
  ALOGI("%s", str.c_str());
}

void DumpNotify(const NotifyMessage& message, const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  if (message.type == MessageType::kShutter) {
    ALOGI("== frame_number:%u", message.message.shutter.frame_number);
    ALOGI("== time_stamp:%" PRIu64, message.message.shutter.timestamp_ns);
    ALOGI("== readout_time_stamp:%" PRIu64,
          message.message.shutter.readout_timestamp_ns);
  } else if (message.type == MessageType::kError) {
    ALOGI("== frame_number:%u", message.message.error.frame_number);
    ALOGI("== error_code:%u", message.message.error.error_code);
  }
  ALOGI("%s", str.c_str());
}

void DumpStream(const Stream& stream, const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== stream_id %d, format %d, res %ux%u, usage %" PRIu64
        ", is_phy %d, phy_cam_id %u",
        stream.id, stream.format, stream.width, stream.height, stream.usage,
        stream.is_physical_camera_stream, stream.physical_camera_id);
  ALOGI("%s", str.c_str());
}

// Dump HalStream
void DumpHalStream(const HalStream& hal_stream, const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  ALOGI("== id %d, override_format %d, producer_usage %" PRIu64 ", %" PRIu64
        ", max_buffers %u, override_data_space %u, is_phy %u, phy_cam_id %d",
        hal_stream.id, hal_stream.override_format, hal_stream.producer_usage,
        hal_stream.consumer_usage, hal_stream.max_buffers,
        hal_stream.override_data_space, hal_stream.is_physical_camera_stream,
        hal_stream.physical_camera_id);
  ALOGI("%s", str.c_str());
}

void DumpBufferReturn(const std::vector<StreamBuffer>& stream_buffers,
                      const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  for (auto stream_buffer : stream_buffers) {
    ALOGI("== Strm id:%d, buf id:%" PRIu64, stream_buffer.stream_id,
          stream_buffer.buffer_id);
  }
  ALOGI("%s", str.c_str());
}

void DumpBufferRequest(const std::vector<BufferRequest>& hal_buffer_requests,
                       const std::vector<BufferReturn>* hal_buffer_returns,
                       const std::string& title) {
  std::string str = "======== " + title + " ========";
  ALOGI("%s", str.c_str());
  for (const auto& buffer_request : hal_buffer_requests) {
    ALOGI("== Strm id:%d", buffer_request.stream_id);
  }
  ALOGI("===");
  for (const auto& buffer_return : *hal_buffer_returns) {
    for (const auto& stream_buffer : buffer_return.val.buffers) {
      ALOGI("== buf id:%" PRIu64 " stm id:%d buf:%p", stream_buffer.buffer_id,
            stream_buffer.stream_id, stream_buffer.buffer);
    }
  }
  ALOGI("%s", str.c_str());
}

}  // namespace hal_utils
}  // namespace google_camera_hal
}  // namespace android