/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCameraProviderHwlImpl"
#include "EmulatedCameraProviderHWLImpl.h"

#include <android-base/file.h>
#include <android-base/strings.h>
#include <cutils/properties.h>
#include <hardware/camera_common.h>
#include <log/log.h>

#include "EmulatedCameraDeviceHWLImpl.h"
#include "EmulatedCameraDeviceSessionHWLImpl.h"
#include "EmulatedLogicalRequestState.h"
#include "EmulatedSensor.h"
#include "EmulatedTorchState.h"
#include "utils/HWLUtils.h"
#include "vendor_tag_defs.h"

namespace android {

// Location of the camera configuration files.
const char* EmulatedCameraProviderHwlImpl::kConfigurationFileLocation[] = {
    "/vendor/etc/config/emu_camera_back.json",
    "/vendor/etc/config/emu_camera_front.json",
    "/vendor/etc/config/emu_camera_depth.json",
};

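// Reference stream sizes (width, height) used below when checking the
// mandatory concurrent stream combinations.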
constexpr StreamSize s240pStreamSize = std::pair(240, 180);
constexpr StreamSize s720pStreamSize = std::pair(1280, 720);
constexpr StreamSize s1440pStreamSize = std::pair(1920, 1440);

std::unique_ptr<EmulatedCameraProviderHwlImpl>
EmulatedCameraProviderHwlImpl::Create() {
  auto provider = std::unique_ptr<EmulatedCameraProviderHwlImpl>(
      new EmulatedCameraProviderHwlImpl());

  if (provider == nullptr) {
    ALOGE("%s: Creating EmulatedCameraProviderHwlImpl failed.", __FUNCTION__);
    return nullptr;
  }

  status_t res = provider->Initialize();
  if (res != OK) {
    ALOGE("%s: Initializing EmulatedCameraProviderHwlImpl failed: %s (%d).",
          __FUNCTION__, strerror(-res), res);
    return nullptr;
  }

  ALOGI("%s: Created EmulatedCameraProviderHwlImpl", __FUNCTION__);

  return provider;
}

status_t EmulatedCameraProviderHwlImpl::GetTagFromName(const char* name,
                                                       uint32_t* tag) {
  if (name == nullptr || tag == nullptr) {
    return BAD_VALUE;
  }

  size_t name_length = strlen(name);
  // First, find the section by the longest string match
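  // For example, for "android.lens.info.availableApertures" the longest
  // matching section name is "android.lens.info"; the remaining suffix
  // ("availableApertures") is then matched against that section's tags only.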
  const char* section = NULL;
  size_t section_index = 0;
  size_t section_length = 0;
  for (size_t i = 0; i < ANDROID_SECTION_COUNT; ++i) {
    const char* str = camera_metadata_section_names[i];

    ALOGV("%s: Trying to match against section '%s'", __FUNCTION__, str);

    if (strstr(name, str) == name) {  // name begins with the section name
      size_t str_length = strlen(str);

      ALOGV("%s: Name begins with section name", __FUNCTION__);

      // section name is the longest we've found so far
      if (section == NULL || section_length < str_length) {
        section = str;
        section_index = i;
        section_length = str_length;

        ALOGV("%s: Found new best section (%s)", __FUNCTION__, section);
      }
    }
  }

  if (section == NULL) {
    return NAME_NOT_FOUND;
  } else {
    ALOGV("%s: Found matched section '%s' (%zu)", __FUNCTION__, section,
          section_index);
  }

  // Get the tag name component of the name
  const char* name_tag_name = name + section_length + 1;  // x.y.z -> z
  if (section_length + 1 >= name_length) {
    return BAD_VALUE;
  }

  // Match rest of name against the tag names in that section only
  uint32_t candidate_tag = 0;
  // Match built-in tags (typically android.*)
  uint32_t tag_begin, tag_end;  // [tag_begin, tag_end)
  tag_begin = camera_metadata_section_bounds[section_index][0];
  tag_end = camera_metadata_section_bounds[section_index][1];

  for (candidate_tag = tag_begin; candidate_tag < tag_end; ++candidate_tag) {
    const char* tag_name = get_camera_metadata_tag_name(candidate_tag);

    if (strcmp(name_tag_name, tag_name) == 0) {
      ALOGV("%s: Found matched tag '%s' (%d)", __FUNCTION__, tag_name,
            candidate_tag);
      break;
    }
  }

  if (candidate_tag == tag_end) {
    return NAME_NOT_FOUND;
  }

  *tag = candidate_tag;
  return OK;
}

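// Returns true if any size in 'stream_sizes' covers at least as many pixels
// (width * height) as 'compare_size'.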
static bool IsMaxSupportedSizeGreaterThanOrEqual(
    const std::set<StreamSize>& stream_sizes, StreamSize compare_size) {
  for (const auto& stream_size : stream_sizes) {
    if (stream_size.first * stream_size.second >=
        compare_size.first * compare_size.second) {
      return true;
    }
  }
  return false;
}

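// Returns true if 'cap' is listed in the camera's static metadata entry
// ANDROID_REQUEST_AVAILABLE_CAPABILITIES.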
static bool SupportsCapability(const uint32_t camera_id,
                               const HalCameraMetadata& static_metadata,
                               uint8_t cap) {
  camera_metadata_ro_entry_t entry;
  auto ret = static_metadata.Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
  if (ret != OK || (entry.count == 0)) {
    ALOGE("Error getting capabilities for camera id %u", camera_id);
    return false;
  }
  for (size_t i = 0; i < entry.count; i++) {
    if (entry.data.u8[i] == cap) {
      return true;
    }
  }
  return false;
}

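// Checks whether the camera advertises the output sizes required for the
// mandatory concurrent stream combinations; see the per-format checks below.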
bool EmulatedCameraProviderHwlImpl::SupportsMandatoryConcurrentStreams(
    uint32_t camera_id) {
  HalCameraMetadata& static_metadata = *(static_metadata_[camera_id]);
  auto map = std::make_unique<StreamConfigurationMap>(static_metadata);
  auto yuv_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_YCBCR_420_888);
  auto blob_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_BLOB);
  auto depth16_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_Y16);
  auto priv_output_sizes =
      map->GetOutputSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

  if (!SupportsCapability(
          camera_id, static_metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) &&
      IsMaxSupportedSizeGreaterThanOrEqual(depth16_output_sizes,
                                           s240pStreamSize)) {
    ALOGI("%s: Depth only output supported by camera id %u", __FUNCTION__,
          camera_id);
    return true;
  }
  if (yuv_output_sizes.empty()) {
    ALOGW("%s: No YUV output supported by camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  if (priv_output_sizes.empty()) {
    ALOGW("No PRIV output supported by camera id %u", camera_id);
    return false;
  }

  if (blob_output_sizes.empty()) {
    ALOGW("No BLOB output supported by camera id %u", camera_id);
    return false;
  }

  // According to the HAL spec, if a device supports output sizes of at least
  // 1440p, it must support both 1440p and 720p streams for the PRIV, JPEG and
  // YUV formats. If its maximum supported size is at least 720p (but below
  // 1440p), it must support 720p streams for those formats. Otherwise it must
  // support 2 streams (YUV / PRIV + JPEG) of the max supported size.

  // Check for YUV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes, s1440pStreamSize) &&
      (yuv_output_sizes.find(s1440pStreamSize) == yuv_output_sizes.end() ||
       yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end())) {
    ALOGW("%s: 1440p+720p YUV outputs not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes,
                                                  s720pStreamSize) &&
             yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end()) {
    ALOGW("%s: 720p YUV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for PRIV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes, s1440pStreamSize) &&
      (priv_output_sizes.find(s1440pStreamSize) == priv_output_sizes.end() ||
       priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p PRIV outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes,
                                                  s720pStreamSize) &&
             priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end()) {
    ALOGW("%s: 720p PRIV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for BLOB output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes, s1440pStreamSize) &&
      (blob_output_sizes.find(s1440pStreamSize) == blob_output_sizes.end() ||
       blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p BLOB outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes,
                                                  s720pStreamSize) &&
             blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end()) {
    ALOGW("%s: 720p BLOB output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  return true;
}

status_t EmulatedCameraProviderHwlImpl::GetConcurrentStreamingCameraIds(
    std::vector<std::unordered_set<uint32_t>>* combinations) {
  if (combinations == nullptr) {
    return BAD_VALUE;
  }
  // Collect all camera ids that support the guaranteed stream combinations
  // (720p YUV and IMPLEMENTATION_DEFINED) and put them in one set. We don't
  // make all possible combinations since it should be possible to stream all
  // of them at once in the emulated camera.
  std::unordered_set<uint32_t> candidate_ids;
  for (auto& entry : camera_id_map_) {
    if (SupportsMandatoryConcurrentStreams(entry.first)) {
      candidate_ids.insert(entry.first);
    }
  }
  combinations->emplace_back(std::move(candidate_ids));
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
    const std::vector<CameraIdAndStreamConfiguration>& configs,
    bool* is_supported) {
  *is_supported = false;

  // Go through the given camera ids, get their sensor characteristics, stream
  // config maps and call EmulatedSensor::IsStreamCombinationSupported()
  for (auto& config : configs) {
    // TODO: Consider caching sensor characteristics and StreamConfigurationMap
    if (camera_id_map_.find(config.camera_id) == camera_id_map_.end()) {
      ALOGE("%s: Camera id %u does not exist", __FUNCTION__, config.camera_id);
      return BAD_VALUE;
    }

    auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
        *(static_metadata_[config.camera_id]));
    auto stream_configuration_map_max_resolution =
        std::make_unique<StreamConfigurationMap>(
            *(static_metadata_[config.camera_id]), /*maxResolution*/ true);

    LogicalCharacteristics sensor_chars;
    status_t ret =
        GetSensorCharacteristics((static_metadata_[config.camera_id]).get(),
                                 &sensor_chars[config.camera_id]);
    if (ret != OK) {
      ALOGE("%s: Unable to extract sensor chars for camera id %u", __FUNCTION__,
            config.camera_id);
      return UNKNOWN_ERROR;
    }

    PhysicalStreamConfigurationMap physical_stream_configuration_map;
    PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution;
    auto const& physicalCameraInfo = camera_id_map_[config.camera_id];
    for (size_t i = 0; i < physicalCameraInfo.size(); i++) {
      uint32_t physical_camera_id = physicalCameraInfo[i].second;
      physical_stream_configuration_map.emplace(
          physical_camera_id, std::make_unique<StreamConfigurationMap>(
                                  *(static_metadata_[physical_camera_id])));

      physical_stream_configuration_map_max_resolution.emplace(
          physical_camera_id,
          std::make_unique<StreamConfigurationMap>(
              *(static_metadata_[physical_camera_id]), /*maxResolution*/ true));

      ret = GetSensorCharacteristics(static_metadata_[physical_camera_id].get(),
                                     &sensor_chars[physical_camera_id]);
      if (ret != OK) {
        ALOGE("%s: Unable to extract camera %d sensor characteristics %s (%d)",
              __FUNCTION__, physical_camera_id, strerror(-ret), ret);
        return ret;
      }
    }

    if (!EmulatedSensor::IsStreamCombinationSupported(
            config.camera_id, config.stream_configuration,
            *stream_configuration_map, *stream_configuration_map_max_resolution,
            physical_stream_configuration_map,
            physical_stream_configuration_map_max_resolution, sensor_chars)) {
      return OK;
    }
  }

  *is_supported = true;
  return OK;
}

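// Returns true if 'value' consists only of digit and punctuation characters
// (e.g. a leading '-' or a '.'), i.e. it looks like a plain number rather
// than an enum name.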
bool IsDigit(const std::string& value) {
  if (value.empty()) {
    return false;
  }

  for (const auto& c : value) {
    if (!std::isdigit(c) && (!std::ispunct(c))) {
      return false;
    }
  }

  return true;
}

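// Resolves the string name of a metadata enum value (e.g. "ON") to its
// numeric value for the given tag via camera_metadata_enum_value().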
template <typename T>
status_t GetEnumValue(uint32_t tag_id, const char* cstring, T* result /*out*/) {
  if ((result == nullptr) || (cstring == nullptr)) {
    return BAD_VALUE;
  }

  uint32_t enum_value;
  auto ret =
      camera_metadata_enum_value(tag_id, cstring, strlen(cstring), &enum_value);
  if (ret != OK) {
    ALOGE("%s: Failed to match tag id: 0x%x value: %s", __FUNCTION__, tag_id,
          cstring);
    return ret;
  }
  *result = enum_value;

  return OK;
}

status_t GetUInt8Value(const Json::Value& value, uint32_t tag_id,
                       uint8_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= 0) && (int_value <= UINT8_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt32Value(const Json::Value& value, uint32_t tag_id,
                       int32_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= INT32_MIN) && (int_value <= INT32_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt64Value(const Json::Value& value, uint32_t tag_id,
                       int64_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto int_value = strtoll(value.asCString(), nullptr, 10);
    if ((int_value >= INT64_MIN) && (int_value <= INT64_MAX) && (errno == 0)) {
      *result = int_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetFloatValue(const Json::Value& value, uint32_t tag_id,
                       float* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto float_value = strtof(value.asCString(), nullptr);
    if (errno == 0) {
      *result = float_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetDoubleValue(const Json::Value& value, uint32_t tag_id,
                        double* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto double_value = strtod(value.asCString(), nullptr);
    if (errno == 0) {
      *result = double_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

template <typename T>
void FilterVendorKeys(uint32_t tag_id, std::vector<T>* values) {
  if ((values == nullptr) || (values->empty())) {
    return;
  }

  switch (tag_id) {
    case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS:
    case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS:
    case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS:
    case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
      auto it = values->begin();
      while (it != values->end()) {
        // Per the spec, the tags handled here are of type int32_t,
        // so any vendor-defined keys will have negative values.
        if (*it < 0) {
          it = values->erase(it);
        } else {
          it++;
        }
      }
    } break;
    default:
      // no-op
      break;
  }
}

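// Converts every element of 'json_value' with 'get_val_func' and stores the
// resulting array in 'meta' under 'tag_id'. Fails on the first element that
// cannot be parsed.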
template <typename T, typename func_type>
status_t InsertTag(const Json::Value& json_value, uint32_t tag_id,
                   func_type get_val_func, HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<T> values;
  T result;
  status_t ret = -1;
  values.reserve(json_value.size());
  for (const auto& val : json_value) {
    ret = get_val_func(val, tag_id, &result);
    if (ret != OK) {
      break;
    }

    values.push_back(result);
  }

  if (ret == OK) {
    FilterVendorKeys(tag_id, &values);
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

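// Rational tags are represented in the JSON configuration as a flat array of
// strings holding alternating numerator/denominator values, so the array
// length must be even.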
status_t InsertRationalTag(const Json::Value& json_value, uint32_t tag_id,
                           HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<camera_metadata_rational_t> values;
  status_t ret = OK;
  if (json_value.isArray() && ((json_value.size() % 2) == 0)) {
    values.reserve(json_value.size() / 2);
    auto it = json_value.begin();
    while (it != json_value.end()) {
      camera_metadata_rational_t result;
      ret = GetInt32Value((*it), tag_id, &result.numerator);
      it++;
      ret |= GetInt32Value((*it), tag_id, &result.denominator);
      it++;
      if (ret != OK) {
        break;
      }
      values.push_back(result);
    }
  } else {
    ALOGE("%s: json type: %d doesn't match with rational tag type",
          __FUNCTION__, json_value.type());
    return BAD_VALUE;
  }

  if (ret == OK) {
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

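// Parses a single JSON characteristics object whose member names are metadata
// tag names (e.g. "android.lens.facing") and whose values are arrays of
// strings. Returns the index of the parsed entry in 'static_metadata_', or a
// status_t error code on failure.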
uint32_t EmulatedCameraProviderHwlImpl::ParseCharacteristics(
    const Json::Value& value, ssize_t id) {
  if (!value.isObject()) {
    ALOGE("%s: Configuration root is not an object", __FUNCTION__);
    return BAD_VALUE;
  }

  auto static_meta = HalCameraMetadata::Create(1, 10);
  auto members = value.getMemberNames();
  for (const auto& member : members) {
    uint32_t tag_id;
    auto stat = GetTagFromName(member.c_str(), &tag_id);
    if (stat != OK) {
      ALOGE("%s: tag %s not supported, skipping!", __func__, member.c_str());
      continue;
    }

    auto tag_type = get_camera_metadata_tag_type(tag_id);
    auto tag_value = value[member.c_str()];
    switch (tag_type) {
      case TYPE_BYTE:
        InsertTag<uint8_t>(tag_value, tag_id, GetUInt8Value, static_meta.get());
        break;
      case TYPE_INT32:
        InsertTag<int32_t>(tag_value, tag_id, GetInt32Value, static_meta.get());
        break;
      case TYPE_INT64:
        InsertTag<int64_t>(tag_value, tag_id, GetInt64Value, static_meta.get());
        break;
      case TYPE_FLOAT:
        InsertTag<float>(tag_value, tag_id, GetFloatValue, static_meta.get());
        break;
      case TYPE_DOUBLE:
        InsertTag<double>(tag_value, tag_id, GetDoubleValue, static_meta.get());
        break;
      case TYPE_RATIONAL:
        InsertRationalTag(tag_value, tag_id, static_meta.get());
        break;
      default:
        ALOGE("%s: Unsupported tag type: %d!", __FUNCTION__, tag_type);
    }
  }

  SensorCharacteristics sensor_characteristics;
  auto ret =
      GetSensorCharacteristics(static_meta.get(), &sensor_characteristics);
  if (ret != OK) {
    ALOGE("%s: Unable to extract sensor characteristics!", __FUNCTION__);
    return ret;
  }

  if (!EmulatedSensor::AreCharacteristicsSupported(sensor_characteristics)) {
    ALOGE("%s: Sensor characteristics not supported!", __FUNCTION__);
    return BAD_VALUE;
  }

  // Although we don't support HdrPlus, this data is still required by HWL
  int32_t payload_frames = 0;
  static_meta->Set(google_camera_hal::kHdrplusPayloadFrames, &payload_frames, 1);

  if (id < 0) {
    static_metadata_.push_back(std::move(static_meta));
    id = static_metadata_.size() - 1;
  } else {
    static_metadata_[id] = std::move(static_meta);
  }

  return id;
}

status_t EmulatedCameraProviderHwlImpl::WaitForQemuSfFakeCameraPropertyAvailable() {
  // Camera service may start running before qemu-props sets
  // vendor.qemu.sf.fake_camera to any of the following four values:
  // "none,front,back,both"; so we need to wait.
  int num_attempts = 100;
  char prop[PROPERTY_VALUE_MAX];
  bool timeout = true;
  for (int i = 0; i < num_attempts; ++i) {
    if (property_get("vendor.qemu.sf.fake_camera", prop, nullptr) != 0) {
      timeout = false;
      break;
    }
    usleep(5000);
  }
  if (timeout) {
    ALOGE("timeout (%dms) waiting for property vendor.qemu.sf.fake_camera to be set\n",
          5 * num_attempts);
    return BAD_VALUE;
  }
  return OK;
}

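// A configuration file holds either a single characteristics object, or a
// JSON array of such objects where the first entry describes the logical
// camera and the remaining entries describe the physical cameras backing it.
// Illustrative layout only (actual keys depend on the emulated device):
//   [ { "android.lens.facing": ["BACK"], ... },   // logical camera
//     { ... },                                    // physical camera 1
//     { ... } ]                                   // physical camera 2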
status_t EmulatedCameraProviderHwlImpl::Initialize() {
  // GCH expects all physical ids to be bigger than the logical ones.
  // Resize 'static_metadata_' to fit all logical devices and insert them
  // accordingly; any remaining physical cameras are pushed to the back.
  std::string config;
  size_t logical_id = 0;
  std::vector<const char*> configurationFileLocation;
  char prop[PROPERTY_VALUE_MAX];
  if (!property_get_bool("ro.boot.qemu", false)) {
    for (const auto& iter : kConfigurationFileLocation) {
      configurationFileLocation.emplace_back(iter);
    }
  } else {
    // Android Studio Emulator
    if (!property_get_bool("ro.boot.qemu.legacy_fake_camera", false)) {
      if (WaitForQemuSfFakeCameraPropertyAvailable() == OK) {
        property_get("vendor.qemu.sf.fake_camera", prop, nullptr);
        if (strcmp(prop, "both") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[0]);
          configurationFileLocation.emplace_back(kConfigurationFileLocation[1]);
        } else if (strcmp(prop, "front") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[1]);
          logical_id = 1;
        } else if (strcmp(prop, "back") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[0]);
          logical_id = 1;
        }
      }
    }
  }
  static_metadata_.resize(ARRAY_SIZE(kConfigurationFileLocation));

  for (const auto& config_path : configurationFileLocation) {
    if (!android::base::ReadFileToString(config_path, &config)) {
      ALOGW("%s: Could not open configuration file: %s", __FUNCTION__,
            config_path);
      continue;
    }

    Json::CharReaderBuilder builder;
    std::unique_ptr<Json::CharReader> config_reader(builder.newCharReader());
    Json::Value root;
    std::string errorMessage;
    if (!config_reader->parse(&*config.begin(), &*config.end(), &root, &errorMessage)) {
      ALOGE("Could not parse configuration file: %s",
            errorMessage.c_str());
      return BAD_VALUE;
    }

    if (root.isArray()) {
      auto device_iter = root.begin();
      auto result_id = ParseCharacteristics(*device_iter, logical_id);
      if (logical_id != result_id) {
        return result_id;
      }
      device_iter++;

      // The first device entry is always the logical camera, followed by the
      // physical devices. There must be at least two physical devices.
      camera_id_map_.emplace(logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
      if (root.size() >= 3) {
        camera_id_map_[logical_id].reserve(root.size() - 1);
        size_t current_physical_device = 0;
        while (device_iter != root.end()) {
          auto physical_id = ParseCharacteristics(*device_iter, /*id*/ -1);
          // ParseCharacteristics() reports failures as (negative) status_t
          // codes; since its return type is unsigned, cast before checking.
          if (static_cast<int32_t>(physical_id) < 0) {
            return static_cast<status_t>(physical_id);
          }
          // Only notify unavailable physical camera if there are more than 2
          // physical cameras backing the logical camera
          auto device_status = (current_physical_device < 2) ? CameraDeviceStatus::kPresent :
              CameraDeviceStatus::kNotPresent;
          camera_id_map_[logical_id].push_back(std::make_pair(device_status, physical_id));
          device_iter++; current_physical_device++;
        }

        auto physical_devices = std::make_unique<PhysicalDeviceMap>();
        for (const auto& physical_device : camera_id_map_[logical_id]) {
          physical_devices->emplace(
              physical_device.second, std::make_pair(physical_device.first,
              HalCameraMetadata::Clone(
                  static_metadata_[physical_device.second].get())));
        }
        auto updated_logical_chars =
            EmulatedLogicalRequestState::AdaptLogicalCharacteristics(
                HalCameraMetadata::Clone(static_metadata_[logical_id].get()),
                std::move(physical_devices));
        if (updated_logical_chars.get() != nullptr) {
          static_metadata_[logical_id].swap(updated_logical_chars);
        } else {
          ALOGE("%s: Failed to update logical camera characteristics!",
                __FUNCTION__);
          return BAD_VALUE;
        }
      }
    } else {
      auto result_id = ParseCharacteristics(root, logical_id);
      if (result_id != logical_id) {
        return result_id;
      }
      camera_id_map_.emplace(logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
    }

    logical_id++;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::SetCallback(
    const HwlCameraProviderCallback& callback) {
  torch_cb_ = callback.torch_mode_status_change;
  physical_camera_status_cb_ = callback.physical_camera_device_status_change;

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::TriggerDeferredCallbacks() {
  std::lock_guard<std::mutex> lock(status_callback_future_lock_);
  if (status_callback_future_.valid()) {
    return OK;
  }

  status_callback_future_ = std::async(
      std::launch::async,
      &EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable, this);
  return OK;
}

void EmulatedCameraProviderHwlImpl::WaitForStatusCallbackFuture() {
  {
    std::lock_guard<std::mutex> lock(status_callback_future_lock_);
    if (!status_callback_future_.valid()) {
      // If there is no future pending, construct a dummy one.
      status_callback_future_ = std::async([]() { return; });
    }
  }
  status_callback_future_.wait();
}

void EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable() {
  for (const auto& one_map : camera_id_map_) {
    for (const auto& physical_device : one_map.second) {
      if (physical_device.first != CameraDeviceStatus::kNotPresent) {
        continue;
      }

      uint32_t logical_camera_id = one_map.first;
      uint32_t physical_camera_id = physical_device.second;
      physical_camera_status_cb_(
          logical_camera_id, physical_camera_id,
          CameraDeviceStatus::kNotPresent);
    }
  }
}

status_t EmulatedCameraProviderHwlImpl::GetVendorTags(
    std::vector<VendorTagSection>* vendor_tag_sections) {
  if (vendor_tag_sections == nullptr) {
    ALOGE("%s: vendor_tag_sections is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // No vendor specific tags as of now
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::GetVisibleCameraIds(
    std::vector<std::uint32_t>* camera_ids) {
  if (camera_ids == nullptr) {
    ALOGE("%s: camera_ids is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  for (const auto& device : camera_id_map_) {
    camera_ids->push_back(device.first);
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateCameraDeviceHwl(
    uint32_t camera_id, std::unique_ptr<CameraDeviceHwl>* camera_device_hwl) {
  if (camera_device_hwl == nullptr) {
    ALOGE("%s: camera_device_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  if (camera_id_map_.find(camera_id) == camera_id_map_.end()) {
    ALOGE("%s: Invalid camera id: %u", __func__, camera_id);
    return BAD_VALUE;
  }

  std::unique_ptr<HalCameraMetadata> meta =
      HalCameraMetadata::Clone(static_metadata_[camera_id].get());

  std::shared_ptr<EmulatedTorchState> torch_state;
  camera_metadata_ro_entry entry;
  bool flash_supported = false;
  auto ret = meta->Get(ANDROID_FLASH_INFO_AVAILABLE, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
      flash_supported = true;
    }
  }

  if (flash_supported) {
    torch_state = std::make_shared<EmulatedTorchState>(camera_id, torch_cb_);
  }

  auto physical_devices = std::make_unique<PhysicalDeviceMap>();
  for (const auto& physical_device : camera_id_map_[camera_id]) {
      physical_devices->emplace(
          physical_device.second, std::make_pair(physical_device.first,
          HalCameraMetadata::Clone(static_metadata_[physical_device.second].get())));
  }
  *camera_device_hwl = EmulatedCameraDeviceHwlImpl::Create(
      camera_id, std::move(meta), std::move(physical_devices), torch_state);
  if (*camera_device_hwl == nullptr) {
    ALOGE("%s: Cannot create EmulatedCameraDeviceHwlImpl.", __FUNCTION__);
    return BAD_VALUE;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateBufferAllocatorHwl(
    std::unique_ptr<CameraBufferAllocatorHwl>* camera_buffer_allocator_hwl) {
  if (camera_buffer_allocator_hwl == nullptr) {
    ALOGE("%s: camera_buffer_allocator_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // Currently not supported
  return INVALID_OPERATION;
}

status_t EmulatedCameraProviderHwlImpl::NotifyDeviceStateChange(
    DeviceState /*device_state*/) {
  return OK;
}
}  // namespace android