/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedSensor"
#define ATRACE_TAG ATRACE_TAG_CAMERA

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif
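
// Two verbosity tiers are used in this file: ALOGV for per-frame messages
// (compiled in by uncommenting LOG_NDEBUG above) and ALOGVV for even noisier
// traces (compiled in by additionally uncommenting LOG_NNDEBUG, which maps
// ALOGVV onto ALOGV).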

#include "EmulatedSensor.h"

#include <cutils/properties.h>
#include <inttypes.h>
#include <libyuv.h>
#include <system/camera_metadata.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include <cmath>
#include <cstdlib>

#include "utils/ExifUtils.h"
#include "utils/HWLUtils.h"

namespace android {

using google_camera_hal::HalCameraMetadata;
using google_camera_hal::MessageType;
using google_camera_hal::NotifyMessage;

const uint32_t EmulatedSensor::kRegularSceneHandshake = 1;  // Scene handshake divider
const uint32_t EmulatedSensor::kReducedSceneHandshake = 2;  // Scene handshake divider

// 1 us - 30 sec
const nsecs_t EmulatedSensor::kSupportedExposureTimeRange[2] = {1000LL,
                                                                30000000000LL};

// ~1/30 s - 30 sec
const nsecs_t EmulatedSensor::kSupportedFrameDurationRange[2] = {33331760LL,
                                                                 30000000000LL};

const int32_t EmulatedSensor::kSupportedSensitivityRange[2] = {100, 1600};
const int32_t EmulatedSensor::kDefaultSensitivity = 100;  // ISO
const nsecs_t EmulatedSensor::kDefaultExposureTime = ms2ns(15);
const nsecs_t EmulatedSensor::kDefaultFrameDuration = ms2ns(33);
// Deadline within which we should return results, to avoid skewing the
// frame cycle due to external delays.
const nsecs_t EmulatedSensor::kReturnResultThreshod = 3 * kDefaultFrameDuration;

// Sensor defaults
const uint8_t EmulatedSensor::kSupportedColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
const uint32_t EmulatedSensor::kDefaultMaxRawValue = 4000;
const uint32_t EmulatedSensor::kDefaultBlackLevelPattern[4] = {1000, 1000, 1000,
                                                               1000};

const nsecs_t EmulatedSensor::kMinVerticalBlank = 10000L;

// Sensor sensitivity
const float EmulatedSensor::kSaturationVoltage = 0.520f;
const uint32_t EmulatedSensor::kSaturationElectrons = 2000;
const float EmulatedSensor::kVoltsPerLuxSecond = 0.100f;

const float EmulatedSensor::kElectronsPerLuxSecond =
    EmulatedSensor::kSaturationElectrons / EmulatedSensor::kSaturationVoltage *
    EmulatedSensor::kVoltsPerLuxSecond;
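// With the constants above this works out to roughly
// 2000 / 0.520 * 0.100 ≈ 384.6 electrons per lux-second.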

const float EmulatedSensor::kReadNoiseStddevBeforeGain = 1.177;  // in electrons
const float EmulatedSensor::kReadNoiseStddevAfterGain =
    2.100;  // in digital counts
const float EmulatedSensor::kReadNoiseVarBeforeGain =
    EmulatedSensor::kReadNoiseStddevBeforeGain *
    EmulatedSensor::kReadNoiseStddevBeforeGain;
const float EmulatedSensor::kReadNoiseVarAfterGain =
    EmulatedSensor::kReadNoiseStddevAfterGain *
    EmulatedSensor::kReadNoiseStddevAfterGain;
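// Numerically: kReadNoiseVarBeforeGain ≈ 1.385 e-^2 and
// kReadNoiseVarAfterGain = 4.41 counts^2.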

const uint32_t EmulatedSensor::kMaxRAWStreams = 1;
const uint32_t EmulatedSensor::kMaxProcessedStreams = 3;
const uint32_t EmulatedSensor::kMaxStallingStreams = 2;
const uint32_t EmulatedSensor::kMaxInputStreams = 1;

const uint32_t EmulatedSensor::kMaxLensShadingMapSize[2]{64, 64};
const int32_t EmulatedSensor::kFixedBitPrecision = 64;  // 6-bit
// In fixed-point math, saturation point of sensor after gain
const int32_t EmulatedSensor::kSaturationPoint = kFixedBitPrecision * 255;
const camera_metadata_rational EmulatedSensor::kNeutralColorPoint[3] = {
    {255, 1}, {255, 1}, {255, 1}};
const float EmulatedSensor::kGreenSplit = 1.f;  // No divergence
// Reduce memory usage by allowing only one buffer in sensor, one in jpeg
// compressor and one pending request to avoid stalls.
const uint8_t EmulatedSensor::kPipelineDepth = 3;

const camera_metadata_rational EmulatedSensor::kDefaultColorTransform[9] = {
    {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
const float EmulatedSensor::kDefaultColorCorrectionGains[4] = {1.0f, 1.0f, 1.0f,
                                                               1.0f};

const float EmulatedSensor::kDefaultToneMapCurveRed[4] = {.0f, .0f, 1.f, 1.f};
const float EmulatedSensor::kDefaultToneMapCurveGreen[4] = {.0f, .0f, 1.f, 1.f};
const float EmulatedSensor::kDefaultToneMapCurveBlue[4] = {.0f, .0f, 1.f, 1.f};
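// The tone map curves are stored as (Pin, Pout) control-point pairs, so the
// {0, 0} -> {1, 1} defaults above describe an identity (linear) mapping.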

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
  // Modifier is based on IEEE floating-point representation; the
  // manipulations boil down to finding approximate log2, dividing by two, and
  // then inverting the log2. A bias is added to make the relative error
  // symmetric about the real answer.
  const int32_t modifier = 0x1FBB4000;

  int32_t r_i = *(int32_t*)(&r);
  r_i = (r_i >> 1) + modifier;

  return *(float*)(&r_i);
}
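
// Illustrative sketch (not part of the build): a host-side check of
// sqrtf_approx() against the libm result. The harness below is an assumption
// added for demonstration only; it is not code from this module.
#if 0
#include <cmath>
#include <cstdio>

int main() {
  float max_rel_err = 0.f;
  for (float r = 0.01f; r < 1e6f; r *= 1.1f) {
    float rel_err = std::fabs(sqrtf_approx(r) - std::sqrt(r)) / std::sqrt(r);
    if (rel_err > max_rel_err) max_rel_err = rel_err;
  }
  // Per the comment above, this should stay below roughly 0.036.
  printf("max relative error: %f\n", max_rel_err);
  return 0;
}
#endif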

EmulatedSensor::EmulatedSensor() : Thread(false), got_vsync_(false) {
  gamma_table_.resize(kSaturationPoint + 1);
  for (int32_t i = 0; i <= kSaturationPoint; i++) {
    gamma_table_[i] = ApplysRGBGamma(i, kSaturationPoint);
  }
}
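
// For reference, a minimal sketch of the standard sRGB opto-electronic
// transfer function that a gamma table like the one above would typically
// encode. ApplysRGBGamma() is defined elsewhere and may differ in detail;
// the helper below is an assumption for illustration only.
#if 0
#include <cmath>

// Maps a linear value in [0, max] to its sRGB-encoded counterpart in [0, max].
static int32_t ApplysRGBGammaSketch(int32_t value, int32_t max) {
  double linear = static_cast<double>(value) / max;
  double encoded = (linear <= 0.0031308)
                       ? 12.92 * linear
                       : 1.055 * std::pow(linear, 1.0 / 2.4) - 0.055;
  return static_cast<int32_t>(encoded * max + 0.5);
}
#endif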

EmulatedSensor::~EmulatedSensor() {
  ShutDown();
}

bool EmulatedSensor::AreCharacteristicsSupported(
    const SensorCharacteristics& characteristics) {
  if ((characteristics.width == 0) || (characteristics.height == 0)) {
    ALOGE("%s: Invalid sensor size %zux%zu", __FUNCTION__,
          characteristics.width, characteristics.height);
    return false;
  }

  if ((characteristics.full_res_width == 0) ||
      (characteristics.full_res_height == 0)) {
    ALOGE("%s: Invalid sensor full res size %zux%zu", __FUNCTION__,
          characteristics.full_res_width, characteristics.full_res_height);
    return false;
  }

  if ((characteristics.exposure_time_range[0] >=
       characteristics.exposure_time_range[1]) ||
      ((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
       (characteristics.exposure_time_range[1] >
        kSupportedExposureTimeRange[1]))) {
    ALOGE("%s: Unsupported exposure range", __FUNCTION__);
    return false;
  }

  if ((characteristics.frame_duration_range[0] >=
       characteristics.frame_duration_range[1]) ||
      ((characteristics.frame_duration_range[0] <
        kSupportedFrameDurationRange[0]) ||
       (characteristics.frame_duration_range[1] >
        kSupportedFrameDurationRange[1]))) {
    ALOGE("%s: Unsupported frame duration range", __FUNCTION__);
    return false;
  }

  if ((characteristics.sensitivity_range[0] >=
       characteristics.sensitivity_range[1]) ||
      ((characteristics.sensitivity_range[0] < kSupportedSensitivityRange[0]) ||
       (characteristics.sensitivity_range[1] > kSupportedSensitivityRange[1])) ||
      (!((kDefaultSensitivity >= characteristics.sensitivity_range[0]) &&
         (kDefaultSensitivity <= characteristics.sensitivity_range[1])))) {
    ALOGE("%s: Unsupported sensitivity range", __FUNCTION__);
    return false;
  }

  if (characteristics.color_arangement != kSupportedColorFilterArrangement) {
    ALOGE("%s: Unsupported color arrangement!", __FUNCTION__);
    return false;
  }

  for (const auto& blackLevel : characteristics.black_level_pattern) {
    if (blackLevel >= characteristics.max_raw_value) {
      ALOGE("%s: Black level matches or exceeds max RAW value!", __FUNCTION__);
      return false;
    }
  }

  if ((characteristics.frame_duration_range[0] / characteristics.height) == 0) {
    ALOGE("%s: Zero row readout time!", __FUNCTION__);
    return false;
  }

  if (characteristics.max_raw_streams > kMaxRAWStreams) {
    ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
          __FUNCTION__, characteristics.max_raw_streams, kMaxRAWStreams);
    return false;
  }

  if (characteristics.max_processed_streams > kMaxProcessedStreams) {
    ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
          __FUNCTION__, characteristics.max_processed_streams,
          kMaxProcessedStreams);
    return false;
  }

  if (characteristics.max_stalling_streams > kMaxStallingStreams) {
    ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
          __FUNCTION__, characteristics.max_stalling_streams,
          kMaxStallingStreams);
    return false;
  }

  if (characteristics.max_input_streams > kMaxInputStreams) {
    ALOGE("%s: Input streams maximum %u exceeds supported maximum %u",
          __FUNCTION__, characteristics.max_input_streams, kMaxInputStreams);
    return false;
  }

  if ((characteristics.lens_shading_map_size[0] > kMaxLensShadingMapSize[0]) ||
      (characteristics.lens_shading_map_size[1] > kMaxLensShadingMapSize[1])) {
    ALOGE("%s: Lens shading map [%dx%d] exceeds supported maximum [%dx%d]",
          __FUNCTION__, characteristics.lens_shading_map_size[0],
          characteristics.lens_shading_map_size[1], kMaxLensShadingMapSize[0],
          kMaxLensShadingMapSize[1]);
    return false;
  }

  if (characteristics.max_pipeline_depth < kPipelineDepth) {
    ALOGE("%s: Pipeline depth %d smaller than supported minimum %d",
          __FUNCTION__, characteristics.max_pipeline_depth, kPipelineDepth);
    return false;
  }

  return true;
}
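
// Illustrative sketch (not part of the build): populating a
// SensorCharacteristics instance that passes every check above. The field
// names come from AreCharacteristicsSupported(); the concrete values are
// assumptions chosen to fall inside the supported ranges.
#if 0
static bool SelfCheckCharacteristics() {
  SensorCharacteristics chars{};
  chars.width = 1920;
  chars.height = 1080;
  chars.full_res_width = 3840;
  chars.full_res_height = 2160;
  chars.exposure_time_range[0] = 1000LL;          // 1 us
  chars.exposure_time_range[1] = 30000000000LL;   // 30 sec
  chars.frame_duration_range[0] = 33331760LL;     // ~1/30 s
  chars.frame_duration_range[1] = 30000000000LL;  // 30 sec
  chars.sensitivity_range[0] = 100;
  chars.sensitivity_range[1] = 1600;
  chars.color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
  chars.max_raw_value = 4000;
  for (auto& black_level : chars.black_level_pattern) {
    black_level = 1000;  // Must stay below max_raw_value.
  }
  chars.max_raw_streams = 1;
  chars.max_processed_streams = 3;
  chars.max_stalling_streams = 2;
  chars.max_input_streams = 1;
  chars.lens_shading_map_size[0] = 64;
  chars.lens_shading_map_size[1] = 64;
  chars.max_pipeline_depth = 8;  // Must be >= kPipelineDepth.
  return EmulatedSensor::AreCharacteristicsSupported(chars);
}
#endif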

static void SplitStreamCombination(
    const StreamConfiguration& original_config,
    StreamConfiguration* default_mode_config,
    StreamConfiguration* max_resolution_mode_config,
    StreamConfiguration* input_stream_config) {
  // Go through the streams
  if (default_mode_config == nullptr || max_resolution_mode_config == nullptr ||
      input_stream_config == nullptr) {
    ALOGE("%s: Input stream / output stream configs are nullptr", __FUNCTION__);
    return;
  }
  for (const auto& stream : original_config.streams) {
    if (stream.stream_type == google_camera_hal::StreamType::kInput) {
      input_stream_config->streams.push_back(stream);
      continue;
    }
    if (stream.used_in_default_resolution_mode) {
      default_mode_config->streams.push_back(stream);
    }
    if (stream.used_in_max_resolution_mode) {
      max_resolution_mode_config->streams.push_back(stream);
    }
  }
}

bool EmulatedSensor::IsStreamCombinationSupported(
    uint32_t logical_id, const StreamConfiguration& config,
    StreamConfigurationMap& default_config_map,
    StreamConfigurationMap& max_resolution_config_map,
    const PhysicalStreamConfigurationMap& physical_map,
    const PhysicalStreamConfigurationMap& physical_map_max_resolution,
    const LogicalCharacteristics& sensor_chars) {
  StreamConfiguration default_mode_config, max_resolution_mode_config,
      input_stream_config;
  SplitStreamCombination(config, &default_mode_config,
                         &max_resolution_mode_config, &input_stream_config);

  return IsStreamCombinationSupported(logical_id, default_mode_config,
                                      default_config_map, physical_map,
                                      sensor_chars) &&
         IsStreamCombinationSupported(
             logical_id, max_resolution_mode_config, max_resolution_config_map,
             physical_map_max_resolution, sensor_chars, /*is_max_res*/ true) &&

         (IsStreamCombinationSupported(logical_id, input_stream_config,
                                       default_config_map, physical_map,
                                       sensor_chars) ||
          IsStreamCombinationSupported(
              logical_id, input_stream_config, max_resolution_config_map,
              physical_map_max_resolution, sensor_chars, /*is_max_res*/ true));
}

bool EmulatedSensor::IsStreamCombinationSupported(
    uint32_t logical_id, const StreamConfiguration& config,
    StreamConfigurationMap& config_map,
    const PhysicalStreamConfigurationMap& physical_map,
    const LogicalCharacteristics& sensor_chars, bool is_max_res) {
  uint32_t input_stream_count = 0;
  // Map from physical camera id to number of streams for that physical camera
  std::map<uint32_t, uint32_t> raw_stream_count;
  std::map<uint32_t, uint32_t> processed_stream_count;
  std::map<uint32_t, uint32_t> stalling_stream_count;

  // Only allow the stream configurations specified in
  // dynamicSizeStreamConfigurations.
  for (const auto& stream : config.streams) {
    bool is_dynamic_output =
        (stream.is_physical_camera_stream && stream.group_id != -1);
    if (stream.rotation != google_camera_hal::StreamRotation::kRotation0) {
      ALOGE("%s: Stream rotation: 0x%x not supported!", __FUNCTION__,
            stream.rotation);
      return false;
    }

    if (stream.stream_type == google_camera_hal::StreamType::kInput) {
      if (sensor_chars.at(logical_id).max_input_streams == 0) {
        ALOGE("%s: Input streams are not supported on this device!",
              __FUNCTION__);
        return false;
      }

      auto const& supported_outputs =
          config_map.GetValidOutputFormatsForInput(stream.format);
      if (supported_outputs.empty()) {
        ALOGE("%s: Input stream with format: 0x%x not supported on this device!",
              __FUNCTION__, stream.format);
        return false;
      }

      input_stream_count++;
    } else {
      if (stream.is_physical_camera_stream &&
          physical_map.find(stream.physical_camera_id) == physical_map.end()) {
        ALOGE("%s: Invalid physical camera id %d", __FUNCTION__,
              stream.physical_camera_id);
        return false;
      }

      if (is_dynamic_output) {
        auto dynamic_physical_output_formats =
            physical_map.at(stream.physical_camera_id)
                ->GetDynamicPhysicalStreamOutputFormats();
        if (dynamic_physical_output_formats.find(stream.format) ==
            dynamic_physical_output_formats.end()) {
          ALOGE("%s: Unsupported physical stream format %d", __FUNCTION__,
                stream.format);
          return false;
        }
      }

      switch (stream.format) {
        case HAL_PIXEL_FORMAT_BLOB:
          if ((stream.data_space != HAL_DATASPACE_V0_JFIF) &&
              (stream.data_space != HAL_DATASPACE_UNKNOWN)) {
            ALOGE("%s: Unsupported Blob dataspace 0x%x", __FUNCTION__,
                  stream.data_space);
            return false;
          }
          if (stream.is_physical_camera_stream) {
            stalling_stream_count[stream.physical_camera_id]++;
          } else {
            for (const auto& p : physical_map) {
              stalling_stream_count[p.first]++;
            }
          }
          break;
        case HAL_PIXEL_FORMAT_RAW16: {
          const SensorCharacteristics& sensor_char =
              stream.is_physical_camera_stream
                  ? sensor_chars.at(stream.physical_camera_id)
                  : sensor_chars.at(logical_id);
          auto sensor_height =
              is_max_res ? sensor_char.full_res_height : sensor_char.height;
          auto sensor_width =
              is_max_res ? sensor_char.full_res_width : sensor_char.width;
          if (stream.height != sensor_height || stream.width != sensor_width) {
            ALOGE(
                "%s, RAW16 buffer height %d and width %d must match sensor "
                "height: %zu"
                " and width: %zu",
                __FUNCTION__, stream.height, stream.width, sensor_height,
                sensor_width);
            return false;
          }
          if (stream.is_physical_camera_stream) {
            raw_stream_count[stream.physical_camera_id]++;
          } else {
            for (const auto& p : physical_map) {
              raw_stream_count[p.first]++;
            }
          }
        } break;
        default:
          if (stream.is_physical_camera_stream) {
            processed_stream_count[stream.physical_camera_id]++;
          } else {
            for (const auto& p : physical_map) {
              processed_stream_count[p.first]++;
            }
          }
      }

      auto output_sizes =
          is_dynamic_output
              ? physical_map.at(stream.physical_camera_id)
                    ->GetDynamicPhysicalStreamOutputSizes(stream.format)
              : stream.is_physical_camera_stream
                    ? physical_map.at(stream.physical_camera_id)
                          ->GetOutputSizes(stream.format)
                    : config_map.GetOutputSizes(stream.format);

      auto stream_size = std::make_pair(stream.width, stream.height);
      if (output_sizes.find(stream_size) == output_sizes.end()) {
        ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
              __FUNCTION__, stream.width, stream.height, stream.format);
        return false;
      }
    }
  }

  for (const auto& raw_count : raw_stream_count) {
    unsigned int max_raw_streams =
        sensor_chars.at(raw_count.first).max_raw_streams +
        (is_max_res
             ? 1
             : 0);  // The extra raw stream is allowed for remosaic reprocessing.
    if (raw_count.second > max_raw_streams) {
      ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
            __FUNCTION__, raw_count.second, max_raw_streams);
      return false;
    }
  }

  for (const auto& stalling_count : stalling_stream_count) {
    if (stalling_count.second >
        sensor_chars.at(stalling_count.first).max_stalling_streams) {
      ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
            __FUNCTION__, stalling_count.second,
            sensor_chars.at(stalling_count.first).max_stalling_streams);
      return false;
    }
  }

  for (const auto& processed_count : processed_stream_count) {
    if (processed_count.second >
        sensor_chars.at(processed_count.first).max_processed_streams) {
      ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
            __FUNCTION__, processed_count.second,
            sensor_chars.at(processed_count.first).max_processed_streams);
      return false;
    }
  }

  if (input_stream_count > sensor_chars.at(logical_id).max_input_streams) {
    ALOGE("%s: Input stream maximum %u exceeds supported maximum %u",
          __FUNCTION__, input_stream_count,
          sensor_chars.at(logical_id).max_input_streams);
    return false;
  }

  return true;
}

status_t EmulatedSensor::StartUp(
    uint32_t logical_camera_id,
    std::unique_ptr<LogicalCharacteristics> logical_chars) {
  if (isRunning()) {
    return OK;
  }

  if (logical_chars.get() == nullptr) {
    return BAD_VALUE;
  }

  chars_ = std::move(logical_chars);
  auto device_chars = chars_->find(logical_camera_id);
  if (device_chars == chars_->end()) {
    ALOGE(
        "%s: Logical camera id: %u absent from logical camera characteristics!",
        __FUNCTION__, logical_camera_id);
    return BAD_VALUE;
  }

  for (const auto& it : *chars_) {
    if (!AreCharacteristicsSupported(it.second)) {
      ALOGE("%s: Sensor characteristics for camera id: %u not supported!",
            __FUNCTION__, it.first);
      return BAD_VALUE;
    }
  }

  logical_camera_id_ = logical_camera_id;
  scene_ = new EmulatedScene(
      device_chars->second.full_res_width, device_chars->second.full_res_height,
      kElectronsPerLuxSecond, device_chars->second.orientation,
      device_chars->second.is_front_facing);
  scene_->InitializeSensorQueue();
  jpeg_compressor_ = std::make_unique<JpegCompressor>();

  auto res = run(LOG_TAG, ANDROID_PRIORITY_URGENT_DISPLAY);
  if (res != OK) {
    ALOGE("Unable to start up sensor capture thread: %d", res);
  }

  return res;
}

status_t EmulatedSensor::ShutDown() {
  int res;
  res = requestExitAndWait();
  if (res != OK) {
    ALOGE("Unable to shut down sensor capture thread: %d", res);
  }
  return res;
}

void EmulatedSensor::SetCurrentRequest(
    std::unique_ptr<LogicalCameraSettings> logical_settings,
    std::unique_ptr<HwlPipelineResult> result,
    std::unique_ptr<Buffers> input_buffers,
    std::unique_ptr<Buffers> output_buffers) {
  Mutex::Autolock lock(control_mutex_);
  current_settings_ = std::move(logical_settings);
  current_result_ = std::move(result);
  current_input_buffers_ = std::move(input_buffers);
  current_output_buffers_ = std::move(output_buffers);
}

bool EmulatedSensor::WaitForVSyncLocked(nsecs_t reltime) {
  got_vsync_ = false;
  while (!got_vsync_) {
    auto res = vsync_.waitRelative(control_mutex_, reltime);
    if (res != OK && res != TIMED_OUT) {
      ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
      return false;
    }
  }

  return got_vsync_;
}

bool EmulatedSensor::WaitForVSync(nsecs_t reltime) {
  Mutex::Autolock lock(control_mutex_);

  return WaitForVSyncLocked(reltime);
}

status_t EmulatedSensor::Flush() {
  Mutex::Autolock lock(control_mutex_);
  auto ret = WaitForVSyncLocked(kSupportedFrameDurationRange[1]);

  // First recreate the jpeg compressor. This will abort any ongoing processing
  // and flush any pending jobs.
  jpeg_compressor_ = std::make_unique<JpegCompressor>();

  // Then return any pending frames here
  if ((current_input_buffers_.get() != nullptr) &&
      (!current_input_buffers_->empty())) {
    current_input_buffers_->clear();
  }
  if ((current_output_buffers_.get() != nullptr) &&
      (!current_output_buffers_->empty())) {
    for (const auto& buffer : *current_output_buffers_) {
      buffer->stream_buffer.status = BufferStatus::kError;
    }

    if ((current_result_.get() != nullptr) &&
        (current_result_->result_metadata.get() != nullptr)) {
      if (current_output_buffers_->at(0)->callback.notify != nullptr) {
        NotifyMessage msg{
            .type = MessageType::kError,
            .message.error = {
                .frame_number = current_output_buffers_->at(0)->frame_number,
                .error_stream_id = -1,
                .error_code = ErrorCode::kErrorResult,
            }};

        current_output_buffers_->at(0)->callback.notify(
            current_result_->pipeline_id, msg);
      }
    }

    current_output_buffers_->clear();
  }

  return ret ? OK : TIMED_OUT;
}

bool EmulatedSensor::threadLoop() {
  ATRACE_CALL();
  /**
   * Sensor capture operation main loop.
   */

  /**
   * Stage 1: Read in latest control parameters
   */
  std::unique_ptr<Buffers> next_buffers;
  std::unique_ptr<Buffers> next_input_buffer;
  std::unique_ptr<HwlPipelineResult> next_result;
  std::unique_ptr<LogicalCameraSettings> settings;
  HwlPipelineCallback callback = {nullptr, nullptr};
  {
    Mutex::Autolock lock(control_mutex_);
    std::swap(settings, current_settings_);
    std::swap(next_buffers, current_output_buffers_);
    std::swap(next_input_buffer, current_input_buffers_);
    std::swap(next_result, current_result_);

    // Signal VSync for start of readout
    ALOGVV("Sensor VSync");
    got_vsync_ = true;
    vsync_.signal();
  }

  auto frame_duration = EmulatedSensor::kSupportedFrameDurationRange[0];
  // Frame duration must always be the same among all physical devices
  if ((settings.get() != nullptr) && (!settings->empty())) {
    frame_duration = settings->begin()->second.frame_duration;
  }

  nsecs_t start_real_time = systemTime();
  // Stagefright cares about system time for timestamps, so base simulated
  // time on that.
  nsecs_t frame_end_real_time = start_real_time + frame_duration;

  /**
   * Stage 2: Capture new image
   */
  next_capture_time_ = frame_end_real_time;

  sensor_binning_factor_info_.clear();

  bool reprocess_request = false;
  if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
    if (next_input_buffer->size() > 1) {
      ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
    }

    camera_metadata_ro_entry_t entry;
    auto ret =
        next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
    if ((ret == OK) && (entry.count == 1)) {
      next_capture_time_ = entry.data.i64[0];
    } else {
      ALOGW("%s: Reprocess timestamp absent!", __FUNCTION__);
    }

    reprocess_request = true;
  }

  if ((next_buffers != nullptr) && (settings != nullptr)) {
    callback = next_buffers->at(0)->callback;
    if (callback.notify != nullptr) {
      NotifyMessage msg{
          .type = MessageType::kShutter,
          .message.shutter = {
              .frame_number = next_buffers->at(0)->frame_number,
              .timestamp_ns = static_cast<uint64_t>(next_capture_time_)}};
      callback.notify(next_result->pipeline_id, msg);
    }
    auto b = next_buffers->begin();
    while (b != next_buffers->end()) {
      auto device_settings = settings->find((*b)->camera_id);
      if (device_settings == settings->end()) {
        ALOGE("%s: Sensor settings absent for device: %d", __func__,
              (*b)->camera_id);
        b = next_buffers->erase(b);
        continue;
      }

      auto device_chars = chars_->find((*b)->camera_id);
      if (device_chars == chars_->end()) {
        ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
              (*b)->camera_id);
        b = next_buffers->erase(b);
        continue;
      }

      sensor_binning_factor_info_[(*b)->camera_id].quad_bayer_sensor =
          device_chars->second.quad_bayer_sensor;

      ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
             ns2ms(device_settings->second.exposure_time),
             device_settings->second.gain);

      scene_->Initialize(device_chars->second.full_res_width,
                         device_chars->second.full_res_height,
                         kElectronsPerLuxSecond);
      scene_->SetExposureDuration((float)device_settings->second.exposure_time /
                                  1e9);
      scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
                                device_chars->second.color_filter.rY,
                                device_chars->second.color_filter.rZ,
                                device_chars->second.color_filter.grX,
                                device_chars->second.color_filter.grY,
                                device_chars->second.color_filter.grZ,
                                device_chars->second.color_filter.gbX,
                                device_chars->second.color_filter.gbY,
                                device_chars->second.color_filter.gbZ,
                                device_chars->second.color_filter.bX,
                                device_chars->second.color_filter.bY,
                                device_chars->second.color_filter.bZ);
      scene_->SetTestPattern(device_settings->second.test_pattern_mode ==
                             ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
      scene_->SetTestPatternData(device_settings->second.test_pattern_data);

      uint32_t handshake_divider =
          (device_settings->second.video_stab ==
           ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON)
              ? kReducedSceneHandshake
              : kRegularSceneHandshake;
      scene_->CalculateScene(next_capture_time_, handshake_divider);

      (*b)->stream_buffer.status = BufferStatus::kOk;
      bool max_res_mode = device_settings->second.sensor_pixel_mode;
      sensor_binning_factor_info_[(*b)->camera_id].max_res_request =
          max_res_mode;
      switch ((*b)->format) {
        case PixelFormat::RAW16:
          sensor_binning_factor_info_[(*b)->camera_id].has_raw_stream = true;
          break;
        default:
          sensor_binning_factor_info_[(*b)->camera_id].has_non_raw_stream = true;
      }

      // TODO: remove hack. Implement RAW -> YUV / JPEG reprocessing http://b/192382904
      bool treat_as_reprocess =
          (device_chars->second.quad_bayer_sensor && reprocess_request &&
           (*next_input_buffer->begin())->format == PixelFormat::RAW16)
              ? false
              : reprocess_request;

      switch ((*b)->format) {
        case PixelFormat::RAW16:
          if (!reprocess_request) {
            uint64_t min_full_res_raw_size =
                2 * device_chars->second.full_res_width *
                device_chars->second.full_res_height;
            uint64_t min_default_raw_size =
                2 * device_chars->second.width * device_chars->second.height;
            bool default_mode_for_qb =
                device_chars->second.quad_bayer_sensor && !max_res_mode;
            size_t buffer_size = (*b)->plane.img.buffer_size;
            if (default_mode_for_qb) {
              if (buffer_size < min_default_raw_size) {
                ALOGE(
                    "%s: Output buffer size too small for RAW capture in "
                    "default mode, expected %" PRIu64 ", got %zu, for camera id %d",
                    __FUNCTION__, min_default_raw_size, buffer_size,
                    (*b)->camera_id);
                (*b)->stream_buffer.status = BufferStatus::kError;
                break;
              }
            } else if (buffer_size < min_full_res_raw_size) {
              ALOGE(
                  "%s: Output buffer size too small for RAW capture in max res "
                  "mode, expected %" PRIu64 ", got %zu, for camera id %d",
                  __FUNCTION__, min_full_res_raw_size, buffer_size,
                  (*b)->camera_id);
              (*b)->stream_buffer.status = BufferStatus::kError;
              break;
            }
            if (default_mode_for_qb) {
              CaptureRawBinned(
                  (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
                  device_settings->second.gain, device_chars->second);
            } else {
              CaptureRawFullRes(
                  (*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
                  device_settings->second.gain, device_chars->second);
            }
          } else {
            if (!device_chars->second.quad_bayer_sensor) {
              ALOGE(
                  "%s: Reprocess requests with output format %x not supported!",
                  __FUNCTION__, (*b)->format);
              (*b)->stream_buffer.status = BufferStatus::kError;
              break;
            }
            // Remosaic the RAW input buffer
            if ((*next_input_buffer->begin())->width != (*b)->width ||
                (*next_input_buffer->begin())->height != (*b)->height) {
              ALOGE(
                  "%s: RAW16 input dimensions %dx%d don't match output buffer "
                  "dimensions %dx%d",
                  __FUNCTION__, (*next_input_buffer->begin())->width,
                  (*next_input_buffer->begin())->height, (*b)->width,
                  (*b)->height);
              (*b)->stream_buffer.status = BufferStatus::kError;
              break;
            }
            ALOGV("%s remosaic Raw16 Image", __FUNCTION__);
            RemosaicRAW16Image(
                (uint16_t*)(*next_input_buffer->begin())->plane.img.img,
                (uint16_t*)(*b)->plane.img.img, (*b)->plane.img.stride_in_bytes,
                device_chars->second);
          }
          break;
        case PixelFormat::RGB_888:
          if (!reprocess_request) {
            CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
                       (*b)->plane.img.stride_in_bytes, RGBLayout::RGB,
                       device_settings->second.gain, device_chars->second);
          } else {
            ALOGE("%s: Reprocess requests with output format %x not supported!",
                  __FUNCTION__, (*b)->format);
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
          break;
        case PixelFormat::RGBA_8888:
          if (!reprocess_request) {
            CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
                       (*b)->plane.img.stride_in_bytes, RGBLayout::RGBA,
                       device_settings->second.gain, device_chars->second);
          } else {
            ALOGE("%s: Reprocess requests with output format %x not supported!",
                  __FUNCTION__, (*b)->format);
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
          break;
        case PixelFormat::BLOB:
          if ((*b)->dataSpace == HAL_DATASPACE_V0_JFIF) {
            YUV420Frame yuv_input{
                .width = treat_as_reprocess
                             ? (*next_input_buffer->begin())->width
                             : 0,
                .height = treat_as_reprocess
                              ? (*next_input_buffer->begin())->height
                              : 0,
                .planes = treat_as_reprocess
                              ? (*next_input_buffer->begin())->plane.img_y_crcb
                              : YCbCrPlanes{}};
            auto jpeg_input = std::make_unique<JpegYUV420Input>();
            jpeg_input->width = (*b)->width;
            jpeg_input->height = (*b)->height;
            auto img =
                new uint8_t[(jpeg_input->width * jpeg_input->height * 3) / 2];
            jpeg_input->yuv_planes = {
                .img_y = img,
                .img_cb = img + jpeg_input->width * jpeg_input->height,
                .img_cr = img + (jpeg_input->width * jpeg_input->height * 5) / 4,
                .y_stride = jpeg_input->width,
                .cbcr_stride = jpeg_input->width / 2,
                .cbcr_step = 1};
            jpeg_input->buffer_owner = true;
            YUV420Frame yuv_output{.width = jpeg_input->width,
                                   .height = jpeg_input->height,
                                   .planes = jpeg_input->yuv_planes};

            bool rotate = device_settings->second.rotate_and_crop ==
                          ANDROID_SCALER_ROTATE_AND_CROP_90;
            ProcessType process_type =
                treat_as_reprocess
                    ? REPROCESS
                    : (device_settings->second.edge_mode ==
                       ANDROID_EDGE_MODE_HIGH_QUALITY)
                          ? HIGH_QUALITY
                          : REGULAR;
            auto ret = ProcessYUV420(
                yuv_input, yuv_output, device_settings->second.gain,
                process_type, device_settings->second.zoom_ratio,
                rotate, device_chars->second);
            if (ret != 0) {
              (*b)->stream_buffer.status = BufferStatus::kError;
              break;
            }

            auto jpeg_job = std::make_unique<JpegYUV420Job>();
            jpeg_job->exif_utils = std::unique_ptr<ExifUtils>(
                ExifUtils::Create(device_chars->second));
            jpeg_job->input = std::move(jpeg_input);
            // If jpeg compression is successful, then the jpeg compressor
            // must set the corresponding status.
            (*b)->stream_buffer.status = BufferStatus::kError;
            std::swap(jpeg_job->output, *b);
            jpeg_job->result_metadata =
                HalCameraMetadata::Clone(next_result->result_metadata.get());

            Mutex::Autolock lock(control_mutex_);
            jpeg_compressor_->QueueYUV420(std::move(jpeg_job));
          } else {
            ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
                  (*b)->format, (*b)->dataSpace);
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
          break;
        case PixelFormat::YCRCB_420_SP:
        case PixelFormat::YCBCR_420_888: {
          YUV420Frame yuv_input{
              .width =
                  treat_as_reprocess ? (*next_input_buffer->begin())->width : 0,
              .height =
                  treat_as_reprocess ? (*next_input_buffer->begin())->height : 0,
              .planes = treat_as_reprocess
                            ? (*next_input_buffer->begin())->plane.img_y_crcb
                            : YCbCrPlanes{}};
          YUV420Frame yuv_output{.width = (*b)->width,
                                 .height = (*b)->height,
                                 .planes = (*b)->plane.img_y_crcb};
          bool rotate = device_settings->second.rotate_and_crop ==
                        ANDROID_SCALER_ROTATE_AND_CROP_90;
          ProcessType process_type = treat_as_reprocess
                                         ? REPROCESS
                                         : (device_settings->second.edge_mode ==
                                            ANDROID_EDGE_MODE_HIGH_QUALITY)
                                               ? HIGH_QUALITY
                                               : REGULAR;
          auto ret = ProcessYUV420(
              yuv_input, yuv_output, device_settings->second.gain,
              process_type, device_settings->second.zoom_ratio,
              rotate, device_chars->second);
          if (ret != 0) {
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
        } break;
        case PixelFormat::Y16:
          if (!reprocess_request) {
            if ((*b)->dataSpace == HAL_DATASPACE_DEPTH) {
              CaptureDepth((*b)->plane.img.img, device_settings->second.gain,
                           (*b)->width, (*b)->height,
                           (*b)->plane.img.stride_in_bytes,
                           device_chars->second);
            } else {
              ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
                    (*b)->format, (*b)->dataSpace);
              (*b)->stream_buffer.status = BufferStatus::kError;
            }
          } else {
            ALOGE("%s: Reprocess requests with output format %x not supported!",
                  __FUNCTION__, (*b)->format);
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
          break;
        case PixelFormat::YCBCR_P010:
          if (!reprocess_request) {
            bool rotate = device_settings->second.rotate_and_crop ==
                          ANDROID_SCALER_ROTATE_AND_CROP_90;
            CaptureYUV420((*b)->plane.img_y_crcb, (*b)->width, (*b)->height,
                          device_settings->second.gain,
                          device_settings->second.zoom_ratio, rotate,
                          device_chars->second);
          } else {
            ALOGE("%s: Reprocess requests with output format %x not supported!",
                  __FUNCTION__, (*b)->format);
            (*b)->stream_buffer.status = BufferStatus::kError;
          }
          break;
        default:
          ALOGE("%s: Unknown format %x, no output", __FUNCTION__, (*b)->format);
          (*b)->stream_buffer.status = BufferStatus::kError;
          break;
      }

      b = next_buffers->erase(b);
    }
  }

  if (reprocess_request) {
    auto input_buffer = next_input_buffer->begin();
    while (input_buffer != next_input_buffer->end()) {
      (*input_buffer++)->stream_buffer.status = BufferStatus::kOk;
    }
    next_input_buffer->clear();
  }

  nsecs_t work_done_real_time = systemTime();
  // Returning the results at this point is not entirely correct from a timing
  // perspective. Under ideal conditions, where 'ReturnResults' completes
  // in less than 'time_accuracy', we need to return the results after the
  // frame cycle expires. However, under real conditions various system
  // components like SurfaceFlinger, Encoder, LMK etc. could be consuming most
  // of the resources, and the duration of 'ReturnResults' can get comparable to
  // 'kDefaultFrameDuration'. This will skew the frame cycle and can result in
  // potential frame drops. To avoid this scenario, when we are running under
  // tight deadlines (less than 'kReturnResultThreshod') try to return the
  // results immediately. In all other cases with more relaxed deadlines
  // the occasional bump during 'ReturnResults' should not have any
  // noticeable effect.
  if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
    ReturnResults(callback, std::move(settings), std::move(next_result),
                  reprocess_request);
  }

  work_done_real_time = systemTime();
  ALOGVV("Sensor vertical blanking interval");
  const nsecs_t time_accuracy = 2e6;  // 2 ms of imprecision is ok
  if (work_done_real_time < frame_end_real_time - time_accuracy) {
    timespec t;
    t.tv_sec = (frame_end_real_time - work_done_real_time) / 1000000000L;
    t.tv_nsec = (frame_end_real_time - work_done_real_time) % 1000000000L;

    int ret;
    do {
      ret = nanosleep(&t, &t);
    } while (ret != 0);
  }
  nsecs_t end_real_time __unused = systemTime();
  ALOGVV("Frame cycle took %" PRIu64 " ms, target %" PRIu64 " ms",
         ns2ms(end_real_time - start_real_time), ns2ms(frame_duration));

  ReturnResults(callback, std::move(settings), std::move(next_result),
                reprocess_request);

  return true;
}

void EmulatedSensor::ReturnResults(
    HwlPipelineCallback callback,
    std::unique_ptr<LogicalCameraSettings> settings,
    std::unique_ptr<HwlPipelineResult> result, bool reprocess_request) {
  if ((callback.process_pipeline_result != nullptr) &&
      (result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
    auto logical_settings = settings->find(logical_camera_id_);
    if (logical_settings == settings->end()) {
      ALOGE("%s: Logical camera id: %u not found in settings!", __FUNCTION__,
            logical_camera_id_);
      return;
    }
    auto device_chars = chars_->find(logical_camera_id_);
    if (device_chars == chars_->end()) {
      ALOGE("%s: Sensor characteristics absent for device: %u", __func__,
            logical_camera_id_);
      return;
    }
    result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
                                 1);
    uint8_t raw_binned_factor_used = false;
    if (sensor_binning_factor_info_.find(logical_camera_id_) !=
        sensor_binning_factor_info_.end()) {
      auto& info = sensor_binning_factor_info_[logical_camera_id_];
      // Logical stream was included in the request
      if (!reprocess_request && info.quad_bayer_sensor && info.max_res_request &&
          info.has_raw_stream && !info.has_non_raw_stream) {
        raw_binned_factor_used = true;
      }
      result->result_metadata->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
                                   &raw_binned_factor_used, 1);
    }
    if (logical_settings->second.lens_shading_map_mode ==
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
      if ((device_chars->second.lens_shading_map_size[0] > 0) &&
          (device_chars->second.lens_shading_map_size[1] > 0)) {
        // Perfect lens, no actual shading needed.
        std::vector<float> lens_shading_map(
            device_chars->second.lens_shading_map_size[0] *
                device_chars->second.lens_shading_map_size[1] * 4,
            1.f);

        result->result_metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP,
                                     lens_shading_map.data(),
                                     lens_shading_map.size());
      }
    }
    if (logical_settings->second.report_video_stab) {
      result->result_metadata->Set(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
                                   &logical_settings->second.video_stab, 1);
    }
    if (logical_settings->second.report_edge_mode) {
      result->result_metadata->Set(ANDROID_EDGE_MODE,
                                   &logical_settings->second.edge_mode, 1);
    }
    if (logical_settings->second.report_neutral_color_point) {
      result->result_metadata->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
                                   kNeutralColorPoint,
                                   ARRAY_SIZE(kNeutralColorPoint));
    }
    if (logical_settings->second.report_green_split) {
      result->result_metadata->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
    }
    if (logical_settings->second.report_noise_profile) {
      CalculateAndAppendNoiseProfile(
          logical_settings->second.gain,
          GetBaseGainFactor(device_chars->second.max_raw_value),
          result->result_metadata.get());
    }
    if (logical_settings->second.report_rotate_and_crop) {
      result->result_metadata->Set(ANDROID_SCALER_ROTATE_AND_CROP,
          &logical_settings->second.rotate_and_crop, 1);
    }

    if (!result->physical_camera_results.empty()) {
      for (auto& it : result->physical_camera_results) {
        auto physical_settings = settings->find(it.first);
        if (physical_settings == settings->end()) {
          ALOGE("%s: Physical settings for camera id: %u are absent!",
                __FUNCTION__, it.first);
          continue;
        }
        uint8_t raw_binned_factor_used = false;
        if (sensor_binning_factor_info_.find(it.first) !=
            sensor_binning_factor_info_.end()) {
          auto& info = sensor_binning_factor_info_[it.first];
          // Physical stream was included in the request
          if (!reprocess_request && info.quad_bayer_sensor &&
              info.max_res_request && info.has_raw_stream &&
              !info.has_non_raw_stream) {
            raw_binned_factor_used = true;
          }
          it.second->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
                         &raw_binned_factor_used, 1);
        }
        // Sensor timestamp for all physical devices must be the same.
        it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
        if (physical_settings->second.report_neutral_color_point) {
          it.second->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, kNeutralColorPoint,
                         ARRAY_SIZE(kNeutralColorPoint));
        }
        if (physical_settings->second.report_green_split) {
          it.second->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
        }
        if (physical_settings->second.report_noise_profile) {
          auto device_chars = chars_->find(it.first);
          if (device_chars == chars_->end()) {
            ALOGE("%s: Sensor characteristics absent for device: %u", __func__,
                  it.first);
            continue;
          }
          CalculateAndAppendNoiseProfile(
              physical_settings->second.gain,
              GetBaseGainFactor(device_chars->second.max_raw_value),
              it.second.get());
        }
      }
    }

    callback.process_pipeline_result(std::move(result));
  }
}

void EmulatedSensor::CalculateAndAppendNoiseProfile(
    float gain /*in ISO*/, float base_gain_factor,
    HalCameraMetadata* result /*out*/) {
  if (result != nullptr) {
    float total_gain = gain / 100.0 * base_gain_factor;
    float noise_var_gain = total_gain * total_gain;
    float read_noise_var =
        kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
    // Noise profile is the same across all 4 CFA channels
    double noise_profile[2 * 4] = {
        noise_var_gain, read_noise_var, noise_var_gain, read_noise_var,
        noise_var_gain, read_noise_var, noise_var_gain, read_noise_var};
    result->Set(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
                ARRAY_SIZE(noise_profile));
  }
}
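
// Worked example (assumed inputs): for gain = 400 ISO and a hypothetical
// base_gain_factor of 0.064, total_gain = 400 / 100 * 0.064 = 0.256, giving
// S = noise_var_gain = 0.0655 and O = read_noise_var
// = 1.385 * 0.0655 + 4.41 ≈ 4.50. Each (S, O) pair parameterizes the
// per-channel model variance(x) = S * x + O expected by
// ANDROID_SENSOR_NOISE_PROFILE.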

EmulatedScene::ColorChannels EmulatedSensor::GetQuadBayerColor(uint32_t x,
                                                               uint32_t y) {
  // Row within larger set of quad bayer filter
  uint32_t row_mod = y % 4;
  // Column within larger set of quad bayer filter
  uint32_t col_mod = x % 4;

  // Row is within the top half of the quad bayer cell
  if (row_mod < 2) {
    if (col_mod < 2) {
      return EmulatedScene::ColorChannels::R;
    }
    return EmulatedScene::ColorChannels::Gr;
  } else {
    if (col_mod < 2) {
      return EmulatedScene::ColorChannels::Gb;
    }
    return EmulatedScene::ColorChannels::B;
  }
}
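
// Illustrative sketch (not part of the build): printing the 4x4 quad Bayer
// cell produced by GetQuadBayerColor(). Each 2x2 quadrant shares one color:
//   R  R  Gr Gr
//   R  R  Gr Gr
//   Gb Gb B  B
//   Gb Gb B  B
#if 0
#include <cstdint>
#include <cstdio>

int main() {
  // Names indexed by ColorChannels; the enumerator order R, Gr, Gb, B is an
  // assumption based on the bayer_select arrays used elsewhere in this file.
  const char* names[] = {"R", "Gr", "Gb", "B"};
  for (uint32_t y = 0; y < 4; y++) {
    for (uint32_t x = 0; x < 4; x++) {
      printf("%-3s",
             names[static_cast<int>(EmulatedSensor::GetQuadBayerColor(x, y))]);
    }
    printf("\n");
  }
  return 0;
}
#endif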

void EmulatedSensor::RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
                                            int xstart, int ystart,
                                            int row_stride_in_bytes) {
  uint32_t quad_block_copy_idx_map[16] = {0, 2, 1, 3, 8,  10, 6,  11,
                                          4, 9, 5, 7, 12, 14, 13, 15};
  uint16_t quad_block_copy[16];
  uint32_t i = 0;
  for (uint32_t row = 0; row < 4; row++) {
    uint16_t* quad_bayer_row =
        img_in + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
    for (uint32_t j = 0; j < 4; j++, i++) {
      quad_block_copy[i] = quad_bayer_row[j];
    }
  }

  for (uint32_t row = 0; row < 4; row++) {
    uint16_t* regular_bayer_row =
        img_out + (ystart + row) * (row_stride_in_bytes / 2) + xstart;
    for (uint32_t j = 0; j < 4; j++, i++) {
      uint32_t idx = quad_block_copy_idx_map[row + 4 * j];
      regular_bayer_row[j] = quad_block_copy[idx];
    }
  }
}
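
// For reference, the effect of the index map above on a single 4x4 cell
// (quad Bayer input on the left, remosaiced output on the right, G denoting
// either green channel):
//
//   R  R  Gr Gr        R  G  R  G
//   R  R  Gr Gr  --->  G  B  G  B
//   Gb Gb B  B         R  G  R  G
//   Gb Gb B  B         G  B  G  B
//
// i.e. each same-color 2x2 quadrant is redistributed into the alternating
// RGGB pattern expected by standard Bayer consumers.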

status_t EmulatedSensor::RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
                                            size_t row_stride_in_bytes,
                                            const SensorCharacteristics& chars) {
  if (chars.full_res_width % 4 != 0 || chars.full_res_height % 4 != 0) {
    ALOGE(
        "%s RAW16 Image with quad CFA, height %zu and width %zu, not multiples "
        "of 4",
        __FUNCTION__, chars.full_res_height, chars.full_res_width);
    return BAD_VALUE;
  }
  for (uint32_t i = 0; i < chars.full_res_width; i += 4) {
    for (uint32_t j = 0; j < chars.full_res_height; j += 4) {
      RemosaicQuadBayerBlock(img_in, img_out, i, j, row_stride_in_bytes);
    }
  }
  return OK;
}

void EmulatedSensor::CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes,
                                      uint32_t gain,
                                      const SensorCharacteristics& chars) {
  ATRACE_CALL();
  // Map each binned output pixel back to a full-resolution scene position.
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  float noise_var_gain = total_gain * total_gain;
  float read_noise_var =
      kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
  int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
                         EmulatedScene::B};
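  // (out_y & 1) picks the {R, Gr} or {Gb, B} half of bayer_select and
  // (out_x & 1) picks within the pair, reproducing the RGGB arrangement
  // advertised by kSupportedColorFilterArrangement.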
  scene_->SetReadoutPixel(0, 0);
  for (unsigned int out_y = 0; out_y < chars.height; out_y++) {
    // The buffer is binned size, so the stride matches the binned width.
    int* bayer_row = bayer_select + (out_y & 0x1) * 2;
    uint16_t* px = (uint16_t*)img + out_y * (row_stride_in_bytes / 2);
    for (unsigned int out_x = 0; out_x < chars.width; out_x++) {
      int color_idx = bayer_row[out_x & 0x1];
      uint16_t raw_count = 0;
      // The color filter is the same across each 2x2 quad.
      uint32_t electron_count = 0;
      int x, y;
      float norm_x = (float)out_x / chars.width;
      float norm_y = (float)out_y / chars.height;
      x = static_cast<int>(chars.full_res_width * norm_x);
      y = static_cast<int>(chars.full_res_height * norm_y);

      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);

      scene_->SetReadoutPixel(x, y);

      const uint32_t* pixel = scene_->GetPixelElectrons();
      electron_count = pixel[color_idx];
      // TODO: Better pixel saturation curve?
      electron_count = (electron_count < kSaturationElectrons)
                           ? electron_count
                           : kSaturationElectrons;

      // TODO: Better A/D saturation curve?
      raw_count = electron_count * total_gain;
      raw_count =
          (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;

      // Calculate noise value
      // TODO: Use more-correct Gaussian instead of uniform noise
      float photon_noise_var = electron_count * noise_var_gain;
      float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
      // Scaled to roughly match gaussian/uniform noise stddev
      float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;

      raw_count += chars.black_level_pattern[color_idx];
      raw_count += noise_stddev * noise_sample;
      *px++ = raw_count;
    }
  }
  ALOGVV("Binned RAW sensor image captured");
}
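
// Worked noise math for the RAW paths: with e electrons and total gain g,
// variance(raw) = (kReadNoiseVarBeforeGain + e) * g^2 + kReadNoiseVarAfterGain,
// i.e. gain-amplified read noise plus photon shot noise plus post-gain read
// noise. The uniform sample over [-1.25, 1.25) has stddev 2.5 / sqrt(12)
// ~= 0.72, a rough stand-in for a unit-variance Gaussian.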

void EmulatedSensor::CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
                                       uint32_t gain,
                                       const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  float noise_var_gain = total_gain * total_gain;
  float read_noise_var =
      kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;

  scene_->SetReadoutPixel(0, 0);
  // RGGB
  int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
                         EmulatedScene::B};

  for (unsigned int y = 0; y < chars.full_res_height; y++) {
    int* bayer_row = bayer_select + (y & 0x1) * 2;
    uint16_t* px = (uint16_t*)img + y * (row_stride_in_bytes / 2);
    for (unsigned int x = 0; x < chars.full_res_width; x++) {
      int color_idx = chars.quad_bayer_sensor ? GetQuadBayerColor(x, y)
                                              : bayer_row[x & 0x1];
      uint32_t electron_count;
      electron_count = scene_->GetPixelElectrons()[color_idx];

      // TODO: Better pixel saturation curve?
      electron_count = (electron_count < kSaturationElectrons)
                           ? electron_count
                           : kSaturationElectrons;

      // TODO: Better A/D saturation curve?
      uint16_t raw_count = electron_count * total_gain;
      raw_count =
          (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;

      // Calculate noise value
      // TODO: Use more-correct Gaussian instead of uniform noise
      float photon_noise_var = electron_count * noise_var_gain;
      float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
      // Scaled to roughly match gaussian/uniform noise stddev
      float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;

      raw_count += chars.black_level_pattern[color_idx];
      raw_count += noise_stddev * noise_sample;

      *px++ = raw_count;
    }
    // TODO: Handle this better
    // simulatedTime += mRowReadoutTime;
  }
  ALOGVV("Raw sensor image captured");
}

void EmulatedSensor::CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
                                uint32_t stride, RGBLayout layout, uint32_t gain,
                                const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // In fixed-point math, calculate total scaling from electrons to 8bpp
  int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
  uint32_t inc_h = ceil((float)chars.full_res_width / width);
  uint32_t inc_v = ceil((float)chars.full_res_height / height);

  for (unsigned int y = 0, outy = 0; y < chars.full_res_height;
       y += inc_v, outy++) {
    scene_->SetReadoutPixel(0, y);
    uint8_t* px = img + outy * stride;
    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
      uint32_t r_count, g_count, b_count;
      // TODO: Perfect demosaicing is a cheat
      const uint32_t* pixel = scene_->GetPixelElectrons();
      r_count = pixel[EmulatedScene::R] * scale64x;
      g_count = pixel[EmulatedScene::Gr] * scale64x;
      b_count = pixel[EmulatedScene::B] * scale64x;

      uint8_t r = r_count < 255 * 64 ? r_count / 64 : 255;
      uint8_t g = g_count < 255 * 64 ? g_count / 64 : 255;
      uint8_t b = b_count < 255 * 64 ? b_count / 64 : 255;
      switch (layout) {
        case RGB:
          *px++ = r;
          *px++ = g;
          *px++ = b;
          break;
        case RGBA:
          *px++ = r;
          *px++ = g;
          *px++ = b;
          *px++ = 255;
          break;
        case ARGB:
          *px++ = 255;
          *px++ = r;
          *px++ = g;
          *px++ = b;
          break;
        default:
          ALOGE("%s: RGB layout: %d not supported", __FUNCTION__, layout);
          return;
      }
      for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
    }
  }
  ALOGVV("RGB sensor image captured");
}
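
// Fixed-point example for the 8bpp scaling above (illustrative values): with
// max_raw_value = 4000 and total_gain = 1.0, scale64x = 64 * 255 / 4000 = 4,
// so a pixel with 2000 electrons maps to 2000 * 4 / 64 = 125, and anything
// reaching 255 * 64 = 16320 counts clips to 255.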

void EmulatedSensor::CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width,
                                   uint32_t height, uint32_t gain,
                                   float zoom_ratio, bool rotate,
                                   const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // Using fixed-point math with 6 bits of fractional precision.
  // In fixed-point math, calculate total scaling from electrons to 8bpp
  const int scale64x =
      kFixedBitPrecision * total_gain * 255 / chars.max_raw_value;
  // Fixed-point coefficients for RGB-YUV transform
  // Based on JFIF RGB->YUV transform.
  // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
  const int rgb_to_y[] = {19, 37, 7};
  const int rgb_to_cb[] = {-10, -21, 32, 524288};
  const int rgb_to_cr[] = {32, -26, -5, 524288};
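  // These are the JFIF full-range coefficients scaled by 64 (truncated):
  //   Y  =  0.299 R + 0.587 G + 0.114 B         -> {19, 37, 7}
  //   Cb = -0.1687 R - 0.3313 G + 0.5 B + 128   -> {-10, -21, 32}
  //   Cr =  0.5 R - 0.4187 G - 0.0813 B + 128   -> {32, -26, -5}
  // with the +128 offset pre-scaled by 64 * 64: 128 * 4096 = 524288.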
  // Scale back to 8bpp non-fixed-point
  const int scale_out = 64;
  const int scale_out_sq = scale_out * scale_out;  // after multiplies

  const float aspect_ratio = static_cast<float>(width) / height;

  // precalculate normalized coordinates and dimensions
  const float norm_left_top = 0.5f - 0.5f / zoom_ratio;
  const float norm_rot_top = norm_left_top;
  const float norm_width = 1 / zoom_ratio;
  const float norm_rot_width = norm_width / aspect_ratio;
  const float norm_rot_height = norm_width;
  const float norm_rot_left =
      norm_left_top + (norm_width + norm_rot_width) * 0.5f;

  for (unsigned int out_y = 0; out_y < height; out_y++) {
    uint8_t* px_y = yuv_layout.img_y + out_y * yuv_layout.y_stride;
    uint8_t* px_cb = yuv_layout.img_cb + (out_y / 2) * yuv_layout.cbcr_stride;
    uint8_t* px_cr = yuv_layout.img_cr + (out_y / 2) * yuv_layout.cbcr_stride;

    for (unsigned int out_x = 0; out_x < width; out_x++) {
      int x, y;
      float norm_x = out_x / (width * zoom_ratio);
      float norm_y = out_y / (height * zoom_ratio);
      if (rotate) {
        x = static_cast<int>(chars.full_res_width *
                             (norm_rot_left - norm_y * norm_rot_width));
        y = static_cast<int>(chars.full_res_height *
                             (norm_rot_top + norm_x * norm_rot_height));
      } else {
        x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
        y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
      }
      x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
      y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
      scene_->SetReadoutPixel(x, y);

      int32_t r_count, g_count, b_count;
      // TODO: Perfect demosaicing is a cheat
      const uint32_t* pixel = rotate ? scene_->GetPixelElectronsColumn()
                                     : scene_->GetPixelElectrons();
      r_count = pixel[EmulatedScene::R] * scale64x;
      r_count = r_count < kSaturationPoint ? r_count : kSaturationPoint;
      g_count = pixel[EmulatedScene::Gr] * scale64x;
      g_count = g_count < kSaturationPoint ? g_count : kSaturationPoint;
      b_count = pixel[EmulatedScene::B] * scale64x;
      b_count = b_count < kSaturationPoint ? b_count : kSaturationPoint;

      // Gamma correction
      r_count = gamma_table_[r_count];
      g_count = gamma_table_[g_count];
      b_count = gamma_table_[b_count];

      uint8_t y8 = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
                    rgb_to_y[2] * b_count) /
                   scale_out_sq;
      if (yuv_layout.bytesPerPixel == 1) {
        *px_y = y8;
      } else if (yuv_layout.bytesPerPixel == 2) {
        *(reinterpret_cast<uint16_t*>(px_y)) = htole16(y8 << 8);
      } else {
        ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
              yuv_layout.bytesPerPixel);
        return;
      }
      px_y += yuv_layout.bytesPerPixel;

      if (out_y % 2 == 0 && out_x % 2 == 0) {
        uint8_t cb8 = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
                       rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
                      scale_out_sq;
        uint8_t cr8 = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
                       rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
                      scale_out_sq;
        if (yuv_layout.bytesPerPixel == 1) {
          *px_cb = cb8;
          *px_cr = cr8;
        } else if (yuv_layout.bytesPerPixel == 2) {
          *(reinterpret_cast<uint16_t*>(px_cb)) = htole16(cb8 << 8);
          *(reinterpret_cast<uint16_t*>(px_cr)) = htole16(cr8 << 8);
        } else {
          ALOGE("%s: Unsupported bytes per pixel value: %zu", __func__,
                yuv_layout.bytesPerPixel);
          return;
        }
        px_cr += yuv_layout.cbcr_step;
        px_cb += yuv_layout.cbcr_step;
      }
    }
  }
  ALOGVV("YUV420 sensor image captured");
}
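
// Chroma above is written once per 2x2 luma block (even out_y/out_x only),
// and px_cb/px_cr advance by cbcr_step, so both planar (step 1) and
// interleaved (step 2) 4:2:0 layouts are handled by the same loop.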

void EmulatedSensor::CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width,
                                  uint32_t height, uint32_t stride,
                                  const SensorCharacteristics& chars) {
  ATRACE_CALL();
  float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
  // In fixed-point math, calculate scaling factor to 13bpp millimeters
  int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
  uint32_t inc_h = ceil((float)chars.full_res_width / width);
  uint32_t inc_v = ceil((float)chars.full_res_height / height);

  for (unsigned int y = 0, out_y = 0; y < chars.full_res_height;
       y += inc_v, out_y++) {
    scene_->SetReadoutPixel(0, y);
    uint16_t* px = (uint16_t*)(img + (out_y * stride));
    for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
      uint32_t depth_count;
      // TODO: Make up real depth scene instead of using green channel
      // as depth
      const uint32_t* pixel = scene_->GetPixelElectrons();
      depth_count = pixel[EmulatedScene::Gr] * scale64x;

      *px++ = depth_count < 8191 * 64 ? depth_count / 64 : 0;
      for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
    }
    // TODO: Handle this better
    // simulatedTime += mRowReadoutTime;
  }
  ALOGVV("Depth sensor image captured");
}
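
// 8191 = 2^13 - 1 matches the 13-bit millimeter range noted above; samples
// reaching 8191 * 64 fixed-point counts are written as 0 rather than clamped
// to the maximum.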

status_t EmulatedSensor::ProcessYUV420(const YUV420Frame& input,
                                       const YUV420Frame& output, uint32_t gain,
                                       ProcessType process_type, float zoom_ratio,
                                       bool rotate_and_crop,
                                       const SensorCharacteristics& chars) {
  ATRACE_CALL();
  size_t input_width, input_height;
  YCbCrPlanes input_planes, output_planes;
  std::vector<uint8_t> temp_yuv, temp_output_uv, temp_input_uv;

  // Downgrade HIGH_QUALITY to REGULAR on the emulator when the
  // ro.boot.qemu.camera_hq_edge_processing property is false.
  if (process_type == HIGH_QUALITY &&
      !property_get_bool("ro.boot.qemu.camera_hq_edge_processing", true)) {
    process_type = REGULAR;
  }

  switch (process_type) {
    case HIGH_QUALITY:
      CaptureYUV420(output.planes, output.width, output.height, gain, zoom_ratio,
                    rotate_and_crop, chars);
      return OK;
    case REPROCESS:
      input_width = input.width;
      input_height = input.height;
      input_planes = input.planes;

      // libyuv only supports planar YUV420 during scaling.
      // Split the input U/V plane into separate planes if needed.
      if (input_planes.cbcr_step == 2) {
        temp_input_uv.resize(input_width * input_height / 2);
        auto temp_uv_buffer = temp_input_uv.data();
        input_planes.img_cb = temp_uv_buffer;
        input_planes.img_cr = temp_uv_buffer + (input_width * input_height) / 4;
        input_planes.cbcr_stride = input_width / 2;
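        // The plane base-pointer order distinguishes interleaved CbCr
        // (NV12-style) inputs from CrCb (NV21-style) ones.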
        if (input.planes.img_cb < input.planes.img_cr) {
          libyuv::SplitUVPlane(input.planes.img_cb, input.planes.cbcr_stride,
                               input_planes.img_cb, input_planes.cbcr_stride,
                               input_planes.img_cr, input_planes.cbcr_stride,
                               input_width / 2, input_height / 2);
        } else {
          libyuv::SplitUVPlane(input.planes.img_cr, input.planes.cbcr_stride,
                               input_planes.img_cr, input_planes.cbcr_stride,
                               input_planes.img_cb, input_planes.cbcr_stride,
                               input_width / 2, input_height / 2);
        }
      }
      break;
    case REGULAR:
    default:
      // Generate the smallest possible frame with the expected aspect ratio
      // and then scale up using libyuv.
      float aspect_ratio = static_cast<float>(output.width) / output.height;
      zoom_ratio = std::max(1.f, zoom_ratio);
      input_width = EmulatedScene::kSceneWidth * aspect_ratio;
      input_height = EmulatedScene::kSceneHeight;
      // Use resize() rather than reserve(): the buffer is written through
      // data(), which requires the elements to actually exist.
      temp_yuv.resize((input_width * input_height * 3) / 2);
      auto temp_yuv_buffer = temp_yuv.data();
      input_planes = {
          .img_y = temp_yuv_buffer,
          .img_cb = temp_yuv_buffer + input_width * input_height,
          .img_cr = temp_yuv_buffer + (input_width * input_height * 5) / 4,
          .y_stride = static_cast<uint32_t>(input_width),
          .cbcr_stride = static_cast<uint32_t>(input_width) / 2,
          .cbcr_step = 1};
      CaptureYUV420(input_planes, input_width, input_height, gain, zoom_ratio,
                    rotate_and_crop, chars);
  }

  output_planes = output.planes;
  // libyuv only supports planar YUV420 during scaling.
  // Treat the output UV space as planar first and then
  // interleave in the second step.
  if (output_planes.cbcr_step == 2) {
    temp_output_uv.resize(output.width * output.height / 2);
    auto temp_uv_buffer = temp_output_uv.data();
    output_planes.img_cb = temp_uv_buffer;
    output_planes.img_cr = temp_uv_buffer + output.width * output.height / 4;
    output_planes.cbcr_stride = output.width / 2;
  }

  auto ret = libyuv::I420Scale(
      input_planes.img_y, input_planes.y_stride, input_planes.img_cb,
      input_planes.cbcr_stride, input_planes.img_cr, input_planes.cbcr_stride,
      input_width, input_height, output_planes.img_y, output_planes.y_stride,
      output_planes.img_cb, output_planes.cbcr_stride, output_planes.img_cr,
      output_planes.cbcr_stride, output.width, output.height,
      libyuv::kFilterNone);
  if (ret != 0) {
    ALOGE("%s: Failed during YUV scaling: %d", __FUNCTION__, ret);
    return ret;
  }

  // Merge U/V Planes for the interleaved case
  if (output_planes.cbcr_step == 2) {
    if (output.planes.img_cb < output.planes.img_cr) {
      libyuv::MergeUVPlane(output_planes.img_cb, output_planes.cbcr_stride,
                           output_planes.img_cr, output_planes.cbcr_stride,
                           output.planes.img_cb, output.planes.cbcr_stride,
                           output.width / 2, output.height / 2);
    } else {
      libyuv::MergeUVPlane(output_planes.img_cr, output_planes.cbcr_stride,
                           output_planes.img_cb, output_planes.cbcr_stride,
                           output.planes.img_cr, output.planes.cbcr_stride,
                           output.width / 2, output.height / 2);
    }
  }

  return ret;
}

int32_t EmulatedSensor::ApplysRGBGamma(int32_t value, int32_t saturation) {
  float n_value = (static_cast<float>(value) / saturation);
  n_value = (n_value <= 0.0031308f)
                ? n_value * 12.92f
                : 1.055f * pow(n_value, 0.4166667f) - 0.055f;
  return n_value * saturation;
}
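
// ApplysRGBGamma is the standard sRGB opto-electronic transfer function:
// f(x) = 12.92 x for x <= 0.0031308, else 1.055 x^(1/2.4) - 0.055, with
// 0.4166667 ~= 1/2.4. For example, a mid-scale input of 0.5 maps to roughly
// 0.735 of saturation.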

}  // namespace android