1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /**
18  * This class is a simple simulation of a typical CMOS cellphone imager chip,
19  * which outputs 12-bit Bayer-mosaic raw images.
20  *
21  * Unlike most real image sensors, this one's native color space is linear sRGB.
22  *
23  * The sensor is abstracted as operating as a pipeline 3 stages deep;
24  * conceptually, each frame to be captured goes through these three stages. The
25  * processing step for the sensor is marked off by vertical sync signals, which
26  * indicate the start of readout of the oldest frame. The interval between
27  * processing steps depends on the frame duration of the frame currently being
28  * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29  * configuration, the sensor's registers for settings such as exposure time,
30  * frame duration, and gain are set for the next frame to be captured. In stage
31  * 2, the image data for the frame is actually captured by the sensor. Finally,
32  * in stage 3, the just-captured data is read out and sent to the rest of the
33  * system.
34  *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than higher-numbered rows, with the time
 * offset between each row being equal to the row readout time.
38  *
39  * The characteristics of this sensor don't correspond to any actual sensor,
40  * but are not far off typical sensors.
41  *
42  * Example timing diagram, with three frames:
43  *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44  *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
45  * Legend:
46  *   C = update sensor registers for frame
47  *   v = row in reset (vertical blanking interval)
48  *   E = row capturing image data
49  *   R = row being read out
50  *   | = vertical sync signal
51  *time(ms)|   0          55        105       155            230     270
52  * Frame 0|   :configure : capture : readout :              :       :
53  *  Row # | ..|CCCC______|_________|_________|              :       :
54  *      0 |   :\          \vvvvvEEEER         \             :       :
55  *    500 |   : \          \vvvvvEEEER         \            :       :
56  *   1000 |   :  \          \vvvvvEEEER         \           :       :
57  *   1500 |   :   \          \vvvvvEEEER         \          :       :
58  *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
59  * Frame 1|   :           configure  capture      readout   :       :
60  *  Row # |   :          |CCCC_____|_________|______________|       :
61  *      0 |   :          :\         \vvvvvEEEER              \      :
62  *    500 |   :          : \         \vvvvvEEEER              \     :
63  *   1000 |   :          :  \         \vvvvvEEEER              \    :
64  *   1500 |   :          :   \         \vvvvvEEEER              \   :
65  *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
66  * Frame 2|   :          :          configure     capture    readout:
67  *  Row # |   :          :         |CCCC_____|______________|_______|...
68  *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
69  *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
70  *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
71  *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
72  *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
73  */
74 
75 #ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76 #define HW_EMULATOR_CAMERA2_SENSOR_H
77 
#include <hwl_types.h>

#include <cstdint>
#include <functional>
#include <map>
#include <memory>
#include <unordered_map>
#include <vector>

#include "Base.h"
#include "EmulatedScene.h"
#include "HandleImporter.h"
#include "JpegCompressor.h"
#include "utils/Mutex.h"
#include "utils/StreamConfigurationMap.h"
#include "utils/Thread.h"
#include "utils/Timers.h"
90 
91 namespace android {
92 
93 using android::hardware::camera::common::V1_0::helper::HandleImporter;
94 using google_camera_hal::HwlPipelineCallback;
95 using google_camera_hal::HwlPipelineResult;
96 using google_camera_hal::StreamConfiguration;
97 
/*
 * Per-channel XYZ -> sensor color space conversion coefficients.
 * Defaults to the standard XYZ -> linear sRGB matrix with a D65 white point;
 * each group of three members below is one row of that 3x3 matrix. The green
 * row appears twice so the two green positions of the Bayer mosaic (Gr on the
 * red rows, Gb on the blue rows) can be tuned independently; by default both
 * use identical coefficients.
 */
struct ColorFilterXYZ {
  // Red channel row.
  float rX = 3.2406f;
  float rY = -1.5372f;
  float rZ = -0.4986f;
  // Green channel row, red-row (Gr) positions.
  float grX = -0.9689f;
  float grY = 1.8758f;
  float grZ = 0.0415f;
  // Green channel row, blue-row (Gb) positions — same as Gr by default.
  float gbX = -0.9689f;
  float gbY = 1.8758f;
  float gbZ = 0.0415f;
  // Blue channel row.
  float bX = 0.0557f;
  float bY = -0.2040f;
  float bZ = 1.0570f;
};
115 
// Static capabilities and limits of one (emulated) physical sensor.
// Validated by EmulatedSensor::AreCharacteristicsSupported() before use.
struct SensorCharacteristics {
  // Default-mode pixel array dimensions.
  size_t width = 0;
  size_t height = 0;
  // Full-resolution (unbinned) dimensions; presumably only meaningful when
  // quad_bayer_sensor is true — confirm against the implementation.
  size_t full_res_width = 0;
  size_t full_res_height = 0;
  nsecs_t exposure_time_range[2] = {0};   // Presumably {min, max} in ns.
  nsecs_t frame_duration_range[2] = {0};  // Presumably {min, max} in ns.
  int32_t sensitivity_range[2] = {0};     // Presumably {min, max} ISO.
  // NOTE(review): "arangement" is a typo for "arrangement", but the member is
  // part of the public interface and is kept as-is to avoid breaking users.
  camera_metadata_enum_android_sensor_info_color_filter_arrangement
      color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
  // XYZ -> sensor color space conversion coefficients.
  ColorFilterXYZ color_filter;
  // Saturation value of one raw pixel (e.g. 4095 for a 12-bit sensor).
  uint32_t max_raw_value = 0;
  // Per-channel black level offsets in raw units.
  uint32_t black_level_pattern[4] = {0};
  // Maximum concurrent stream counts by category.
  uint32_t max_raw_streams = 0;
  uint32_t max_processed_streams = 0;
  uint32_t max_stalling_streams = 0;
  uint32_t max_input_streams = 0;
  uint32_t physical_size[2] = {0};
  bool is_flash_supported = false;
  uint32_t lens_shading_map_size[2] = {0};
  uint32_t max_pipeline_depth = 0;
  // Sensor mounting orientation in degrees.
  uint32_t orientation = 0;
  bool is_front_facing = false;
  // True when the sensor supports quad-bayer binned/full-resolution modes.
  bool quad_bayer_sensor = false;
};
141 
142 // Maps logical/physical camera ids to sensor characteristics
143 typedef std::unordered_map<uint32_t, SensorCharacteristics> LogicalCharacteristics;
144 
145 class EmulatedSensor : private Thread, public virtual RefBase {
146  public:
147   EmulatedSensor();
148   ~EmulatedSensor();
149 
OverrideFormat(android_pixel_format_t format)150   static android_pixel_format_t OverrideFormat(android_pixel_format_t format) {
151     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
152       return HAL_PIXEL_FORMAT_YCBCR_420_888;
153     }
154 
155     return format;
156   }
157 
IsReprocessPathSupported(android_pixel_format_t input_format,android_pixel_format_t output_format)158   static bool IsReprocessPathSupported(android_pixel_format_t input_format,
159                                        android_pixel_format_t output_format) {
160     if ((HAL_PIXEL_FORMAT_YCBCR_420_888 == input_format) &&
161         ((HAL_PIXEL_FORMAT_YCBCR_420_888 == output_format) ||
162          (HAL_PIXEL_FORMAT_BLOB == output_format))) {
163       return true;
164     }
165 
166     if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
167         HAL_PIXEL_FORMAT_RAW16 == output_format) {
168       return true;
169     }
170 
171     return false;
172   }
173 
174   static bool AreCharacteristicsSupported(
175       const SensorCharacteristics& characteristics);
176 
177   static bool IsStreamCombinationSupported(
178       uint32_t logical_id, const StreamConfiguration& config,
179       StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
180       const PhysicalStreamConfigurationMap& physical_map,
181       const PhysicalStreamConfigurationMap& physical_map_max_resolution,
182       const LogicalCharacteristics& sensor_chars);
183 
184   static bool IsStreamCombinationSupported(
185       uint32_t logical_id, const StreamConfiguration& config,
186       StreamConfigurationMap& map,
187       const PhysicalStreamConfigurationMap& physical_map,
188       const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
189 
190   /*
191    * Power control
192    */
193 
194   status_t StartUp(uint32_t logical_camera_id,
195                    std::unique_ptr<LogicalCharacteristics> logical_chars);
196   status_t ShutDown();
197 
198   /*
199    * Physical camera settings control
200    */
201   struct SensorSettings {
202     nsecs_t exposure_time = 0;
203     nsecs_t frame_duration = 0;
204     uint32_t gain = 0;  // ISO
205     uint32_t lens_shading_map_mode;
206     bool report_neutral_color_point = false;
207     bool report_green_split = false;
208     bool report_noise_profile = false;
209     float zoom_ratio = 1.0f;
210     bool report_rotate_and_crop = false;
211     uint8_t rotate_and_crop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
212     bool report_video_stab = false;
213     uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
214     bool report_edge_mode = false;
215     uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
216     uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
217     uint8_t test_pattern_mode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
218     uint32_t test_pattern_data[4] = {0, 0, 0, 0};
219   };
220 
221   // Maps physical and logical camera ids to individual device settings
222   typedef std::unordered_map<uint32_t, SensorSettings> LogicalCameraSettings;
223 
224   void SetCurrentRequest(std::unique_ptr<LogicalCameraSettings> logical_settings,
225                          std::unique_ptr<HwlPipelineResult> result,
226                          std::unique_ptr<Buffers> input_buffers,
227                          std::unique_ptr<Buffers> output_buffers);
228 
229   status_t Flush();
230 
231   /*
232    * Synchronizing with sensor operation (vertical sync)
233    */
234 
235   // Wait until the sensor outputs its next vertical sync signal, meaning it
236   // is starting readout of its latest frame of data. Returns true if vertical
237   // sync is signaled, false if the wait timed out.
238   bool WaitForVSync(nsecs_t rel_time);
239 
240   static const nsecs_t kSupportedExposureTimeRange[2];
241   static const nsecs_t kSupportedFrameDurationRange[2];
242   static const int32_t kSupportedSensitivityRange[2];
243   static const uint8_t kSupportedColorFilterArrangement;
244   static const uint32_t kDefaultMaxRawValue;
245   static const nsecs_t kDefaultExposureTime;
246   static const int32_t kDefaultSensitivity;
247   static const nsecs_t kDefaultFrameDuration;
248   static const nsecs_t kReturnResultThreshod;
249   static const uint32_t kDefaultBlackLevelPattern[4];
250   static const camera_metadata_rational kDefaultColorTransform[9];
251   static const float kDefaultColorCorrectionGains[4];
252   static const float kDefaultToneMapCurveRed[4];
253   static const float kDefaultToneMapCurveGreen[4];
254   static const float kDefaultToneMapCurveBlue[4];
255   static const uint8_t kPipelineDepth;
256 
257  private:
258   // Scene stabilization
259   static const uint32_t kRegularSceneHandshake;
260   static const uint32_t kReducedSceneHandshake;
261 
262   /**
263    * Logical characteristics
264    */
265   std::unique_ptr<LogicalCharacteristics> chars_;
266 
267   uint32_t logical_camera_id_ = 0;
268 
269   static const nsecs_t kMinVerticalBlank;
270 
271   // Sensor sensitivity, approximate
272 
273   static const float kSaturationVoltage;
274   static const uint32_t kSaturationElectrons;
275   static const float kVoltsPerLuxSecond;
276   static const float kElectronsPerLuxSecond;
277 
278   static const float kReadNoiseStddevBeforeGain;  // In electrons
279   static const float kReadNoiseStddevAfterGain;   // In raw digital units
280   static const float kReadNoiseVarBeforeGain;
281   static const float kReadNoiseVarAfterGain;
282   static const camera_metadata_rational kNeutralColorPoint[3];
283   static const float kGreenSplit;
284 
285   static const uint32_t kMaxRAWStreams;
286   static const uint32_t kMaxProcessedStreams;
287   static const uint32_t kMaxStallingStreams;
288   static const uint32_t kMaxInputStreams;
289   static const uint32_t kMaxLensShadingMapSize[2];
290   static const int32_t kFixedBitPrecision;
291   static const int32_t kSaturationPoint;
292 
293   std::vector<int32_t> gamma_table_;
294 
295   Mutex control_mutex_;  // Lock before accessing control parameters
296   // Start of control parameters
297   Condition vsync_;
298   bool got_vsync_;
299   std::unique_ptr<LogicalCameraSettings> current_settings_;
300   std::unique_ptr<HwlPipelineResult> current_result_;
301   std::unique_ptr<Buffers> current_output_buffers_;
302   std::unique_ptr<Buffers> current_input_buffers_;
303   std::unique_ptr<JpegCompressor> jpeg_compressor_;
304 
305   // End of control parameters
306 
307   unsigned int rand_seed_ = 1;
308 
309   /**
310    * Inherited Thread virtual overrides, and members only used by the
311    * processing thread
312    */
313   bool threadLoop() override;
314 
315   nsecs_t next_capture_time_;
316 
317   struct SensorBinningFactorInfo {
318     bool has_raw_stream = false;
319     bool has_non_raw_stream = false;
320     bool quad_bayer_sensor = false;
321     bool max_res_request = false;
322   };
323 
324   std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
325 
326   sp<EmulatedScene> scene_;
327 
328   static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
329 
330   static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
331                                      int xstart, int ystart,
332                                      int row_stride_in_bytes);
333 
334   static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
335                                      size_t row_stride_in_bytes,
336                                      const SensorCharacteristics& chars);
337 
338   void CaptureRawBinned(uint8_t* img, size_t row_stride_in_bytes, uint32_t gain,
339                         const SensorCharacteristics& chars);
340 
341   void CaptureRawFullRes(uint8_t* img, size_t row_stride_in_bytes,
342                          uint32_t gain, const SensorCharacteristics& chars);
343 
344   enum RGBLayout { RGB, RGBA, ARGB };
345   void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
346                   uint32_t stride, RGBLayout layout, uint32_t gain,
347                   const SensorCharacteristics& chars);
348   void CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width, uint32_t height,
349                      uint32_t gain, float zoom_ratio, bool rotate,
350                      const SensorCharacteristics& chars);
351   void CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width, uint32_t height,
352                     uint32_t stride, const SensorCharacteristics& chars);
353 
354   struct YUV420Frame {
355     uint32_t width = 0;
356     uint32_t height = 0;
357     YCbCrPlanes planes;
358   };
359 
360   enum ProcessType { REPROCESS, HIGH_QUALITY, REGULAR };
361   status_t ProcessYUV420(const YUV420Frame& input, const YUV420Frame& output,
362                          uint32_t gain, ProcessType process_type,
363                          float zoom_ratio, bool rotate_and_crop,
364                          const SensorCharacteristics& chars);
365 
366   inline int32_t ApplysRGBGamma(int32_t value, int32_t saturation);
367 
368   bool WaitForVSyncLocked(nsecs_t reltime);
369   void CalculateAndAppendNoiseProfile(float gain /*in ISO*/,
370                                       float base_gain_factor,
371                                       HalCameraMetadata* result /*out*/);
372 
373   void ReturnResults(HwlPipelineCallback callback,
374                      std::unique_ptr<LogicalCameraSettings> settings,
375                      std::unique_ptr<HwlPipelineResult> result,
376                      bool reprocess_request);
377 
GetBaseGainFactor(float max_raw_value)378   static float GetBaseGainFactor(float max_raw_value) {
379     return max_raw_value / EmulatedSensor::kSaturationElectrons;
380   }
381 };
382 
383 }  // namespace android
384 
385 #endif  // HW_EMULATOR_CAMERA2_SENSOR_H
386