1# Copyright 2024 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14"""Verify that the switch from UW to W has similar RGB values."""
15
16
17import logging
18import math
19import os.path
20import pathlib
21
22from mobly import test_runner
23import numpy as np
24
25import its_base_test
26import camera_properties_utils
27import image_processing_utils
28import its_session_utils
29import opencv_processing_utils
30import preview_processing_utils
31
32
# Tolerances and tuning constants for the AE/AWB/AF crossover checks.
_AE_ATOL = 4.0  # absolute luma tolerance (Y values on uint8 0-255 scale)
_AE_RTOL = 0.04  # 4%
_AF_ATOL = 0.02  # 2%
_ARUCO_MARKERS_COUNT = 4  # chart has one marker per corner
_AWB_ATOL = 0.02  # 2%
_CH_FULL_SCALE = 255  # uint8 full scale; images are normalized by this to save
_COLORS = ('r', 'g', 'b', 'gray')  # quadrant colors in row-major chart order
_IMG_FORMAT = 'png'
_NAME = os.path.splitext(os.path.basename(__file__))[0]
_PATCH_MARGIN = 50  # pixels
_RECORDING_DURATION = 400  # milliseconds
_SENSOR_ORIENTATIONS = (90, 270)
_SKIP_INITIAL_FRAMES = 15
# NOTE(review): _IMG_FORMAT, _RECORDING_DURATION, _SENSOR_ORIENTATIONS and
# _SKIP_INITIAL_FRAMES appear unreferenced in this file — confirm external
# use before removing.
_TAP_COORDINATES = (500, 500)  # Location to tap tablet screen via adb
_ZOOM_RANGE_UW_W = (0.95, 2.05)  # UW/W crossover range
_ZOOM_STEP = 0.01
49
50
51def _get_error_msg(failed_awb_msg, failed_ae_msg, failed_af_msg):
52  """"Returns the error message string.
53
54  Args:
55    failed_awb_msg: list of awb error msgs
56    failed_ae_msg: list of ae error msgs
57    failed_af_msg: list of af error msgs
58  Returns:
59    error_msg: str; error_msg string
60  """
61  error_msg = ''
62  if failed_awb_msg:
63    error_msg = f'{error_msg}----AWB Check----\n'
64    for msg in failed_awb_msg:
65      error_msg = f'{error_msg}{msg}\n'
66  if failed_ae_msg:
67    error_msg = f'{error_msg}----AE Check----\n'
68    for msg in failed_ae_msg:
69      error_msg = f'{error_msg}{msg}\n'
70  if failed_af_msg:
71    error_msg = f'{error_msg}----AF Check----\n'
72    for msg in failed_af_msg:
73      error_msg = f'{error_msg}{msg}\n'
74  return error_msg
75
76
def _check_orientation_and_flip(props, uw_img, w_img, img_name_stem):
  """Mirrors the UW and W frames per the sensor orientation and saves them.

  Front-camera preview captures are mirrored depending on the sensor
  orientation; both frames are passed through the mirroring helper and the
  results are written out as '<img_name_stem>_uw.png' / '_w.png'.

  Args:
    props: camera properties object.
    uw_img: image captured using UW lens.
    w_img: image captured using W lens.
    img_name_stem: prefix for the img name to be saved

  Returns:
    numpy array of uw_img and w_img.
  """
  orientation = props['android.sensor.orientation']
  mirror = preview_processing_utils.mirror_preview_image_by_sensor_orientation
  uw_img = mirror(orientation, uw_img)
  w_img = mirror(orientation, w_img)
  # Normalize to [0, 1] before saving, as write_image expects.
  for img, suffix in ((uw_img, 'uw'), (w_img, 'w')):
    image_processing_utils.write_image(
        img / _CH_FULL_SCALE, f'{img_name_stem}_{suffix}.png')
  return uw_img, w_img
104
105
def _do_ae_check(uw_img, w_img, log_path, suffix):
  """Checks that the average luma change between UW and W is within limits.

  Extracts the Y plane from each image, compares the means with both a
  relative (_AE_RTOL) and an absolute (_AE_ATOL) tolerance, and reports a
  failure message when the difference exceeds both.

  Args:
    uw_img: image captured using UW lens.
    w_img: image captured using W lens.
    log_path: path to save the image.
    suffix: str; patch suffix to be used in file name.
  Returns:
    failed_ae_msg: Failed AE check messages if any. None otherwise.
    uw_y_avg: y_avg value for UW lens
    w_y_avg: y_avg value for W lens
  """
  failed_ae_msg = []
  file_stem = f'{os.path.join(log_path, _NAME)}_{suffix}'

  uw_y_avg = np.average(_extract_y(uw_img, f'{file_stem}_uw_y.png'))
  logging.debug('UW y_avg: %.4f', uw_y_avg)
  w_y_avg = np.average(_extract_y(w_img, f'{file_stem}_w_y.png'))
  logging.debug('W y_avg: %.4f', w_y_avg)

  luma_diff = abs(w_y_avg - uw_y_avg)
  logging.debug('y_avg_change_percent: %.4f', luma_diff / uw_y_avg * 100)

  within_limits = math.isclose(
      uw_y_avg, w_y_avg, rel_tol=_AE_RTOL, abs_tol=_AE_ATOL)
  if not within_limits:
    failed_ae_msg.append('y_avg change is greater than threshold value for '
                         f'patch: {suffix} '
                         f'diff: {luma_diff:.4f} '
                         f'ATOL: {_AE_ATOL} '
                         f'RTOL: {_AE_RTOL} '
                         f'uw_y_avg: {uw_y_avg:.4f} '
                         f'w_y_avg: {w_y_avg:.4f} ')
  return failed_ae_msg, uw_y_avg, w_y_avg
142
143
def _do_af_check(uw_img, w_img):
  """Compares image sharpness between the UW and W patches.

  Sharpness of each patch is computed and the two values must agree
  within the absolute tolerance _AF_ATOL.

  Args:
    uw_img: image captured using UW lens.
    w_img: image captured using W lens.

  Returns:
    failed_af_msg: Failed AF check messages if any. None otherwise.
    sharpness_uw: sharpness value for UW lens
    sharpness_w: sharpness value for W lens
  """
  failed_af_msg = []
  sharpness_uw = image_processing_utils.compute_image_sharpness(uw_img)
  logging.debug('Sharpness for UW patch: %.2f', sharpness_uw)
  sharpness_w = image_processing_utils.compute_image_sharpness(w_img)
  logging.debug('Sharpness for W patch: %.2f', sharpness_w)

  sharpness_matches = math.isclose(sharpness_w, sharpness_uw,
                                   abs_tol=_AF_ATOL)
  if not sharpness_matches:
    failed_af_msg.append('Sharpness difference > threshold value.'
                         f' ATOL: {_AF_ATOL} '
                         f'sharpness_w: {sharpness_w:.4f} '
                         f'sharpness_uw: {sharpness_uw:.4f}')
  return failed_af_msg, sharpness_uw, sharpness_w
168
169
def _do_awb_check(uw_img, w_img):
  """Compares R/G and B/G color ratios between the UW and W patches.

  Each ratio pair must agree within the absolute tolerance _AWB_ATOL.

  Args:
    uw_img: image captured using UW lens.
    w_img: image captured using W lens.
  Returns:
    failed_awb_msg: Failed AWB check messages if any. None otherwise.
  """
  failed_awb_msg = []
  uw_r_g_ratio, uw_b_g_ratio = _get_color_ratios(uw_img, 'UW')
  w_r_g_ratio, w_b_g_ratio = _get_color_ratios(w_img, 'W')

  # Same check for both channel ratios; iterate instead of duplicating.
  ratio_checks = (
      ('R/G', 'uw_r_g_ratio', uw_r_g_ratio, 'w_r_g_ratio', w_r_g_ratio),
      ('B/G', 'uw_b_g_ratio', uw_b_g_ratio, 'w_b_g_ratio', w_b_g_ratio),
  )
  for channel, uw_label, uw_ratio, w_label, w_ratio in ratio_checks:
    if not math.isclose(uw_ratio, w_ratio, abs_tol=_AWB_ATOL):
      failed_awb_msg.append(
          f'{channel} change is greater than the threshold value: '
          f'ATOL: {_AWB_ATOL} '
          f'{uw_label}: {uw_ratio:.4f} '
          f'{w_label}: {w_ratio:.4f}')
  return failed_awb_msg
196
197
def _extract_main_patch(corners, ids, img_rgb, img_path, lens_suffix):
  """Crops and saves the main rectangle patch bounded by the ArUco markers.

  Uses the previously detected marker corners (see _find_aruco_markers) to
  crop the chart rectangle from the captured frame, then writes the patch,
  without the markers, next to img_path.

  Args:
    corners: list of detected corners.
    ids: list of int ids for each ArUco markers in the input_img.
    img_rgb: An openCV image in RGB order.
    img_path: Path to save the image.
    lens_suffix: str; suffix used to save the image.
  Returns:
    rectangle_patch: numpy float image array of the rectangle patch.
  """
  rectangle_patch = opencv_processing_utils.get_patch_from_aruco_markers(
      img_rgb, corners, ids)
  output_path = img_path.with_name(
      f'{img_path.stem}_{lens_suffix}_patch{img_path.suffix}')
  # Normalize to [0, 1] before saving, as write_image expects.
  image_processing_utils.write_image(
      rectangle_patch / _CH_FULL_SCALE, output_path)
  return rectangle_patch
221
222
def _extract_y(img_uint8, file_name):
  """Returns the luma (Y) plane of an RGB uint8 image.

  The extracted Y plane is also written to file_name in the test dir.

  Args:
    img_uint8: An openCV image in RGB order.
    file_name: file name along with the path to save the image.

  Returns:
    An openCV image converted to Y.
  """
  y_plane = opencv_processing_utils.convert_to_y(img_uint8, 'RGB')
  # Append a channel axis so the single-plane image can be saved.
  y_plane = y_plane[..., np.newaxis]
  image_processing_utils.write_image(y_plane / _CH_FULL_SCALE, file_name)
  return y_plane
239
240
def _find_aruco_markers(img_bw, img_path, lens_suffix):
  """Detects ArUco markers and asserts that all four corners were found.

  The annotated detection image is saved with an '_aruco' suffix next to
  img_path.

  Args:
    img_bw: input img in black and white with ArUco markers.
    img_path: path to save the image.
    lens_suffix: suffix used to save the image.
  Returns:
    corners: list of detected corners.
    ids: list of int ids for each ArUco markers in the input_img.
  Raises:
    AssertionError: if the detected marker count differs from
      _ARUCO_MARKERS_COUNT.
  """
  annotated_path = img_path.with_name(
      f'{img_path.stem}_{lens_suffix}_aruco{img_path.suffix}')
  corners, ids, _ = opencv_processing_utils.find_aruco_markers(
      img_bw, annotated_path)
  if len(ids) != _ARUCO_MARKERS_COUNT:
    raise AssertionError(
        f'{_ARUCO_MARKERS_COUNT} ArUco markers should be detected.')
  return corners, ids
260
261
def _get_color_ratios(img, identifier):
  """Computes the R/G and B/G channel-mean ratios for img.

  Args:
    img: RGB img in numpy format.
    identifier: str; identifier for logging statement. ie. 'UW' or 'W'

  Returns:
    r_g_ratio: Ratio of R and G channel means.
    b_g_ratio: Ratio of B and G channel means.
  """
  means = image_processing_utils.compute_image_means(img)
  red, green, blue = means[0], means[1], means[2]
  logging.debug('%s R mean: %.4f', identifier, red)
  logging.debug('%s G mean: %.4f', identifier, green)
  logging.debug('%s B mean: %.4f', identifier, blue)
  r_g_ratio, b_g_ratio = red / green, blue / green
  logging.debug('%s R/G ratio: %.4f', identifier, r_g_ratio)
  logging.debug('%s B/G ratio: %.4f', identifier, b_g_ratio)
  return r_g_ratio, b_g_ratio
285
286
def _get_four_quadrant_patches(img, img_path, lens_suffix):
  """Divides the img in 4 equal parts and returns the patches.

  Each quadrant is saved as-is and also cropped by _PATCH_MARGIN pixels on
  every side (to drop boundary artifacts) before being returned.

  Args:
    img: an openCV image in RGB order.
    img_path: path to save the image.
    lens_suffix: str; suffix used to save the image.
  Returns:
    four_quadrant_patches: list of 4 cropped patches in row-major order.
  """
  num_rows = 2
  num_columns = 2
  # img.shape is (height, width[, channels]); shapes are already ints so
  # no math.floor is needed.
  size_y, size_x = img.shape[0], img.shape[1]
  # Horizontal extent divides by columns, vertical by rows. The original
  # code mixed the two, which was only benign because both equal 2.
  patch_w = size_x / num_columns
  patch_h = size_y / num_rows
  four_quadrant_patches = []
  for i in range(num_rows):
    for j in range(num_columns):
      x = patch_w * j
      y = patch_h * i
      patch = img[int(y):int(y + patch_h), int(x):int(x + patch_w)]
      patch_path = img_path.with_name(
          f'{img_path.stem}_{lens_suffix}_patch_'
          f'{i}_{j}{img_path.suffix}')
      image_processing_utils.write_image(patch/_CH_FULL_SCALE, patch_path)
      cropped_patch = patch[_PATCH_MARGIN:-_PATCH_MARGIN,
                            _PATCH_MARGIN:-_PATCH_MARGIN]
      four_quadrant_patches.append(cropped_patch)
      cropped_patch_path = img_path.with_name(
          f'{img_path.stem}_{lens_suffix}_cropped_patch_'
          f'{i}_{j}{img_path.suffix}')
      image_processing_utils.write_image(
          cropped_patch/_CH_FULL_SCALE, cropped_patch_path)
  return four_quadrant_patches
322
323
def _get_slanted_edge_patch(img, img_path, lens_suffix):
  """Crops the central slanted edge part of the img and returns the patch.

  The image is notionally divided into a 3x5 grid; the center cell is
  taken and cropped by _PATCH_MARGIN pixels on every side. The result is
  saved with a '_slanted_edge' suffix next to img_path.

  Args:
    img: an openCV image in RGB order.
    img_path: path to save the image.
    lens_suffix: str; suffix used to save the image. ie: 'w' or 'uv'.

  Returns:
    slanted_edge_patch: numpy uint8 image array of the cropped center cell.
  """
  num_rows = 3
  num_columns = 5
  # img.shape entries are already ints; math.floor was redundant.
  size_y, size_x = img.shape[0], img.shape[1]
  # Top-left corner and size of the central grid cell.
  x = int(round(size_x / num_columns * (num_columns // 2)))
  y = int(round(size_y / num_rows * (num_rows // 2)))
  w = int(round(size_x / num_columns))
  h = int(round(size_y / num_rows))
  patch = img[y:y+h, x:x+w]
  slanted_edge_patch = patch[_PATCH_MARGIN:-_PATCH_MARGIN,
                             _PATCH_MARGIN:-_PATCH_MARGIN]
  filename_with_path = img_path.with_name(
      f'{img_path.stem}_{lens_suffix}_slanted_edge{img_path.suffix}'
  )
  image_processing_utils.write_rgb_uint8_image(
      slanted_edge_patch, filename_with_path
  )
  return slanted_edge_patch
354
355
class MultiCameraSwitchTest(its_base_test.ItsBaseTest):
  """Test that the switch from UW to W lens has similar RGB values.

  This test uses various zoom ratios within range android.control.zoomRatioRange
  to capture images and find the point when the physical camera changes
  to determine the crossover point of change from UW to W.
  It does preview recording at UW and W crossover point to verify that
  the AE, AWB and AF behavior remains the same.
  """

  def test_multi_camera_switch(self):
    """Records preview over the UW/W crossover and checks AE/AWB/AF parity.

    Raises:
      AssertionError: if the lens never switches within _ZOOM_RANGE_UW_W,
        if ArUco markers are not detected, or if any AE/AWB/AF check fails.
    """
    with its_session_utils.ItsSession(
        device_id=self.dut.serial,
        camera_id=self.camera_id,
        hidden_physical_id=self.hidden_physical_id) as cam:
      props = cam.get_camera_properties()
      props = cam.override_with_hidden_physical_camera_props(props)
      chart_distance = self.chart_distance
      # Failure messages accumulate across all checks so a single run
      # reports every problem at once.
      failed_awb_msg = []
      failed_ae_msg = []
      failed_af_msg = []

      # check SKIP conditions
      first_api_level = its_session_utils.get_first_api_level(self.dut.serial)
      camera_properties_utils.skip_unless(
          first_api_level >= its_session_utils.ANDROID15_API_LEVEL and
          camera_properties_utils.zoom_ratio_range(props) and
          camera_properties_utils.logical_multi_camera(props))

      # Check the zoom range covers the UW/W crossover search window
      zoom_range = props['android.control.zoomRatioRange']
      logging.debug('zoomRatioRange: %s', zoom_range)
      camera_properties_utils.skip_unless(
          len(zoom_range) > 1 and
          (zoom_range[0] <= _ZOOM_RANGE_UW_W[0] <= zoom_range[1]) and
          (zoom_range[0] <= _ZOOM_RANGE_UW_W[1] <= zoom_range[1]))

      its_session_utils.load_scene(
          cam, props, self.scene, self.tablet, chart_distance)
      # Tap tablet to remove gallery buttons
      if self.tablet:
        self.tablet.adb.shell(
            f'input tap {_TAP_COORDINATES[0]} {_TAP_COORDINATES[1]}')

      preview_test_size = preview_processing_utils.get_max_preview_test_size(
          cam, self.camera_id)
      cam.do_3a()

      # Start dynamic preview recording and collect results
      capture_results, file_list = (
          preview_processing_utils.preview_over_zoom_range(
              self.dut, cam, preview_test_size, _ZOOM_RANGE_UW_W[0],
              _ZOOM_RANGE_UW_W[1], _ZOOM_STEP, self.log_path)
      )

      physical_id_before = None
      counter = 0  # counter for the index of crossover point result
      lens_changed = False

      # Walk the per-frame results until the active physical camera id
      # changes; the frame where it changes is the W side of the crossover.
      for capture_result in capture_results:
        counter += 1
        physical_id = capture_result[
            'android.logicalMultiCamera.activePhysicalId']
        if not physical_id_before:
          physical_id_before = physical_id
        zoom_ratio = float(capture_result['android.control.zoomRatio'])
        if physical_id_before == physical_id:
          continue
        else:
          logging.debug('Active physical id changed')
          logging.debug('Crossover zoom ratio point: %f', zoom_ratio)
          physical_id_before = physical_id
          lens_changed = True
          break

      # Raise error if the lens did not switch within the range
      # _ZOOM_RANGE_UW_W
      # TODO(ruchamk): Add lens_changed to the CameraITS metrics
      if not lens_changed:
        e_msg = 'Crossover point not found. Try running the test again!'
        raise AssertionError(e_msg)

      # counter is 1-based here: counter-1 indexes the first W frame and
      # counter-2 the last UW frame before the switch.
      img_uw_file = file_list[counter-2]
      capture_result_uw = capture_results[counter-2]
      logging.debug('Capture results uw crossover: %s', capture_result_uw)
      img_w_file = file_list[counter-1]
      capture_result_w = capture_results[counter-1]
      logging.debug('Capture results w crossover: %s', capture_result_w)

      # Remove unwanted frames and only save the UW and
      # W crossover point frames along with mp4 recording
      its_session_utils.remove_frame_files(self.log_path, [
          os.path.join(self.log_path, img_uw_file),
          os.path.join(self.log_path, img_w_file)])

      # Add suffix to the UW and W image files
      uw_path = pathlib.Path(os.path.join(self.log_path, img_uw_file))
      uw_name = uw_path.with_name(f'{uw_path.stem}_uw{uw_path.suffix}')
      os.rename(os.path.join(self.log_path, img_uw_file), uw_name)

      w_path = pathlib.Path(os.path.join(self.log_path, img_w_file))
      w_name = w_path.with_name(f'{w_path.stem}_w{w_path.suffix}')
      os.rename(os.path.join(self.log_path, img_w_file), w_name)

      # Convert UW and W img to numpy array
      uw_img = image_processing_utils.convert_image_to_numpy_array(
          str(uw_name))
      w_img = image_processing_utils.convert_image_to_numpy_array(
          str(w_name))

      # Front camera previews are mirrored; flip them back so the chart
      # layout (and ArUco markers) match the rear-camera orientation.
      if (props['android.lens.facing'] ==
          camera_properties_utils.LENS_FACING['FRONT']):
        img_name_stem = os.path.join(self.log_path, 'flipped_preview')
        uw_img, w_img = _check_orientation_and_flip(
            props, uw_img, w_img, img_name_stem
        )

      # Convert UW and W img to black and white
      uw_img_bw = (
          opencv_processing_utils.convert_image_to_high_contrast_black_white(
              uw_img))
      w_img_bw = (
          opencv_processing_utils.convert_image_to_high_contrast_black_white(
              w_img))

      # Find ArUco markers in the image with UW lens
      # and extract the outer box patch
      corners, ids = _find_aruco_markers(uw_img_bw, uw_path, 'uw')
      uw_chart_patch = _extract_main_patch(
          corners, ids, uw_img, uw_path, 'uw')
      uw_four_patches = _get_four_quadrant_patches(
          uw_chart_patch, uw_path, 'uw')

      # Find ArUco markers in the image with W lens
      # and extract the outer box patch
      corners, ids = _find_aruco_markers(w_img_bw, w_path, 'w')
      w_chart_patch = _extract_main_patch(
          corners, ids, w_img, w_path, 'w')
      w_four_patches = _get_four_quadrant_patches(
          w_chart_patch, w_path, 'w')

      ae_uw_y_avgs = {}
      ae_w_y_avgs = {}

      for uw_patch, w_patch, color in zip(
          uw_four_patches, w_four_patches, _COLORS):
        logging.debug('Checking for quadrant color: %s', color)

        # AE Check: Extract the Y component from rectangle patch
        # Extend (not overwrite) the accumulators so failures from every
        # quadrant are kept; previously only the last quadrant's AE/AWB
        # results survived the loop.
        ae_msg, uw_y_avg, w_y_avg = _do_ae_check(
            uw_patch, w_patch, self.log_path, color)
        failed_ae_msg.extend(ae_msg)
        ae_uw_y_avgs.update({color: f'{uw_y_avg:.4f}'})
        ae_w_y_avgs.update({color: f'{w_y_avg:.4f}'})

        # AWB Check : Verify that R/G and B/G ratios are within the limits
        failed_awb_msg.extend(_do_awb_check(uw_patch, w_patch))

      # Below print statements are for logging purpose.
      # Do not replace with logging.
      print(f'{_NAME}_ae_uw_y_avgs: ', ae_uw_y_avgs)
      print(f'{_NAME}_ae_w_y_avgs: ', ae_w_y_avgs)

      # AF check using slanted edge
      uw_slanted_edge_patch = _get_slanted_edge_patch(
          uw_chart_patch, uw_path, 'uw')
      w_slanted_edge_patch = _get_slanted_edge_patch(
          w_chart_patch, w_path, 'w')
      failed_af_msg, sharpness_uw, sharpness_w = _do_af_check(
          uw_slanted_edge_patch, w_slanted_edge_patch)
      print(f'{_NAME}_uw_sharpness: {sharpness_uw:.4f}')
      print(f'{_NAME}_w_sharpness: {sharpness_w:.4f}')

      if failed_awb_msg or failed_ae_msg or failed_af_msg:
        error_msg = _get_error_msg(failed_awb_msg, failed_ae_msg, failed_af_msg)
        raise AssertionError(f'{_NAME} failed with following errors:\n'
                             f'{error_msg}')
533
# Mobly entry point: runs this test class when executed as a script.
if __name__ == '__main__':
  test_runner.main()
536