# Copyright 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verify image and inertial sensor events are well synchronized."""


import json
import logging
import math
import os
import sys
import threading
import time

from matplotlib import pylab
import matplotlib.pyplot
from mobly import test_runner
import numpy as np
import scipy.spatial

import its_base_test
import camera_properties_utils
import capture_request_utils
import image_processing_utils
import its_session_utils
import sensor_fusion_utils

_CAM_FRAME_RANGE_MAX = 9.0  # Seconds: max allowed camera frame range.
_GYRO_SAMP_RATE_MIN = 100.0  # Samples/second: min gyro sample rate.
_NAME = os.path.splitext(os.path.basename(__file__))[0]
_ARDUINO_INIT_WAIT_TIME = 3.0  # Seconds to wait for Arduino comm.
_NUM_ROTATIONS = 10
_START_FRAME = 1
_FRAME_DELTA_RTOL = 1.5  # 50% margin over nominal frame period of captures.
_POLYFIT_DEGREES_LEGACY = (2, 3)
_POLYFIT_DEGREES = (3,)

# Constants to convert between different units (for clarity).
_SEC_TO_MSEC = 1000
_MSEC_TO_NSEC = 1000*1000
_NSEC_TO_SEC = 1.0E-9
_CM_TO_M = 1E-2
_RADS_TO_DEGS = 180/math.pi

# PASS/FAIL thresholds.
_CORR_DIST_THRESH_MAX = 0.005
_OFFSET_MS_THRESH_MAX = 1  # milliseconds

# Set maximum exposure time to reduce motion blur
# blur = velocity * exp_time * N_pixels / FOV
# blur: 3 pixels, v: chart R (17.7cm) / rot_time (1.5s), N: 640, FOV: 30cm
_EXP_MAX = 4*_MSEC_TO_NSEC  # 4 ms
_GYRO_INIT_WAIT_TIME = 0.5  # Seconds to wait for gyro to stabilize.
_GYRO_POST_WAIT_TIME = 0.2  # Seconds to wait to capture some extra gyro data.
_IMG_SIZE_MAX = 640 * 480  # Maximum image size.
_NUM_FRAMES_MAX = 300  # fps*test_length should be < this for smooth captures.


def _collect_data(cam, fps, w, h, test_length, rot_rig, chart_dist,
                  name_with_log_path):
  """Capture a new set of data from the device.

  Captures camera frames while the rotation rig moves the device in the
  prescribed manner. Note that because a manual capture request is used,
  optical image stabilization (OIS) is disabled.

  Args:
    cam: camera object.
    fps: frames per second capture rate.
    w: pixel width of frames.
    h: pixel height of frames.
    test_length: length of time for test in seconds.
    rot_rig: dict with 'cntl' and 'ch' defined.
    chart_dist: float value of distance to chart in meters.
    name_with_log_path: file name with location to save data.

  Returns:
    events: dictionary of camera and gyro timestamp events.
    frames: list of RGB images as numpy arrays.
  """
  logging.debug('Starting sensor event collection')
  props = cam.get_camera_properties()
  props = cam.override_with_hidden_physical_camera_props(props)
  camera_properties_utils.skip_unless(
      camera_properties_utils.sensor_fusion_capable(props) and
      cam.get_sensors().get('gyro'))

  # Start camera rotation.
  serial_port = None
  if rot_rig['cntl'].lower() == sensor_fusion_utils.ARDUINO_STRING.lower():
    # Identify port.
    serial_port = sensor_fusion_utils.serial_port_def(
        sensor_fusion_utils.ARDUINO_STRING)
    # Send test cmd to Arduino until cmd returns properly.
    sensor_fusion_utils.establish_serial_comm(serial_port)
  p = threading.Thread(
      target=sensor_fusion_utils.rotation_rig,
      args=(
          rot_rig['cntl'],
          rot_rig['ch'],
          _NUM_ROTATIONS,
          sensor_fusion_utils.ARDUINO_ANGLES_SENSOR_FUSION,
          sensor_fusion_utils.ARDUINO_SERVO_SPEED_SENSOR_FUSION,
          sensor_fusion_utils.ARDUINO_MOVE_TIME_SENSOR_FUSION,
          serial_port,
      ),
  )
  p.start()

  cam.start_sensor_events()

  # Sleep a while for gyro events to stabilize.
  time.sleep(_GYRO_INIT_WAIT_TIME)
  if rot_rig['cntl'].lower() == 'arduino':
    time.sleep(_ARDUINO_INIT_WAIT_TIME)

  # Raise error if not FRONT or REAR facing camera.
  facing = props['android.lens.facing']
  camera_properties_utils.check_front_or_rear_camera(props)

  # Capture frames.
  fmt = {'format': 'yuv', 'width': w, 'height': h}
  s, e, _, _, _ = cam.do_3a(get_results=True, do_af=False)
  logging.debug('3A ISO: %d, exp: %.3fms', s, e/_MSEC_TO_NSEC)
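  # Cap exposure time to limit motion blur and compensate by raising ISO so
  # overall brightness (ISO x exposure) stays roughly constant.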
  if e > _EXP_MAX:
    logging.debug('Re-assigning exposure time')
    s *= int(round(e / _EXP_MAX))
    e = _EXP_MAX
    s_max = props['android.sensor.info.sensitivityRange'][1]
    if s > s_max:
      logging.debug('Calculated ISO too large! ISO: %d, MAX: %d '
                    'Setting ISO to max analog gain value.', s, s_max)
      s = s_max
  req = capture_request_utils.manual_capture_request(s, e)
  capture_request_utils.turn_slow_filters_off(props, req)
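  # Focus distances are in diopters (1/m): focus on the chart, clamped to the
  # lens' minimum focus distance (its largest supported diopter value).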
  fd_min = props['android.lens.info.minimumFocusDistance']
  fd_chart = 1 / chart_dist
  fd_meas = min(fd_min, fd_chart)
  logging.debug('fd_min: %.3f, fd_chart: %.3f, fd_meas: %.3f',
                fd_min, fd_chart, fd_meas)
  req['android.lens.focusDistance'] = fd_meas
  req['android.control.aeTargetFpsRange'] = [fps, fps]
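  # Frame duration in ns: 1e9 / fps.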
  req['android.sensor.frameDuration'] = int(1 / _NSEC_TO_SEC / fps)
  logging.debug('Capturing %dx%d with ISO %d, exp %.3fms at %dfps',
                w, h, s, e/_MSEC_TO_NSEC, fps)
  caps = cam.do_capture([req] * int(fps * test_length), fmt)

  # Capture a few more gyro samples for use in get_best_alignment_offset.
  time.sleep(_GYRO_POST_WAIT_TIME)

  # Get the gyro events.
  logging.debug('Reading out inertial sensor events')
  gyro = cam.get_sensor_events()['gyro']
  logging.debug('Number of gyro samples %d', len(gyro))

  # Combine the gyro and camera events into a single structure.
  logging.debug('Dumping event data')
  starts = [cap['metadata']['android.sensor.timestamp'] for cap in caps]
  exptimes = [cap['metadata']['android.sensor.exposureTime'] for cap in caps]
  readouts = [cap['metadata']['android.sensor.rollingShutterSkew']
              for cap in caps]
  events = {'gyro': gyro, 'cam': list(zip(starts, exptimes, readouts)),
            'facing': facing}
  with open(f'{name_with_log_path}_events.txt', 'w') as f:
    f.write(json.dumps(events))

  # Convert frames to RGB.
  logging.debug('Dumping frames')
  frames = []
  for i, cap in enumerate(caps):
    img = image_processing_utils.convert_capture_to_rgb_image(cap)
    frames.append(img)
    image_processing_utils.write_image(
        img, f'{name_with_log_path}_frame{i:03d}.png')
  return events, frames


def _get_cam_times(cam_events, fps):
  """Get the camera frame times.

  Assigns a time to each frame. Assumes each image is captured
  instantaneously at the midpoint of its exposure/readout interval.

  Args:
    cam_events: List of (start_exposure, exposure_time, readout_duration)
                tuples, one per captured frame, with times in nanoseconds.
    fps: float; nominal frames per second of the capture.

  Returns:
    frame_times: Array of N times, one corresponding to the 'middle' of the
                 exposure of each frame.
  """
  starts = np.array([start for start, exptime, readout in cam_events])
  max_frame_delta_ms = (np.amax(np.subtract(starts[1:], starts[0:-1])) /
                        _MSEC_TO_NSEC)
  logging.debug('Maximum frame delta: %.3f ms', max_frame_delta_ms)
  frame_delta_tol_ms = _FRAME_DELTA_RTOL * (1 / fps) * _SEC_TO_MSEC
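  # A delta above _FRAME_DELTA_RTOL x the nominal period flags dropped frames.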
  if max_frame_delta_ms > frame_delta_tol_ms:
    raise AssertionError(f'Frame drop! Max delta: {max_frame_delta_ms:.3f}ms, '
                         f'ATOL: {frame_delta_tol_ms}ms')
  exptimes = np.array([exptime for start, exptime, readout in cam_events])
  if not np.all(exptimes == exptimes[0]):
    raise AssertionError(f'Exposure times vary in frames! {exptimes}')
  readouts = np.array([readout for start, exptime, readout in cam_events])
  if not np.all(readouts == readouts[0]):
    raise AssertionError(f'Rolling shutter not always equal! {readouts}')
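  # Time each frame at the center of its exposure + readout window.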
  frame_times = starts + (exptimes + readouts) / 2.0
  return frame_times


def _plot_best_shift(best, coeff, x, y, name_with_log_path, degree):
  """Saves a plot of the best offset, the fit data, and the x,y data.

  Args:
    best: x value of the best fit (the best offset).
    coeff: np array of polynomial coefficients from np.polyfit(x, y, degree).
    x: np array of x data that was fit.
    y: np array of y data that was fit.
    name_with_log_path: file name with location to store data.
    degree: degree of polynomial used to fit.
  """
  xfit = np.arange(x[0], x[-1], 0.05).tolist()
  polynomial = sensor_fusion_utils.polynomial_from_coefficients(coeff)
  yfit = [polynomial(x) for x in xfit]
  logging.debug('Degree %s best x: %s, y: %s', degree, best, min(yfit))
  pylab.figure()
  pylab.title(f'{_NAME} Degree: {degree} Gyro/Camera Time Correlation')
  pylab.plot(x, y, 'ro', label='data', alpha=0.7)
  pylab.plot(xfit, yfit, 'b', label='fit', alpha=0.7)
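  # Mark the minimum of the fitted curve; its x value is the best time shift.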
  pylab.plot(best, min(yfit), 'g*', label='best', markersize=10)
  pylab.ticklabel_format(axis='y', style='sci', scilimits=(-3, -3))
  pylab.xlabel('Relative horizontal shift between curves (ms)')
  pylab.ylabel('Correlation distance')
  pylab.legend()
  matplotlib.pyplot.savefig(
      f'{name_with_log_path}_plot_shifts_degree{degree}.png')


def _plot_rotations(cam_rots, gyro_rots, name_with_log_path):
  """Saves a plot of the camera vs. gyro rotational measurements.

  Args:
    cam_rots: Array of camera rotation measurements (rads).
    gyro_rots: Array of gyro rotation measurements (rads).
    name_with_log_path: File name with location to store data.
  """
  # For plot, scale rotations to degrees.
  pylab.figure()
  pylab.title(f'{_NAME} Gyro/Camera Rotations')
  pylab.plot(range(len(cam_rots)), cam_rots*_RADS_TO_DEGS, '-r.',
             label='camera', alpha=0.7)
  pylab.plot(range(len(gyro_rots)), gyro_rots*_RADS_TO_DEGS, '-b.',
             label='gyro', alpha=0.7)
  pylab.xlabel('Camera frame number')
  pylab.ylabel('Angular displacement between adjacent camera frames (degrees)')
  pylab.xlim([0, len(cam_rots)])
  pylab.legend()
  matplotlib.pyplot.savefig(f'{name_with_log_path}_plot_rotations.png')


def load_data():
  """Load a set of previously captured data.

  Returns:
    events: Dictionary containing all gyro events and cam timestamps.
    frames: List of RGB images as numpy arrays.
    w:      Pixel width of frames.
    h:      Pixel height of frames.
  """
  with open(f'{_NAME}_events.txt', 'r') as f:
    events = json.loads(f.read())
  n = len(events['cam'])
  frames = []
  for i in range(n):
    img = image_processing_utils.read_image(f'{_NAME}_frame{i:03d}.png')
    w, h = img.size[0:2]
    frames.append(np.array(img).reshape((h, w, 3)) / 255)
  return events, frames, w, h


class SensorFusionTest(its_base_test.ItsBaseTest):
  """Tests if image and motion sensor events are well synchronized.

  Tests gyro and camera timestamp differences while the camera is rotating.
  Test description is in SensorFusion.pdf file. Test rotates phone in the
  prescribed manner and captures images.

  Camera rotation is determined from images and from gyroscope events.
  Timestamp offset between gyro and camera is determined using scipy
  spatial correlation distance. The offset that minimizes the correlation
  distance is taken as the optimum.

  PASS/FAIL based on the offset and also the correlation distance.
  """

  def _assert_gyro_encompasses_camera(self, cam_times, gyro_times):
    """Confirms the camera events are bounded by the gyroscope events.

    Also ensures:
      1. Camera frame range is less than _CAM_FRAME_RANGE_MAX. When the
         camera frame range is significantly larger than spec, the system is
         usually in a busy/bad state.
      2. Gyro sample rate is greater than _GYRO_SAMP_RATE_MIN.

    Args:
      cam_times: numpy array of camera times.
      gyro_times: List of 'gyro' times.
    """
    min_cam_time = min(cam_times) * _NSEC_TO_SEC
    max_cam_time = max(cam_times) * _NSEC_TO_SEC
    min_gyro_time = min(gyro_times) * _NSEC_TO_SEC
    max_gyro_time = max(gyro_times) * _NSEC_TO_SEC
    if not (min_cam_time > min_gyro_time and max_cam_time < max_gyro_time):
      raise AssertionError(
          f'Camera timestamps [{min_cam_time}, {max_cam_time}] not '
          f'enclosed by gyro timestamps [{min_gyro_time}, {max_gyro_time}]')

    cam_frame_range = max_cam_time - min_cam_time
    logging.debug('Camera frame range: %f', cam_frame_range)

    gyro_time_range = max_gyro_time - min_gyro_time
    gyro_smp_per_sec = len(gyro_times) / gyro_time_range
    logging.debug('Gyro samples per second: %f', gyro_smp_per_sec)
    if cam_frame_range > _CAM_FRAME_RANGE_MAX:
      raise AssertionError(
          f'Camera frame range, {cam_frame_range}s, too high!')
    if gyro_smp_per_sec < _GYRO_SAMP_RATE_MIN:
      raise AssertionError(
          f'Gyro sample rate, {gyro_smp_per_sec}S/s, too low!')

  def test_sensor_fusion(self):
    # Determine if '--replay' is in the cmd line.
    replay = False
    for s in list(sys.argv[1:]):
      if '--replay' in s:
        replay = True
        logging.info('test_sensor_fusion.py not run. Using existing data set.')

    rot_rig = {}
    fps = float(self.fps)
    img_w, img_h = self.img_w, self.img_h
    test_length = float(self.test_length)
    name_with_log_path = os.path.join(self.log_path, _NAME)
    chart_distance = self.chart_distance * _CM_TO_M

    if img_w * img_h > _IMG_SIZE_MAX or fps * test_length > _NUM_FRAMES_MAX:
      logging.debug(
          'Warning: Your test parameters may require fast write speeds'
          ' to run smoothly.  If you run into problems, consider'
          " smaller values of 'w', 'h', 'fps', or 'test_length'.")

    if replay:
      events, frames, _, _ = load_data()
    else:
      with its_session_utils.ItsSession(
          device_id=self.dut.serial,
          camera_id=self.camera_id,
          hidden_physical_id=self.hidden_physical_id) as cam:

        rot_rig['cntl'] = self.rotator_cntl
        rot_rig['ch'] = self.rotator_ch
        events, frames = _collect_data(
            cam, fps, img_w, img_h, test_length, rot_rig, chart_distance,
            name_with_log_path)
    logging.debug('Start frame: %d', _START_FRAME)

    sensor_fusion_utils.plot_gyro_events(events['gyro'], _NAME, self.log_path)

    # Validity check on gyro/camera timestamps.
    cam_times = _get_cam_times(events['cam'][_START_FRAME:], fps)
    gyro_times = [e['time'] for e in events['gyro']]
    self._assert_gyro_encompasses_camera(cam_times, gyro_times)

    # Compute cam rotation displacement (rads) between adjacent frame pairs.
    cam_rots = sensor_fusion_utils.get_cam_rotations(
        frames[_START_FRAME:], events['facing'], img_h,
        name_with_log_path, _START_FRAME)
    logging.debug('cam_rots: %s', str(cam_rots))
    gyro_rots = sensor_fusion_utils.get_gyro_rotations(
        events['gyro'], cam_times)
    _plot_rotations(cam_rots, gyro_rots, name_with_log_path)

    # Find the best offset. Starting with Android 14, only a 3rd-order
    # polynomial fit is used.
    first_api_level = its_session_utils.get_first_api_level(self.dut.serial)
    polyfit_degrees = list(_POLYFIT_DEGREES)
    if first_api_level <= its_session_utils.ANDROID13_API_LEVEL:
      polyfit_degrees = list(_POLYFIT_DEGREES_LEGACY)
    logging.debug('Attempting to fit data to polynomials of degrees: %s',
                  polyfit_degrees)
    for degree in polyfit_degrees:
      output = sensor_fusion_utils.get_best_alignment_offset(
          cam_times, cam_rots, events['gyro'], degree=degree
      )
      if not output:
        logging.debug('Degree %d was not a good fit.', degree)
        continue
      logging.debug('Degree %d was a good fit.', degree)
      offset_ms, coeffs, candidates, distances = output
      _plot_best_shift(
          offset_ms, coeffs, candidates, distances, name_with_log_path, degree)
      break
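    # for/else: the else branch runs only if no degree produced a good fit,
    # i.e. the loop finished without hitting the break above.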
    else:
      raise AssertionError(
          f'No degree in {polyfit_degrees} was a good fit for the data!'
      )

    # Calculate correlation distance with best offset.
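    # Correlation distance of 0 means perfectly correlated (1 = uncorrelated).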
    corr_dist = scipy.spatial.distance.correlation(cam_rots, gyro_rots)
    logging.debug('Best correlation of %f at shift of %.3fms',
                  corr_dist, offset_ms)
    print(f'test_sensor_fusion_corr_dist: {corr_dist}')
    print(f'test_sensor_fusion_offset_ms: {offset_ms:.3f}')

    # Assert PASS/FAIL criteria.
    if corr_dist > _CORR_DIST_THRESH_MAX:
      raise AssertionError(f'Poor gyro/camera correlation: {corr_dist:.6f}, '
                           f'ATOL: {_CORR_DIST_THRESH_MAX}.')
    if abs(offset_ms) > _OFFSET_MS_THRESH_MAX:
      raise AssertionError('Offset too large. Measured (ms): '
                           f'{offset_ms:.3f}, ATOL: {_OFFSET_MS_THRESH_MAX}.')
    else:  # remove frames if PASS
      its_session_utils.remove_tmp_files(self.log_path, f'{_NAME}_frame*.png')


if __name__ == '__main__':
  test_runner.main()