1# Copyright 2014 The Chromium Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5import logging
6import optparse
7import os
8import sys
9import time
10
11from catapult_base import cloud_storage  # pylint: disable=import-error
12
13from telemetry.core import exceptions
14from telemetry.internal.actions import page_action
15from telemetry.internal.browser import browser_finder
16from telemetry.internal.results import results_options
17from telemetry.internal.util import exception_formatter
18from telemetry import page
19from telemetry.page import page_test
20from telemetry import story as story_module
21from telemetry.util import wpr_modes
22from telemetry.value import failure
23from telemetry.value import skip
24from telemetry.web_perf import story_test
25
26
class ArchiveError(Exception):
  """Raised when required WPR archive data for a story set is missing."""
29
30
def AddCommandLineArgs(parser):
  """Registers all story-runner command line options on |parser|."""
  story_module.StoryFilter.AddCommandLineArgs(parser)
  results_options.AddResultsOptions(parser)

  # Options controlling how often individual pages and the whole set repeat.
  repeat_group = optparse.OptionGroup(parser, 'Page set repeat options')
  repeat_group.add_option(
      '--page-repeat', default=1, type='int',
      help='Number of times to repeat each individual page '
           'before proceeding with the next page in the pageset.')
  repeat_group.add_option(
      '--pageset-repeat', default=1, type='int',
      help='Number of times to repeat the entire pageset.')
  repeat_group.add_option(
      '--max-failures', default=None, type='int',
      help='Maximum number of test failures before aborting '
           'the run. Defaults to the number specified by the '
           'PageTest.')
  parser.add_option_group(repeat_group)

  # Options controlling Web Page Replay behavior.
  wpr_group = optparse.OptionGroup(parser, 'Web Page Replay options')
  wpr_group.add_option(
      '--use-live-sites', dest='use_live_sites', action='store_true',
      help='Run against live sites and ignore the Web Page Replay archives.')
  parser.add_option_group(wpr_group)

  parser.add_option(
      '-d', '--also-run-disabled-tests', dest='run_disabled_tests',
      action='store_true', default=False,
      help='Ignore @Disabled and @Enabled restrictions.')
59
def ProcessCommandLineArgs(parser, args):
  """Validates parsed story-runner options, exiting via parser.error on bad
  input."""
  story_module.StoryFilter.ProcessCommandLineArgs(parser, args)
  results_options.ProcessCommandLineArgs(parser, args)

  # Both repeat counts must be at least 1; reject anything lower.
  for flag, count in (('--page-repeat', args.page_repeat),
                      ('--pageset-repeat', args.pageset_repeat)):
    if count < 1:
      parser.error('%s must be a positive integer.' % flag)
69
70
def _RunStoryAndProcessErrorIfNeeded(story, results, state, test):
  """Runs a single story, recording failures/skips on |results|.

  Known, recoverable Telemetry failures are recorded as failure values so
  the caller can continue with the next story; other Telemetry errors and
  unexpected exceptions are recorded and then re-raised.
  """
  def ProcessError():
    # Capture the exception currently being handled as a failure value.
    results.AddValue(failure.FailureValue(story, sys.exc_info()))
  try:
    if isinstance(test, story_test.StoryTest):
      test.WillRunStory(state.platform)
    state.WillRunStory(story)
    if not state.CanRunStory(story):
      results.AddValue(skip.SkipValue(
          story,
          'Skipped because story is not supported '
          '(SharedState.CanRunStory() returns False).'))
      return
    state.RunStory(results)
    if isinstance(test, story_test.StoryTest):
      test.Measure(state.platform, results)
  except (page_test.Failure, exceptions.TimeoutException,
          exceptions.LoginException, exceptions.ProfilingException):
    # Expected, recoverable failures: record and swallow so the run
    # continues with the next story.
    ProcessError()
  except exceptions.Error:
    # Other Telemetry errors: record the failure but re-raise so the caller
    # can tear down and recreate the shared state (see Run()).
    ProcessError()
    raise
  except page_action.PageActionNotSupported as e:
    results.AddValue(
        skip.SkipValue(story, 'Unsupported page action: %s' % e))
  except Exception:
    # Anything else is unexpected: record it, then abort the whole run.
    results.AddValue(
        failure.FailureValue(
            story, sys.exc_info(), 'Unhandlable exception raised.'))
    raise
  finally:
    # Remember whether an exception is already propagating so cleanup
    # errors below do not mask it.
    has_existing_exception = (sys.exc_info() != (None, None, None))
    try:
      state.DidRunStory(results)
      # if state.DidRunStory raises exception, things are messed up badly and we
      # do not need to run test.DidRunStory at that point.
      if isinstance(test, story_test.StoryTest):
        test.DidRunStory(state.platform)
      else:
        # Legacy PageTest path.
        test.DidRunPage(state.platform)
    except Exception:
      if not has_existing_exception:
        raise
      # Print current exception and propagate existing exception.
      exception_formatter.PrintFormattedException(
          msg='Exception raised when cleaning story run: ')
117
118
class StoryGroup(object):
  """A run of consecutive stories that all share one SharedState class."""

  def __init__(self, shared_state_class):
    self._shared_state_class = shared_state_class
    self._stories = []

  @property
  def shared_state_class(self):
    return self._shared_state_class

  @property
  def stories(self):
    return self._stories

  def AddStory(self, story):
    """Appends |story|, which must use this group's shared state class."""
    assert (story.shared_state_class is
            self._shared_state_class)
    self._stories.append(story)


def StoriesGroupedByStateClass(story_set, allow_multiple_groups):
  """Returns a list of story groups which each contains stories with
  the same shared_state_class.

  Example:
    Assume A1, A2, A3 are stories with same shared story class, and
    similar for B1, B2.
    If their orders in story set is A1 A2 B1 B2 A3, then the grouping will
    be [A1 A2] [B1 B2] [A3].

  It's purposefully done this way to make sure that order of
  stories are the same of that defined in story_set. It's recommended that
  stories with the same states should be arranged next to each others in
  story sets to reduce the overhead of setting up & tearing down the
  shared story state.

  Returns an empty list for an empty story set.

  Raises:
    ValueError: if |allow_multiple_groups| is False and the stories use
        more than one SharedState class.
  """
  story_groups = []
  for story in story_set:
    # Start a new group whenever the state class changes (or on the first
    # story).  This also fixes the previous behavior of raising IndexError
    # when |story_set| is empty.
    if (not story_groups or story.shared_state_class is not
        story_groups[-1].shared_state_class):
      if story_groups and not allow_multiple_groups:
        raise ValueError('This StorySet is only allowed to have one '
                         'SharedState but contains the following '
                         'SharedState classes: %s, %s.\n Either '
                         'remove the extra SharedStates or override '
                         'allow_mixed_story_states.' % (
                         story_groups[-1].shared_state_class,
                         story.shared_state_class))
      story_groups.append(StoryGroup(story.shared_state_class))
    story_groups[-1].AddStory(story)
  return story_groups
172
173
def Run(test, story_set, finder_options, results, max_failures=None,
        should_tear_down_state_after_each_story_run=False):
  """Runs a given test against a given page_set with the given options.

  Stop execution for unexpected exceptions such as KeyboardInterrupt.
  We "white list" certain exceptions for which the story runner
  can continue running the remaining stories.

  Args:
    test: a StoryTest (or legacy PageTest) instance.
    story_set: the StorySet to run.
    finder_options: browser finder options; also carries repeat counts,
        WPR settings and the --max-failures override.
    results: the results object that collects values and failures.
    max_failures: abort after this many failures; the command-line value
        takes precedence when both are set.
    should_tear_down_state_after_each_story_run: when True, the shared
        state is torn down and rebuilt between every story run.
  """
  # Filter page set based on options.
  # NOTE: py2 filter() returns a list here, so |stories| can be tested for
  # emptiness and iterated more than once below.
  stories = filter(story_module.StoryFilter.IsSelected, story_set)

  # Ensure WPR archives are present and up to date, unless running against
  # live sites or recording new archives.
  if (not finder_options.use_live_sites and story_set.bucket and
      finder_options.browser_options.wpr_mode != wpr_modes.WPR_RECORD):
    serving_dirs = story_set.serving_dirs
    for directory in serving_dirs:
      cloud_storage.GetFilesInDirectoryIfChanged(directory,
                                                 story_set.bucket)
    if not _UpdateAndCheckArchives(
        story_set.archive_data_file, story_set.wpr_archive_info,
        stories):
      return

  if not stories:
    return

  # Effective max failures gives priority to command-line flag value.
  effective_max_failures = finder_options.max_failures
  if effective_max_failures is None:
    effective_max_failures = max_failures

  story_groups = StoriesGroupedByStateClass(
      stories,
      story_set.allow_mixed_story_states)

  for group in story_groups:
    state = None
    try:
      for _ in xrange(finder_options.pageset_repeat):
        for story in group.stories:
          for _ in xrange(finder_options.page_repeat):
            if not state:
              # Construct shared state by using a copy of finder_options. Shared
              # state may update the finder_options. If we tear down the shared
              # state after this story run, we want to construct the shared
              # state for the next story from the original finder_options.
              state = group.shared_state_class(
                  test, finder_options.Copy(), story_set)
            results.WillRunPage(story)
            try:
              _WaitForThermalThrottlingIfNeeded(state.platform)
              _RunStoryAndProcessErrorIfNeeded(story, results, state, test)
            except exceptions.Error:
              # Catch all Telemetry errors to give the story a chance to retry.
              # The retry is enabled by tearing down the state and creating
              # a new state instance in the next iteration.
              try:
                # If TearDownState raises, do not catch the exception.
                # (The Error was saved as a failure value.)
                state.TearDownState()
              finally:
                # Later finally-blocks use state, so ensure it is cleared.
                state = None
            finally:
              # Record whether an exception is already propagating so that
              # result-processing errors below cannot mask it.
              has_existing_exception = sys.exc_info() != (None, None, None)
              try:
                if state:
                  _CheckThermalThrottling(state.platform)
                results.DidRunPage(story)
              except Exception:
                if not has_existing_exception:
                  raise
                # Print current exception and propagate existing exception.
                exception_formatter.PrintFormattedException(
                    msg='Exception from result processing:')
              if state and should_tear_down_state_after_each_story_run:
                state.TearDownState()
                state = None
          # Bail out of the whole run once the failure budget is exhausted.
          if (effective_max_failures is not None and
              len(results.failures) > effective_max_failures):
            logging.error('Too many failures. Aborting.')
            return
    finally:
      # Tear down the last shared state of this group even when an
      # exception is propagating, without masking that exception.
      if state:
        has_existing_exception = sys.exc_info() != (None, None, None)
        try:
          state.TearDownState()
        except Exception:
          if not has_existing_exception:
            raise
          # Print current exception and propagate existing exception.
          exception_formatter.PrintFormattedException(
              msg='Exception from TearDownState:')
266
267
def RunBenchmark(benchmark, finder_options):
  """Run this test with the given options.

  Returns:
    The number of failure values (up to 254), 255 if there is an uncaught
    exception, or 1 if the benchmark is disabled on the selected browser
    and --also-run-disabled-tests was not passed.
  """
  benchmark.CustomizeBrowserOptions(finder_options.browser_options)

  possible_browser = browser_finder.FindBrowser(finder_options)
  if possible_browser and benchmark.ShouldDisable(possible_browser):
    logging.warning('%s is disabled on the selected browser', benchmark.Name())
    if finder_options.run_disabled_tests:
      logging.warning(
          'Running benchmark anyway due to: --also-run-disabled-tests')
    else:
      logging.warning(
          'Try --also-run-disabled-tests to force the benchmark to run.')
      return 1

  pt = benchmark.CreatePageTest(finder_options)
  pt.__name__ = benchmark.__class__.__name__

  # Mirror the benchmark's disabled/enabled annotations onto the page test.
  if hasattr(benchmark, '_disabled_strings'):
    # pylint: disable=protected-access
    pt._disabled_strings = benchmark._disabled_strings
  if hasattr(benchmark, '_enabled_strings'):
    # pylint: disable=protected-access
    pt._enabled_strings = benchmark._enabled_strings

  stories = benchmark.CreateStorySet(finder_options)
  # Legacy PageTests can only run telemetry.page.Page stories.
  if isinstance(pt, page_test.PageTest):
    if any(not isinstance(p, page.Page) for p in stories.stories):
      raise Exception(
          'PageTest must be used with StorySet containing only '
          'telemetry.page.Page stories.')

  benchmark_metadata = benchmark.GetMetadata()
  with results_options.CreateResults(
      benchmark_metadata, finder_options,
      benchmark.ValueCanBeAddedPredicate) as results:
    try:
      Run(pt, stories, finder_options, results, benchmark.max_failures,
          benchmark.ShouldTearDownStateAfterEachStoryRun())
      return_code = min(254, len(results.failures))
    except Exception:
      # Catch-all at the top-level benchmark boundary: log and report 255.
      exception_formatter.PrintFormattedException()
      return_code = 255

    try:
      if finder_options.upload_results:
        # Only resolve the bucket alias when actually uploading; previously
        # an unknown --upload-bucket raised KeyError even on runs that never
        # upload anything.
        bucket = cloud_storage.BUCKET_ALIASES[finder_options.upload_bucket]
        results.UploadTraceFilesToCloud(bucket)
        results.UploadProfilingFilesToCloud(bucket)
    finally:
      results.PrintSummary()
  return return_code
325
326
327def _UpdateAndCheckArchives(archive_data_file, wpr_archive_info,
328                            filtered_stories):
329  """Verifies that all stories are local or have WPR archives.
330
331  Logs warnings and returns False if any are missing.
332  """
333  # Report any problems with the entire story set.
334  if any(not story.is_local for story in filtered_stories):
335    if not archive_data_file:
336      logging.error('The story set is missing an "archive_data_file" '
337                    'property.\nTo run from live sites pass the flag '
338                    '--use-live-sites.\nTo create an archive file add an '
339                    'archive_data_file property to the story set and then '
340                    'run record_wpr.')
341      raise ArchiveError('No archive data file.')
342    if not wpr_archive_info:
343      logging.error('The archive info file is missing.\n'
344                    'To fix this, either add svn-internal to your '
345                    '.gclient using http://goto/read-src-internal, '
346                    'or create a new archive using record_wpr.')
347      raise ArchiveError('No archive info file.')
348    wpr_archive_info.DownloadArchivesIfNeeded()
349
350  # Report any problems with individual story.
351  stories_missing_archive_path = []
352  stories_missing_archive_data = []
353  for story in filtered_stories:
354    if not story.is_local:
355      archive_path = wpr_archive_info.WprFilePathForStory(story)
356      if not archive_path:
357        stories_missing_archive_path.append(story)
358      elif not os.path.isfile(archive_path):
359        stories_missing_archive_data.append(story)
360  if stories_missing_archive_path:
361    logging.error(
362        'The story set archives for some stories do not exist.\n'
363        'To fix this, record those stories using record_wpr.\n'
364        'To ignore this warning and run against live sites, '
365        'pass the flag --use-live-sites.')
366    logging.error(
367        'stories without archives: %s',
368        ', '.join(story.display_name
369                  for story in stories_missing_archive_path))
370  if stories_missing_archive_data:
371    logging.error(
372        'The story set archives for some stories are missing.\n'
373        'Someone forgot to check them in, uploaded them to the '
374        'wrong cloud storage bucket, or they were deleted.\n'
375        'To fix this, record those stories using record_wpr.\n'
376        'To ignore this warning and run against live sites, '
377        'pass the flag --use-live-sites.')
378    logging.error(
379        'stories missing archives: %s',
380        ', '.join(story.display_name
381                  for story in stories_missing_archive_data))
382  if stories_missing_archive_path or stories_missing_archive_data:
383    raise ArchiveError('Archive file is missing stories.')
384  # Only run valid stories if no problems with the story set or
385  # individual stories.
386  return True
387
388
389def _WaitForThermalThrottlingIfNeeded(platform):
390  if not platform.CanMonitorThermalThrottling():
391    return
392  thermal_throttling_retry = 0
393  while (platform.IsThermallyThrottled() and
394         thermal_throttling_retry < 3):
395    logging.warning('Thermally throttled, waiting (%d)...',
396                    thermal_throttling_retry)
397    thermal_throttling_retry += 1
398    time.sleep(thermal_throttling_retry * 2)
399
400  if thermal_throttling_retry and platform.IsThermallyThrottled():
401    logging.warning('Device is thermally throttled before running '
402                    'performance tests, results will vary.')
403
404
405def _CheckThermalThrottling(platform):
406  if not platform.CanMonitorThermalThrottling():
407    return
408  if platform.HasBeenThermallyThrottled():
409    logging.warning('Device has been thermally throttled during '
410                    'performance tests, results will vary.')
411