# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import glob
import logging
import os
import random
import re
import shutil
import time

import common
from autotest_lib.client.bin import test, utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros import constants, cros_logging


class CrashTest(test.test):
    """
    This class deals with running crash tests, which are tests which crash a
    user-space program (or the whole machine) and generate a core dump. We
    want to check that the correct crash dump is available and can be
    retrieved.

    Chromium OS has a crash sender which checks for new crash data and sends
    it to a server. This crash data is used to track software quality and find
    bugs. The system crash sender normally runs all the time, but can be
    paused by creating _PAUSE_FILE. When crash sender sees this file, it
    pauses operation.

    For testing purposes we sometimes want to run the crash sender manually.
    In this case we pass the --ignore_pause_file flag so that the pause file
    is ignored.

    Also for testing we sometimes want to mock out the crash sender, and just
    have it pretend to succeed or fail. The _MOCK_CRASH_SENDING file is used
    for this. If it doesn't exist, then the crash sender runs normally. If
    it exists but is empty, the crash sender will succeed (but actually do
    nothing). If the file contains something, then the crash sender will fail.

    If the user consents to sending crash reports, then the _CONSENT_FILE will
    exist in the home directory. This test needs to create this file for the
    crash sending to work. The metrics daemon caches the consent state for
    1 second, so we need to sleep for more than that after changing it to be
    sure it picks up the change.

    Crash reports are rate limited to a certain number of reports each 24
    hours. If the maximum number has already been sent then reports are held
    until later. This is administered by a directory _CRASH_SENDER_RATE_DIR
    which contains one temporary file for each time a report is sent.

    The class provides the ability to push a consent file. This disables
    consent for this test but allows it to be popped back later. This
    makes nested tests easier. If _automatic_consent_saving is True (the
    default) then consent will be pushed at the start and popped at the end.

    Interesting variables:
        _log_reader: the log reader used for reading log files
        _leave_crash_sending: True to enable crash sending on exit from the
            test, False to disable it. (Default True).
        _automatic_consent_saving: True to push the consent at the start of
            the test and pop it afterwards. (Default True).

    Useful places to look for more information are:

    chromeos/src/platform/crash-reporter/crash_sender
        - the script which sends crash reports to the server

    chromeos/src/platform/crash-reporter/
        - crash reporter program
    """


    _CONSENT_FILE = '/home/chronos/Consent To Send Stats'
    _CORE_PATTERN = '/proc/sys/kernel/core_pattern'
    _LOCK_CORE_PATTERN = '/proc/sys/kernel/lock_core_pattern'
    _CRASH_REPORTER_PATH = '/sbin/crash_reporter'
    _CRASH_SENDER_PATH = '/sbin/crash_sender'
    _CRASH_SENDER_RATE_DIR = '/var/lib/crash_sender'
    _CRASH_SENDER_LOCK_PATH = '/run/lock/crash_sender'
    _CRASH_RUN_STATE_DIR = '/run/crash_reporter'
    _CRASH_TEST_IN_PROGRESS = _CRASH_RUN_STATE_DIR + '/crash-test-in-progress'
    _MOCK_CRASH_SENDING = _CRASH_RUN_STATE_DIR + '/mock-crash-sending'
    _FILTER_IN = _CRASH_RUN_STATE_DIR + '/filter-in'
    _PAUSE_FILE = '/var/lib/crash_sender_paused'
    _SYSTEM_CRASH_DIR = '/var/spool/crash'
    _FALLBACK_USER_CRASH_DIR = '/home/chronos/crash'
    _REBOOT_VAULT_CRASH_DIR = '/mnt/stateful_partition/reboot_vault/crash'
    _USER_CRASH_DIRS = '/home/chronos/u-*/crash'
    _USER_CRASH_DIR_REGEX = re.compile('/home/chronos/u-([a-f0-9]+)/crash')

    # Matches kDefaultMaxUploadBytes
    _MAX_CRASH_SIZE = 1024 * 1024

    # Use the same file format as crash does normally:
    # <basename>.#.#.#.#.meta
    _FAKE_TEST_BASENAME = 'fake.1.2.3.4'

    def _set_system_sending(self, is_enabled):
        """Sets whether or not the system crash_sender is allowed to run.

        This is done by creating or removing _PAUSE_FILE.

        crash_sender may still be allowed to run if _call_sender_one_crash is
        called with 'ignore_pause=True'.

        @param is_enabled: True to enable crash_sender, False to disable it.
        """
        if is_enabled:
            if os.path.exists(self._PAUSE_FILE):
                os.remove(self._PAUSE_FILE)
        else:
            utils.system('touch ' + self._PAUSE_FILE)

    def _remove_all_files_in_dir(self, d):
        """Recursively remove all of the files in |d|, without removing |d|.
        """
        try:
            root, dirs, files = next(os.walk(d))
        except StopIteration:
            return
        for path in files:
            os.remove(os.path.join(root, path))
        for path in dirs:
            shutil.rmtree(os.path.join(root, path))


    def _reset_rate_limiting(self):
        """Reset the count of crash reports sent today.

        This clears the contents of the rate limiting directory which has
        the effect of resetting our count of crash reports sent.
        """
        self._remove_all_files_in_dir(self._CRASH_SENDER_RATE_DIR)


    def _clear_spooled_crashes(self):
        """Clears system and user crash directories.

        This will remove all crash reports which are waiting to be sent.
        """
        self._remove_all_files_in_dir(self._SYSTEM_CRASH_DIR)
        self._remove_all_files_in_dir(self._REBOOT_VAULT_CRASH_DIR)
        for d in glob.glob(self._USER_CRASH_DIRS):
            self._remove_all_files_in_dir(d)
        self._remove_all_files_in_dir(self._FALLBACK_USER_CRASH_DIR)


    def _kill_running_sender(self):
        """Kill the crash_sender process if running."""
        utils.system('pkill -9 -e --exact crash_sender', ignore_status=True)


    def _set_sending_mock(self, mock_enabled):
        """Enables / disables mocking of the sending process.

        This uses the _MOCK_CRASH_SENDING file to achieve its aims. See notes
        at the top.

        @param mock_enabled: If True, mocking is enabled, else it is disabled.
        """
        if mock_enabled:
            data = ''
            logging.info('Setting sending mock')
            utils.open_write_close(self._MOCK_CRASH_SENDING, data)
        else:
            utils.system('rm -f ' + self._MOCK_CRASH_SENDING)


    def _set_consent(self, has_consent):
        """Sets whether or not we have consent to send crash reports.

        This creates or deletes the _CONSENT_FILE to control whether
        crash_sender will consider that it has consent to send crash reports.
        It also copies a policy blob with the proper policy setting.

        @param has_consent: True to indicate consent, False otherwise
        """
        autotest_cros_dir = os.path.join(os.path.dirname(__file__), '..')
        if has_consent:
            if os.path.isdir(constants.ALLOWLIST_DIR):
                # Create policy file that enables metrics/consent.
                shutil.copy('%s/mock_metrics_on.policy' % autotest_cros_dir,
                            constants.SIGNED_POLICY_FILE)
                shutil.copy('%s/mock_metrics_owner.key' % autotest_cros_dir,
                            constants.OWNER_KEY_FILE)
            # Create deprecated consent file.  This is created *after* the
            # policy file in order to avoid a race condition where chrome
            # might remove the consent file if the policy's not set yet.
            # We create it as a temp file first in order to make the creation
            # of the consent file, owned by chronos, atomic.
            # See crosbug.com/18413.
            temp_file = self._CONSENT_FILE + '.tmp'
            utils.open_write_close(temp_file, 'test-consent')
            utils.system('chown chronos:chronos "%s"' % (temp_file))
            shutil.move(temp_file, self._CONSENT_FILE)
            logging.info('Created %s', self._CONSENT_FILE)
        else:
            if os.path.isdir(constants.ALLOWLIST_DIR):
                # Create policy file that disables metrics/consent.
                shutil.copy('%s/mock_metrics_off.policy' % autotest_cros_dir,
                            constants.SIGNED_POLICY_FILE)
                shutil.copy('%s/mock_metrics_owner.key' % autotest_cros_dir,
                            constants.OWNER_KEY_FILE)
            # Remove deprecated consent file.
            utils.system('rm -f "%s"' % (self._CONSENT_FILE))
        # Ensure cached consent state is updated.
        time.sleep(2)


    def _set_crash_test_in_progress(self, in_progress):
        if in_progress:
            utils.open_write_close(self._CRASH_TEST_IN_PROGRESS, 'in-progress')
            logging.info('Created %s', self._CRASH_TEST_IN_PROGRESS)
        else:
            utils.system('rm -f "%s"' % (self._CRASH_TEST_IN_PROGRESS))


    def _get_pushed_consent_file_path(self):
        """Returns filename of the pushed consent file."""
        return os.path.join(self.bindir, 'pushed_consent')


    def _get_pushed_policy_file_path(self):
        """Returns filename of the pushed policy file."""
        return os.path.join(self.bindir, 'pushed_policy')


    def _get_pushed_owner_key_file_path(self):
        """Returns filename of the pushed owner.key file."""
        return os.path.join(self.bindir, 'pushed_owner_key')


    def _push_consent(self):
        """Push the consent file, thus disabling consent.

        The consent files can be created in the new test if required. Call
        _pop_consent() to restore the original state.
        """
        if os.path.exists(self._CONSENT_FILE):
            shutil.move(self._CONSENT_FILE,
                        self._get_pushed_consent_file_path())
        if os.path.exists(constants.SIGNED_POLICY_FILE):
            shutil.move(constants.SIGNED_POLICY_FILE,
                        self._get_pushed_policy_file_path())
        if os.path.exists(constants.OWNER_KEY_FILE):
            shutil.move(constants.OWNER_KEY_FILE,
                        self._get_pushed_owner_key_file_path())
        # Ensure cached consent state is updated.
        time.sleep(2)


    def _pop_consent(self):
        """Pop the consent files, enabling/disabling consent as it was before
        we pushed the consent."""
        if os.path.exists(self._get_pushed_consent_file_path()):
            shutil.move(self._get_pushed_consent_file_path(),
                        self._CONSENT_FILE)
        else:
            utils.system('rm -f "%s"' % self._CONSENT_FILE)
        if os.path.exists(self._get_pushed_policy_file_path()):
            shutil.move(self._get_pushed_policy_file_path(),
                        constants.SIGNED_POLICY_FILE)
        else:
            utils.system('rm -f "%s"' % constants.SIGNED_POLICY_FILE)
        if os.path.exists(self._get_pushed_owner_key_file_path()):
            shutil.move(self._get_pushed_owner_key_file_path(),
                        constants.OWNER_KEY_FILE)
        else:
            utils.system('rm -f "%s"' % constants.OWNER_KEY_FILE)
        # Ensure cached consent state is updated.
        time.sleep(2)


    def _get_crash_dir(self, username, force_user_crash_dir=False):
        """Returns crash directory for process running as the given user.

        @param username: Unix user of the crashing process.
        @param force_user_crash_dir: Regardless of |username|, return the crash
                                     directory of the current user session, or
                                     the fallback directory if no sessions.
        """
        if username in ('root', 'crash') and not force_user_crash_dir:
            return self._SYSTEM_CRASH_DIR
        else:
            dirs = glob.glob(self._USER_CRASH_DIRS)
            return dirs[0] if dirs else self._FALLBACK_USER_CRASH_DIR


    def _canonicalize_crash_dir(self, crash_dir):
        """Converts /home/chronos crash directory to /home/user counterpart.

        @param crash_dir: A path of the form /home/chronos/u-<hash>/crash.
        @returns /home/user/<hash>/crash, or |crash_dir| on form mismatch.
        """
        match = re.match(self._USER_CRASH_DIR_REGEX, crash_dir)
        return ('/home/user/%s/crash' % match.group(1)) if match else crash_dir
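    # Illustrative example (not used by the code above; the hash is made up):
    # with _USER_CRASH_DIR_REGEX, '/home/chronos/u-0123abcd/crash'
    # canonicalizes to '/home/user/0123abcd/crash'; any other path is
    # returned unchanged.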


    def _initialize_crash_reporter(self, lock_core_pattern):
        """Start up the crash reporter.

        @param lock_core_pattern: lock core pattern during initialization.
        """

        if not lock_core_pattern:
            self._set_crash_test_in_progress(False)
        utils.system('%s --init' % self._CRASH_REPORTER_PATH)
        if not lock_core_pattern:
            self._set_crash_test_in_progress(True)
            # Completely disable crash_reporter from generating crash dumps
            # while any tests are running, otherwise a crashy system can make
            # these tests flaky.
            self.enable_crash_filtering('none')


    def get_crash_dir_name(self, name):
        """Return the full path for |name| inside the system crash directory."""
        return os.path.join(self._SYSTEM_CRASH_DIR, name)


    def write_crash_dir_entry(self, name, contents):
        """Writes a file to the system crash directory.

        This writes a file to _SYSTEM_CRASH_DIR with the given name. This is
        used to insert new crash dump files for testing purposes.

        @param name: Name of file to write.
        @param contents: String to write to the file.
        """
        entry = self.get_crash_dir_name(name)
        if not os.path.exists(self._SYSTEM_CRASH_DIR):
            os.makedirs(self._SYSTEM_CRASH_DIR)
        utils.open_write_close(entry, contents)
        return entry


    def write_fake_meta(self, name, exec_name, payload, complete=True):
        """Writes a fake meta entry to the system crash directory.

        @param name: Name of file to write.
        @param exec_name: Value for exec_name item.
        @param payload: Value for payload item.
        @param complete: True to close off the record, otherwise leave it
                incomplete.
        """
        last_line = ''
        if complete:
            last_line = 'done=1\n'
        contents = ('exec_name=%s\n'
                    'ver=my_ver\n'
                    'payload=%s\n'
                    '%s' % (exec_name, payload,
                            last_line))
        return self.write_crash_dir_entry(name, contents)
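    # Illustrative example (file names simply follow _FAKE_TEST_BASENAME):
    # write_fake_meta('fake.1.2.3.4.meta', 'fake',
    # '/var/spool/crash/fake.1.2.3.4.dmp') creates a meta file containing:
    #     exec_name=fake
    #     ver=my_ver
    #     payload=/var/spool/crash/fake.1.2.3.4.dmp
    #     done=1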

    def _get_dmp_contents(self):
        """Creates the contents of the dmp file for our fabricated crashes.

        The dmp file contents are deliberately large and hard-to-compress. This
        ensures logging_CrashSender hits its bytes/day cap before its sends/day
        cap.
        """
        return bytearray(
                [random.randint(0, 255) for n in range(self._MAX_CRASH_SIZE)])


    def _prepare_sender_one_crash(self,
                                  reports_enabled,
                                  report):
        """Create metadata for a fake crash report.

        This enables mocking of the crash sender, then creates a fake
        crash report for testing purposes.

        @param reports_enabled: True to enable consent so that reports will be
                sent.
        @param report: Report to use for crash, if None we create one.
        """
        self._set_sending_mock(mock_enabled=True)
        self._set_consent(reports_enabled)
        if report is None:
            # Use the same file format as crash does normally:
            # <basename>.#.#.#.#.meta
            payload = self.write_crash_dir_entry(
                '%s.dmp' % self._FAKE_TEST_BASENAME, self._get_dmp_contents())
            report = self.write_fake_meta(
                '%s.meta' % self._FAKE_TEST_BASENAME, 'fake', payload)
        return report


    def _parse_sender_output(self, output):
        """Parse the log output from the crash_sender script.

        This can parse the logs from either a mocked or true crash send.
        It looks for one and only one crash in the output. Non-crash
        anomalies are ignored, since they are just noise generated while
        the test is running.

        @param output: output from the script

        @returns A dictionary with these values:
            exec_name: name of executable which crashed
            image_type: type of image ("dev", "test", ...), if given
            boot_mode: current boot mode ("dev", ...), if given
            meta_path: path to the report metadata file
            output: the output from the script, copied
            report_kind: kind of report sent (minidump vs kernel)
            send_attempt: did the script attempt to send a crash.
            send_success: if it attempted, was the crash send successful.
            sig: signature of the report, if given.
            sleep_time: if it attempted, how long did it sleep before
              sending (if mocked, how long would it have slept)
        """
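        # The searches below expect crash_sender syslog lines of roughly this
        # shape (illustrative only; the paths and pid here are made up):
        #   crash_sender[1234]: Considering metadata /var/spool/crash/foo.meta
        #   crash_sender[1234]: Scheduled to send in 42s
        #   crash_sender[1234]: Metadata: /var/spool/crash/foo.meta (minidump)
        #   crash_sender[1234]: Payload: /var/spool/crash/foo.dmp
        #   crash_sender[1234]: Mocking successful send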
        anomaly_types = (
            'kernel_suspend_warning',
            'kernel_warning',
            'kernel_wifi_warning',
            'selinux_violation',
            'service_failure',
        )

        def crash_sender_search(regexp, output):
            """Narrow search to lines from crash_sender."""
            return re.search(r'crash_sender\[\d+\]:\s+' + regexp, output)

        before_first_crash = None
        while True:
            crash_header = crash_sender_search(
                r'Considering metadata (\S+)',
                output
            )
            if not crash_header:
                break
            if before_first_crash is None:
                before_first_crash = output[:crash_header.start()]
            meta_considered = crash_header.group(1)
            is_anomaly = any(x in meta_considered for x in anomaly_types)
            if is_anomaly:
                # If it's an anomaly, skip this header, and look for next
                # one.
                output = output[crash_header.end():]
            else:
                # If it's not an anomaly, skip everything before this
                # header.
                output = output[crash_header.start():]
                break
        if before_first_crash:
            output = before_first_crash + output
        logging.debug('Filtered sender output to parse:\n%s', output)

        sleep_match = crash_sender_search(r'Scheduled to send in (\d+)s',
                                          output)
        send_attempt = sleep_match is not None
        if send_attempt:
            sleep_time = int(sleep_match.group(1))
        else:
            sleep_time = None

        meta_match = crash_sender_search(r'Metadata: (\S+) \((\S+)\)', output)
        if meta_match:
            meta_path = meta_match.group(1)
            report_kind = meta_match.group(2)
        else:
            meta_path = None
            report_kind = None

        payload_match = crash_sender_search(r'Payload: (\S+)', output)
        if payload_match:
            report_payload = payload_match.group(1)
        else:
            report_payload = None

        exec_name_match = crash_sender_search(r'Exec name: (\S+)', output)
        if exec_name_match:
            exec_name = exec_name_match.group(1)
        else:
            exec_name = None

        sig_match = crash_sender_search(r'sig: (\S+)', output)
        if sig_match:
            sig = sig_match.group(1)
        else:
            sig = None

        image_type_match = crash_sender_search(r'Image type: (\S+)', output)
        if image_type_match:
            image_type = image_type_match.group(1)
        else:
            image_type = None

        boot_mode_match = crash_sender_search(r'Boot mode: (\S+)', output)
        if boot_mode_match:
            boot_mode = boot_mode_match.group(1)
        else:
            boot_mode = None

        send_success = 'Mocking successful send' in output
        return {'exec_name': exec_name,
                'report_kind': report_kind,
                'meta_path': meta_path,
                'report_payload': report_payload,
                'send_attempt': send_attempt,
                'send_success': send_success,
                'sig': sig,
                'image_type': image_type,
                'boot_mode': boot_mode,
                'sleep_time': sleep_time,
                'output': output}


    def wait_for_sender_completion(self):
        """Wait for crash_sender to complete.

        Wait for crash_sender's last message to be placed in the
        system log and for the process to finish before continuing.
        Otherwise we might get only part of the output."""
        utils.poll_for_condition(
            lambda: self._log_reader.can_find('crash_sender done.'),
            timeout=60,
            exception=error.TestError(
              'Timeout waiting for crash_sender to emit done: ' +
              self._log_reader.get_logs()))
        utils.poll_for_condition(
            lambda: utils.system('pgrep crash_sender',
                                 ignore_status=True) != 0,
            timeout=60,
            exception=error.TestError(
                'Timeout waiting for crash_sender to finish: ' +
                self._log_reader.get_logs()))


    def _call_sender_one_crash(self, reports_enabled=True, report=None):
        """Call the crash sender script to mock upload one crash.

        @param reports_enabled: Has the user consented to sending crash reports.
        @param report: Report to use for crash, if None we create one.

        @returns a dictionary describing the result with the keys
          from _parse_sender_output, as well as:
            report_exists: does the minidump still exist after calling
              send script
            rate_count: how many crashes have been uploaded in the past
              24 hours.
        """
        report = self._prepare_sender_one_crash(reports_enabled,
                                                report)
        self._log_reader.set_start_by_current()
        script_output = ""
        try:
            script_output = utils.system_output(
                '%s --ignore_pause_file 2>&1' % (self._CRASH_SENDER_PATH),
                ignore_status=False)
        except error.CmdError as err:
            raise error.TestFail('"%s" returned an unexpected non-zero '
                                 'value (%s).'
                                 % (err.command, err.result_obj.exit_status))

        self.wait_for_sender_completion()
        output = self._log_reader.get_logs()
        logging.debug('Crash sender message output:\n %s', output)

        if script_output != '':
            logging.debug('crash_sender stdout/stderr: %s', script_output)

        if os.path.exists(report):
            report_exists = True
            os.remove(report)
        else:
            report_exists = False
        if os.path.exists(self._CRASH_SENDER_RATE_DIR):
            rate_count = len([
                name for name in os.listdir(self._CRASH_SENDER_RATE_DIR)
                if os.path.isfile(os.path.join(self._CRASH_SENDER_RATE_DIR,
                                               name))
            ])
        else:
            rate_count = 0

        result = self._parse_sender_output(output)
        result['report_exists'] = report_exists
        result['rate_count'] = rate_count

        # Show the result for debugging, but drop the 'output' key since it
        # is large and already appears earlier in the debug output.
        debug_result = dict(result)
        del debug_result['output']
        logging.debug('Result of send (besides output): %s', debug_result)

        return result
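    # A typical caller (illustrative sketch only) runs
    #     result = self._call_sender_one_crash()
    # and then checks result['send_attempt'], result['send_success'],
    # result['report_exists'] and result['rate_count'] against expectations.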


    def enable_crash_filtering(self, name):
        """Writes the given parameter to the filter-in file.

        This is used to ignore crashes in which we have no interest.

        @param name: The filter to write to the file, if any.
        """
        utils.open_write_close(self._FILTER_IN, name)
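    # For example, _initialize_crash_reporter() and initialize() in this class
    # call enable_crash_filtering('none') so that unrelated crashes are
    # filtered out while a test is running.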


    def disable_crash_filtering(self):
        """Remove the filter-in file.

        Next time the crash reporter is invoked, it will not filter crashes."""
        os.remove(self._FILTER_IN)


    def initialize(self):
        """Initialize the test."""
        test.test.initialize(self)
        self._log_reader = cros_logging.make_system_log_reader()
        self._leave_crash_sending = True
        self._automatic_consent_saving = True
        self.enable_crash_filtering('none')
        self._set_crash_test_in_progress(True)


    def cleanup(self):
        """Cleanup after the test.

        We reset things back to the way we think they should be. This is
        intended to allow the system to continue normal operation.

        Some variables silently change the behavior:
            _automatic_consent_saving: if True, we pop the consent file.
            _leave_crash_sending: True to enable crash sending, False to
                disable it
        """
        self._reset_rate_limiting()
        self._clear_spooled_crashes()
        self._set_system_sending(self._leave_crash_sending)
        self._set_sending_mock(mock_enabled=False)
        if self._automatic_consent_saving:
            self._pop_consent()
        self._set_crash_test_in_progress(False)

        # Re-initialize crash reporter to clear any state left over
        # (e.g. core_pattern)
        self._initialize_crash_reporter(True)

        self.disable_crash_filtering()

        test.test.cleanup(self)


    def run_crash_tests(self,
                        test_names,
                        initialize_crash_reporter=False,
                        clear_spool_first=True,
                        must_run_all=True,
                        lock_core_pattern=False):
        """Run crash tests defined in this class.

        @param test_names: Array of test names.
        @param initialize_crash_reporter: Whether to set up the crash reporter
                before every run.
        @param clear_spool_first: Clear all spooled user/system crashes before
                starting the test.
        @param must_run_all: Whether to make sure every test in this class is
                mentioned in test_names.
        @param lock_core_pattern: Lock core_pattern while initializing
                crash_reporter.
        """
        if self._automatic_consent_saving:
            self._push_consent()

        if must_run_all:
            # Sanity check that test_names is complete.
            for attr in dir(self):
                if attr.find('_test_') == 0:
                    test_name = attr[6:]
                    if not test_name in test_names:
                        raise error.TestError('Test %s is missing' % test_name)

        for test_name in test_names:
            logging.info(('=' * 20) + ('Running %s' % test_name) + ('=' * 20))
            if initialize_crash_reporter:
                self._initialize_crash_reporter(lock_core_pattern)
            # Disable crash_sender from running, kill off any running ones.
            # We set a flag to crash_sender when invoking it manually to avoid
            # our invocations being paused.
            self._set_system_sending(False)
            self._kill_running_sender()
            self._reset_rate_limiting()
            if clear_spool_first:
                self._clear_spooled_crashes()

            # Call the test function
            getattr(self, '_test_' + test_name)()

        # Clear the intentional crashes, so that the server won't
        # automatically report the crashes as failures.
        self._clear_spooled_crashes()

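# A minimal usage sketch (illustrative only; the subclass and test names below
# are hypothetical, not part of this module). Client tests subclass CrashTest,
# define _test_* methods, and dispatch to them via run_crash_tests():
#
#     class logging_ExampleCrash(CrashTest):
#         version = 1
#
#         def _test_sender_simple(self):
#             result = self._call_sender_one_crash()
#             if not (result['send_attempt'] and result['send_success']):
#                 raise error.TestFail('mocked send did not succeed')
#
#         def run_once(self):
#             self.run_crash_tests(['sender_simple'])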