# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import numpy
import os

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib.cros import tpm_utils
from autotest_lib.server import test, autotest

CLIENT_TEST_NAME = 'platform_InitLoginPerf'
STAGE_OOBE = 0
STAGE_REGULAR = 1
STAGE_NAME = ['oobe', 'regular']
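# Benchmark descriptors. For each benchmark: 'stage' is the client-test stage
# that produces the sample, 'name' is the perf keyval reported by the client
# test, 'display' is the 8-character column header for the results table
# (matching the '%8.2f' columns), 'units' are the measurement units, and
# 'upload' selects whether the value is sent to the perf dashboard.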
BENCHMARKS = {
        'initial_login': {'stage': STAGE_OOBE,
                          'name': 'login-duration',
                          'display': '1stLogin',
                          'units': 'seconds',
                          'upload': True},
        'regular_login': {'stage': STAGE_REGULAR,
                          'name': 'login-duration',
                          'display': 'RegLogin',
                          'units': 'seconds',
                          'upload': True},
        'prepare_attestation': {'stage': STAGE_OOBE,
                                'name': 'attestation-duration',
                                'display': 'PrepAttn',
                                'units': 'seconds',
                                'upload': True},
        'take_ownership': {'stage': STAGE_OOBE,
                           'name': 'ownership-duration',
                           'display': 'TakeOwnp',
                           'units': 'seconds',
                           'upload': True},
        }

class platform_InitLoginPerfServer(test.test):
    """Test to exercise and gather perf data for initialization and login."""

    version = 1

    def initialize(self):
        """Run before the first iteration."""
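        # Map each benchmark name to a list of per-iteration samples.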
        self.perf_results = {}
        for bmname in BENCHMARKS:
            self.perf_results[bmname] = []

    def stage_args(self, stage):
        """Build arguments for the client-side test.

        @param stage: Stage of the test to get arguments for.
        @return: Dictionary of arguments.

        """
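        # Initialization (with the optional pre-init delay) happens only in
        # the OOBE stage; the regular stage runs against an already
        # initialized device.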
        if stage == STAGE_OOBE:
            return {'perform_init': True,
                    'pre_init_delay': self.pre_init_delay}
        else:
            return {'perform_init': False}

    def run_stage(self, stage):
        """Run the client-side test.

        @param stage: Stage of the test to run.

        """
        full_stage = 'iteration.%s/%s' % (self.iteration, STAGE_NAME[stage])
        logging.info('Run stage %s', full_stage)
        self.client_at.run_test(test_name=self.client_test,
                                results_dir=full_stage,
                                check_client_result=True,
                                **self.stage_args(stage))
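        # The client test reports its measurements through a 'keyval' file in
        # its results directory; type_tag='perf' selects the perf-tagged
        # entries.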
        client_keyval = os.path.join(self.outputdir, full_stage,
                                     self.client_test, 'results', 'keyval')
        self.client_results[stage] = utils.read_keyval(
                client_keyval, type_tag='perf')

    def save_perf_data(self):
        """Extract perf data from client-side test results."""
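        # Append None for a missing sample so that per-iteration rows stay
        # aligned across benchmarks.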
        for bmname, bm in BENCHMARKS.items():
            try:
                self.perf_results[bmname].append(
                        self.client_results[bm['stage']][bm['name']])
            except KeyError:
                logging.warning('Failed to extract %s from client results',
                                bmname)
                self.perf_results[bmname].append(None)

    def output_benchmark(self, bmname):
        """Output a benchmark.

        @param bmname: Name of the benchmark.

        """
        bm = BENCHMARKS[bmname]
        values = self.perf_results[bmname]
        if not bm.get('upload', True):
            return
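        # Skip iterations that produced no sample and upload the rest as a
        # list of values.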
        self.output_perf_value(
                description=bmname,
                value=[x for x in values if x is not None],
                units=bm.get('units', 'seconds'),
                higher_is_better=False,
                graph=self.graph_name)

    def display_perf_headers(self):
        """Add headers for the results table to the info log."""
        hdr = "# "
        for bm in BENCHMARKS.values():
            hdr += bm['display'] + ' '
        logging.info('# Results for delay = %.2f sec', self.pre_init_delay)
        logging.info(hdr)

    def display_perf_line(self, n):
        """Add the results line for one iteration to the info log.

        @param n: Index of the iteration.

        """
        line = "# "
        for bmname in BENCHMARKS:
            value = self.perf_results[bmname][n]
            if value is None:
                line += '    None '
            else:
                line += '%8.2f ' % value
        logging.info(line)

    def display_perf_stats(self, name, func):
        """Add a statistics line for the results to the info log.

        @param name: Name of the statistic.
        @param func: Function to reduce the list of results.

        """
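        # Reduce only the iterations that produced a sample; None marks an
        # iteration where extraction failed.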
        line = "# "
        for bmname in BENCHMARKS:
            values = [x for x in self.perf_results[bmname] if x is not None]
            line += '%8.2f ' % func(values)
        logging.info('# %s:', name)
        logging.info(line)

    def process_perf_data(self):
        """Process performance data from all iterations."""
        logging.info('Process perf data')
        logging.debug('Results: %s', self.perf_results)

        if self.upload_perf:
            for bmname in BENCHMARKS:
                self.output_benchmark(bmname)

        logging.info('##############################################')
        self.display_perf_headers()
        for i in range(self.iteration):
            self.display_perf_line(i)
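        # Summary statistics across iterations. ddof=1 gives the sample
        # standard deviation (N-1 in the denominator).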
        self.display_perf_stats('Average', numpy.mean)
        self.display_perf_stats('Min', min)
        self.display_perf_stats('Max', max)
        self.display_perf_stats('StdDev', lambda x: numpy.std(x, ddof=1))
        logging.info('##############################################')

    def run_once(self, host, pre_init_delay=0,
                 upload_perf=False, graph_name=None):
        """Run a single iteration.

        @param host: Host (DUT) to run the client-side test on.
        @param pre_init_delay: Delay before initialization during first boot.
        @param upload_perf: Whether to upload the results to the perf
                            dashboard.
        @param graph_name: Graph name to use when uploading the results.

        """
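        # self.iteration is normally maintained by the autotest framework;
        # default to 1 in case it has not been set.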
        if self.iteration is None:
            self.iteration = 1
        logging.info('Start iteration %s', self.iteration)

        self.client = host
        self.pre_init_delay = pre_init_delay
        self.upload_perf = upload_perf
        self.graph_name = graph_name
        self.client_results = {}
        self.client_test = CLIENT_TEST_NAME
        self.client_at = autotest.Autotest(self.client)

        logging.info('Clear the owner before the test')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)

        self.run_stage(STAGE_OOBE)
        self.client.reboot()
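        # After the reboot the device is already initialized, so this stage
        # measures a regular login.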
        self.run_stage(STAGE_REGULAR)
        self.save_perf_data()

    def postprocess(self):
        """Run after all iterations in case of success."""
        self.process_perf_data()

    def cleanup(self):
        """Run at the end regardless of success."""
        logging.info('Cleanup')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)