# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# GLMark2 outputs a final performance score, and this test checks that score
# against the minimum requirement if min_score is set.

import logging
import os
import re
import string

from autotest_lib.client.bin import test, utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.cros import service_stopper
from autotest_lib.client.cros.graphics import graphics_utils

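# Example lines these regular expressions are intended to match (illustrative
# only, derived from the patterns below rather than from a real run):
#   [build] use-vbo=false: FPS: 528 FrameTime: 1.894 ms
#   glmark2 Score: 530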
GLMARK2_TEST_RE = (
    r'^\[(?P<scene>.*)\] (?P<options>.*): FPS: (?P<fps>\d+) FrameTime: '
    r'(?P<frametime>\d+\.\d+) ms$')
GLMARK2_SCORE_RE = r'glmark2 Score: (\d+)'

# Perf value description strings may only contain letters, numbers, periods,
# dashes and underscores.
# But glmark2 test names are usually in the form:
#   scene-name:opt=val:opt=v1,v2;v3,v4 or scene:<default>
# which we convert to:
#   scene-name.opt_val.opt_v1-v2_v3-v4 or scene.default
description_table = string.maketrans(':,=;', '.-__')
description_delete = '<>'
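# For example (a sketch assuming Python 2 str.translate(table, deletechars)
# semantics):
#   'texture:texture-filter=nearest'.translate(description_table,
#                                              description_delete)
#   => 'texture.texture-filter_nearest'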


class graphics_GLMark2(graphics_utils.GraphicsTest):
    """Runs glmark2, which benchmarks only calls compatible with OpenGL ES 2.0."""
    version = 1
    preserve_srcdir = True
    _services = None

    def setup(self):
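        """Sets up the glmark2 test dependency."""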
        self.job.setup_dep(['glmark2'])

    def initialize(self):
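        """Stops the UI so it does not interfere with the benchmark."""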
        super(graphics_GLMark2, self).initialize()
        # If the UI is running, we must stop it and restore it later.
        self._services = service_stopper.ServiceStopper(['ui'])
        self._services.stop_services()

    def cleanup(self):
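        """Restores any services stopped during initialize()."""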
        if self._services:
            self._services.restore_services()
        super(graphics_GLMark2, self).cleanup()

    @graphics_utils.GraphicsTest.failure_report_decorator('graphics_GLMark2')
    def run_once(self, size='800x600', hasty=False, min_score=None):
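        """Runs glmark2 and reports the resulting score.

        @param size: Window size to render at, e.g. '800x600'.
        @param hasty: If True, run each benchmark scene for a much shorter
                      duration and skip reporting perf numbers.
        @param min_score: If set, fail the test when the score is below this
                          value (only checked when not in hasty mode).
        """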
        dep = 'glmark2'
        dep_dir = os.path.join(self.autodir, 'deps', dep)
        self.job.install_pkg(dep, 'dep', dep_dir)

        glmark2 = os.path.join(self.autodir, 'deps/glmark2/glmark2')
        if not os.path.exists(glmark2):
            raise error.TestFail('Failed: Could not find test binary.')

        glmark2_data = os.path.join(self.autodir, 'deps/glmark2/data')

        options = []
        options.append('--data-path %s' % glmark2_data)
        options.append('--size %s' % size)
        options.append('--annotate')
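        # In hasty mode, run each benchmark scene for a much shorter duration;
        # this trades benchmark accuracy for a faster overall run.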
        if hasty:
            options.append('-b :duration=0.2')
        else:
            options.append('-b :duration=2')
        cmd = glmark2 + ' ' + ' '.join(options)

        if os.environ.get('CROS_FACTORY'):
            from autotest_lib.client.cros import factory_setup_modules
            from cros.factory.test import ui
            ui.start_reposition_thread('^glmark')

        # TODO(ihf): Switch this test to use perf.PerfControl like
        #            graphics_GLBench once it is stable. crbug.com/344766.
        if not hasty:
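            # Let the machine settle before taking measurements; retry the
            # idle-CPU wait with looser limits before giving up.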
            if not utils.wait_for_idle_cpu(60.0, 0.1):
                if not utils.wait_for_idle_cpu(20.0, 0.2):
                    raise error.TestFail('Failed: Could not get idle CPU.')
            if not utils.wait_for_cool_machine():
                raise error.TestFail('Failed: Could not get cool machine.')

        # In this test we are manually handling stderr, so expected=True.
        # Strangely, autotest treats CmdError/CmdTimeoutError as warnings only.
        try:
            result = utils.run(cmd,
                               stderr_is_expected=True,
                               stdout_tee=utils.TEE_TO_LOGS,
                               stderr_tee=utils.TEE_TO_LOGS)
        except error.CmdError:
            raise error.TestFail('Failed: CmdError running %s' % cmd)
        except error.CmdTimeoutError:
            raise error.TestFail('Failed: CmdTimeout running %s' % cmd)

        logging.info(result)
        for line in result.stderr.splitlines():
            if line.startswith('Error:'):
                # The line already starts with 'Error:', no need to prepend.
                raise error.TestFail(line)

        # Numbers in hasty mode are not as reliable, so don't send them to
        # the dashboard, etc.
        if not hasty:
            keyvals = {}
            score = None
            # glmark2 outputs the final performance score as:
            #   glmark2 Score: 530
            for line in result.stdout.splitlines():
                match = re.findall(GLMARK2_SCORE_RE, line)
                if match:
                    score = int(match[0])
            if score is None:
                raise error.TestFail('Failed: Unable to read benchmark score')
            # Output numbers for plotting by harness.
            logging.info('GLMark2 score: %d', score)
            if os.environ.get('CROS_FACTORY'):
                from autotest_lib.client.cros import factory_setup_modules
                from cros.factory.event_log import EventLog
                EventLog('graphics_GLMark2').Log('glmark2_score', score=score)
            keyvals['glmark2_score'] = score
            self.write_perf_keyval(keyvals)
            self.output_perf_value(
                description='Score',
                value=score,
                units='score',
                higher_is_better=True)

            if min_score is not None and score < min_score:
                raise error.TestFail(
                    'Failed: Benchmark score %d < %d (minimum score '
                    'requirement)' % (score, min_score))