1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <android-base/parsebool.h>
18 #include <benchmark/benchmark.h>
19 #include <errno.h>
20 #include <fcntl.h>
21 #include <fnmatch.h>
22 #include <getopt.h>
23 #include <pthread.h>
24 #include <stdio.h>
25 #include <sys/stat.h>
26 #include <sys/types.h>
27 #include <unistd.h>
28 
29 #include <regex>
30 #include <string>
31 #include <unordered_map>
32 #include <vector>
33 
34 #include "HardwareBitmapUploader.h"
35 #include "Properties.h"
36 #include "hwui/Typeface.h"
37 #include "renderthread/RenderProxy.h"
38 #include "tests/common/LeakChecker.h"
39 #include "tests/common/TestScene.h"
40 
41 using namespace android;
42 using namespace android::base;
43 using namespace android::uirenderer;
44 using namespace android::uirenderer::test;
45 
// Tests selected to run this invocation (populated from positional args,
// glob patterns, or --benchmark_filter).
static std::vector<TestScene::Info> gRunTests;
// Options applied to every test run (frame count, repeats, on/offscreen, ...).
static TestScene::Options gOpts;
// Whether to run the leak check after all tests; cleared by --skip-leak-check
// and by JSON output (plain-text leak report would corrupt the JSON).
static bool gRunLeakCheck = true;
std::unique_ptr<benchmark::BenchmarkReporter> gBenchmarkReporter;

// Executes a single test scene; declaration only — implemented in a sibling
// translation unit.
void run(const TestScene::Info& info, const TestScene::Options& opts,
         benchmark::BenchmarkReporter* reporter);
53 
// Prints command-line usage to stdout (shown for -h/--help and on request).
static void printHelp() {
    printf(R"(
USAGE: hwuimacro [OPTIONS] <TESTNAME>

OPTIONS:
  -c, --count=NUM      NUM loops a test should run (example, number of frames)
  -r, --runs=NUM       Repeat the test(s) NUM times
  -h, --help           Display this help
  --list               List all tests
  --wait-for-gpu       Set this to wait for the GPU before producing the
                       next frame. Note that without locked clocks this will
                       result in pathologically bad performance due to large idle time
  --report-frametime[=weight] If set, the test will print to stdout the
                       moving average frametime. Weight is optional, default is 10
  --cpuset=name        Adds the test to the specified cpuset before running
                       Not supported on all devices and needs root
  --offscreen          Render tests off device screen. This option is on by default
  --onscreen           Render tests on device screen. By default tests
                       are offscreen rendered
  --benchmark_format   Set output format. Possible values are tabular, json
  --benchmark_list_tests Lists the tests that would run but does not run them
  --benchmark_filter=<regex> Filters the test set to the given regex. If prefixed with `-` any test
                       that doesn't match the given regex is run
  --renderer=TYPE      Sets the render pipeline to use. May be skiagl or skiavk
  --skip-leak-check    Skips the memory leak check
  --report-gpu-memory[=verbose]  Dumps the GPU memory usage after each test run
)");
}
82 
// Prints every registered test as "name  description", wrapping the
// description at ~50 columns and breaking on the last space when possible.
static void listTests() {
    printf("Tests: \n");
    for (auto&& test : TestScene::testMap()) {
        auto&& info = test.second;
        const char* col1 = info.name.c_str();
        int dlen = info.description.length();
        const char* col2 = info.description.c_str();
        // World's best line breaking algorithm.
        do {
            int toPrint = dlen;
            if (toPrint > 50) {
                // Prefer breaking at the last space within the first 50 chars;
                // hard-wrap at 50 if the chunk has no space at all.
                char* found = (char*)memrchr(col2, ' ', 50);
                if (found) {
                    toPrint = found - col2;
                } else {
                    toPrint = 50;
                }
            }
            printf("%-20s %.*s\n", col1, toPrint, col2);
            col1 = "";  // only the first wrapped line carries the test name
            col2 += toPrint;
            dlen -= toPrint;
            // Skip the space(s) the break landed on so continuation lines
            // don't start with whitespace.
            while (*col2 == ' ') {
                col2++;
                dlen--;
            }
        } while (dlen > 0);
        printf("\n");
    }
}
113 
// Best-effort: writes the current pid into /dev/cpuset/<name>/tasks so the
// benchmark runs on the requested cpuset. Prints a note to stderr and
// returns when cpusets are unavailable, the name is too long, or any file
// operation fails — the test still runs either way.
static void moveToCpuSet(const char* cpusetName) {
    if (access("/dev/cpuset/tasks", F_OK)) {
        fprintf(stderr, "don't have access to cpusets, skipping...\n");
        return;
    }
    static const int BUF_SIZE = 100;

    char path[BUF_SIZE];
    if (snprintf(path, BUF_SIZE, "/dev/cpuset/%s/tasks", cpusetName) >= BUF_SIZE) {
        fprintf(stderr, "Error, cpusetName too large to fit in buffer '%s'\n", cpusetName);
        return;
    }
    const int fd = open(path, O_WRONLY | O_CLOEXEC);
    if (fd == -1) {
        fprintf(stderr, "Error opening file %d\n", errno);
        return;
    }

    char pidText[BUF_SIZE];
    const int towrite = snprintf(pidText, BUF_SIZE, "%ld", (long)getpid());
    if (towrite >= BUF_SIZE) {
        fprintf(stderr, "Buffer wasn't large enough?\n");
    } else if (write(fd, pidText, towrite) != towrite) {
        fprintf(stderr, "Failed to write, errno=%d", errno);
    }
    close(fd);
}
143 
setBenchmarkFormat(const char * format)144 static bool setBenchmarkFormat(const char* format) {
145     if (!strcmp(format, "tabular")) {
146         gBenchmarkReporter.reset(new benchmark::ConsoleReporter());
147     } else if (!strcmp(format, "json")) {
148         // We cannot print the leak check if outputing to JSON as that will break
149         // JSON parsers since it's not JSON-formatted
150         gRunLeakCheck = false;
151         gBenchmarkReporter.reset(new benchmark::JSONReporter());
152     } else {
153         fprintf(stderr, "Unknown format '%s'\n", format);
154         return false;
155     }
156     return true;
157 }
158 
setRenderer(const char * renderer)159 static bool setRenderer(const char* renderer) {
160     if (!strcmp(renderer, "skiagl")) {
161         Properties::overrideRenderPipelineType(RenderPipelineType::SkiaGL);
162     } else if (!strcmp(renderer, "skiavk")) {
163         Properties::overrideRenderPipelineType(RenderPipelineType::SkiaVulkan);
164     } else {
165         fprintf(stderr, "Unknown format '%s'\n", renderer);
166         return false;
167     }
168     return true;
169 }
170 
addTestsThatMatchFilter(std::string spec)171 static void addTestsThatMatchFilter(std::string spec) {
172     if (spec.empty() || spec == "all") {
173         spec = ".";  // Regexp that matches all benchmarks
174     }
175     bool isNegativeFilter = false;
176     if (spec[0] == '-') {
177         spec.replace(0, 1, "");
178         isNegativeFilter = true;
179     }
180     std::regex re(spec, std::regex_constants::extended);
181     for (auto& iter : TestScene::testMap()) {
182         if ((isNegativeFilter && !std::regex_search(iter.first, re)) ||
183             (!isNegativeFilter && std::regex_search(iter.first, re))) {
184             gRunTests.push_back(iter.second);
185         }
186     }
187 }
188 
// For options that only exist in long-form. Anything in the
// 0-255 range is reserved for short options (which just use their ASCII value)
namespace LongOpts {
enum {
    Reserved = 255,  // sentinel: real long-option values start above ASCII
    List,
    WaitForGpu,
    ReportFrametime,
    CpuSet,
    BenchmarkFormat,
    BenchmarkListTests,
    BenchmarkFilter,
    Onscreen,
    Offscreen,
    Renderer,
    SkipLeakCheck,
    ReportGpuMemory,
};
}
208 
// getopt_long() option table: short-form options return their ASCII
// character; long-only options return the LongOpts enum value above.
static const struct option LONG_OPTIONS[] = {
        {"count", required_argument, nullptr, 'c'},
        {"runs", required_argument, nullptr, 'r'},
        {"help", no_argument, nullptr, 'h'},
        {"list", no_argument, nullptr, LongOpts::List},
        {"wait-for-gpu", no_argument, nullptr, LongOpts::WaitForGpu},
        {"report-frametime", optional_argument, nullptr, LongOpts::ReportFrametime},
        {"cpuset", required_argument, nullptr, LongOpts::CpuSet},
        {"benchmark_format", required_argument, nullptr, LongOpts::BenchmarkFormat},
        {"benchmark_list_tests", optional_argument, nullptr, LongOpts::BenchmarkListTests},
        {"benchmark_filter", required_argument, nullptr, LongOpts::BenchmarkFilter},
        {"onscreen", no_argument, nullptr, LongOpts::Onscreen},
        {"offscreen", no_argument, nullptr, LongOpts::Offscreen},
        {"renderer", required_argument, nullptr, LongOpts::Renderer},
        {"skip-leak-check", no_argument, nullptr, LongOpts::SkipLeakCheck},
        {"report-gpu-memory", optional_argument, nullptr, LongOpts::ReportGpuMemory},
        {0, 0, 0, 0}};  // terminator required by getopt_long()

// Short options: -c and -r take a required argument (trailing ':'), -h does not.
static const char* SHORT_OPTIONS = "c:r:h";
228 
// Parses the command line into the global gOpts/gRunTests/gBenchmarkReporter
// state. Exits the process directly for --help, --list, --benchmark_list_tests,
// an unknown test name, or any argument error.
void parseOptions(int argc, char* argv[]) {
    benchmark::BenchmarkReporter::Context::executable_name = (argc > 0) ? argv[0] : "unknown";

    int c;
    bool error = false;
    bool listTestsOnly = false;
    bool testsAreFiltered = false;
    opterr = 0;  // suppress getopt's own error messages; we print our own in '?'

    while (true) {
        /* getopt_long stores the option index here. */
        int option_index = 0;

        c = getopt_long(argc, argv, SHORT_OPTIONS, LONG_OPTIONS, &option_index);

        if (c == -1) break;

        switch (c) {
            case 0:
                // Option set a flag, don't need to do anything
                // (although none of the current LONG_OPTIONS do this...)
                break;

            case LongOpts::List:
                listTests();
                exit(EXIT_SUCCESS);
                break;

            case 'c':
                // NOTE: atoi returns 0 for both "0" and garbage, so a frame
                // count of 0 is rejected along with non-numeric input.
                gOpts.frameCount = atoi(optarg);
                if (!gOpts.frameCount) {
                    fprintf(stderr, "Invalid frames argument '%s'\n", optarg);
                    error = true;
                }
                break;

            case 'r':
                gOpts.repeatCount = atoi(optarg);
                if (!gOpts.repeatCount) {
                    fprintf(stderr, "Invalid repeat argument '%s'\n", optarg);
                    error = true;
                } else {
                    // A negative count means "repeat forever" (clamped to INT_MAX).
                    gOpts.repeatCount = (gOpts.repeatCount > 0 ? gOpts.repeatCount : INT_MAX);
                }
                break;

            case LongOpts::ReportFrametime:
                if (optarg) {
                    gOpts.reportFrametimeWeight = atoi(optarg);
                    if (!gOpts.reportFrametimeWeight) {
                        fprintf(stderr, "Invalid report frametime weight '%s'\n", optarg);
                        error = true;
                    }
                } else {
                    // Default moving-average weight when no =weight is given.
                    gOpts.reportFrametimeWeight = 10;
                }
                break;

            case LongOpts::WaitForGpu:
                Properties::waitForGpuCompletion = true;
                break;

            case LongOpts::CpuSet:
                if (!optarg) {
                    error = true;
                    break;
                }
                moveToCpuSet(optarg);
                break;

            case LongOpts::BenchmarkFormat:
                if (!optarg) {
                    error = true;
                    break;
                }
                if (!setBenchmarkFormat(optarg)) {
                    error = true;
                }
                break;

            case LongOpts::BenchmarkListTests:
                // Bare flag or an explicit truthy value enables list-only mode.
                if (!optarg || ParseBool(optarg) == ParseBoolResult::kTrue) {
                    listTestsOnly = true;
                }
                break;

            case LongOpts::BenchmarkFilter:
                if (!optarg) {
                    error = true;
                    break;
                }
                addTestsThatMatchFilter(optarg);
                // Remember a filter ran so an empty result doesn't fall back
                // to "run everything" below.
                testsAreFiltered = true;
                break;

            case LongOpts::Renderer:
                if (!optarg) {
                    error = true;
                    break;
                }
                if (!setRenderer(optarg)) {
                    error = true;
                }
                break;

            case LongOpts::Onscreen:
                gOpts.renderOffscreen = false;
                break;

            case LongOpts::Offscreen:
                gOpts.renderOffscreen = true;
                break;

            case LongOpts::SkipLeakCheck:
                gRunLeakCheck = false;
                break;

            case LongOpts::ReportGpuMemory:
                gOpts.reportGpuMemoryUsage = true;
                if (optarg) {
                    if (!strcmp("verbose", optarg)) {
                        gOpts.reportGpuMemoryUsageVerbose = true;
                    } else {
                        fprintf(stderr, "Invalid report gpu memory option '%s'\n", optarg);
                        error = true;
                    }
                }
                break;

            case 'h':
                printHelp();
                exit(EXIT_SUCCESS);
                break;

            case '?':
                fprintf(stderr, "Unrecognized option '%s'\n", argv[optind - 1]);
                [[fallthrough]];
            default:
                error = true;
                break;
        }
    }

    if (error) {
        fprintf(stderr, "Try '%s --help' for more information.\n", argv[0]);
        exit(EXIT_FAILURE);
    }

    /* Print any remaining command line arguments (not options). */
    if (optind < argc) {
        do {
            const char* test = argv[optind++];
            if (strchr(test, '*')) {
                // Glob match
                for (auto& iter : TestScene::testMap()) {
                    if (!fnmatch(test, iter.first.c_str(), 0)) {
                        gRunTests.push_back(iter.second);
                    }
                }
            } else {
                // Exact name lookup; unknown names are fatal.
                auto pos = TestScene::testMap().find(test);
                if (pos == TestScene::testMap().end()) {
                    fprintf(stderr, "Unknown test '%s'\n", test);
                    exit(EXIT_FAILURE);
                } else {
                    gRunTests.push_back(pos->second);
                }
            }
        } while (optind < argc);
    } else if (gRunTests.empty() && !testsAreFiltered) {
        // No tests named and no filter applied: run everything.
        for (auto& iter : TestScene::testMap()) {
            gRunTests.push_back(iter.second);
        }
    }

    if (listTestsOnly) {
        // --benchmark_list_tests: print the selection and exit without running.
        for (auto& iter : gRunTests) {
            std::cout << iter.name << std::endl;
        }
        exit(EXIT_SUCCESS);
    }
}
411 
main(int argc,char * argv[])412 int main(int argc, char* argv[]) {
413     Typeface::setRobotoTypefaceForTest();
414 
415     parseOptions(argc, argv);
416     if (!gBenchmarkReporter && gOpts.renderOffscreen) {
417         gBenchmarkReporter.reset(new benchmark::ConsoleReporter());
418     }
419 
420     if (gBenchmarkReporter) {
421         size_t name_field_width = 10;
422         for (auto&& test : gRunTests) {
423             name_field_width = std::max<size_t>(name_field_width, test.name.size());
424         }
425         // _50th, _90th, etc...
426         name_field_width += 5;
427 
428         benchmark::BenchmarkReporter::Context context;
429         context.name_field_width = name_field_width;
430         gBenchmarkReporter->ReportContext(context);
431     }
432 
433     for (auto&& test : gRunTests) {
434         run(test, gOpts, gBenchmarkReporter.get());
435     }
436 
437     if (gBenchmarkReporter) {
438         gBenchmarkReporter->Finalize();
439     }
440 
441     renderthread::RenderProxy::trimMemory(100);
442     HardwareBitmapUploader::terminate();
443 
444     if (gRunLeakCheck) {
445         LeakChecker::checkForLeaks();
446     }
447     return 0;
448 }
449