/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include <errno.h>
#include <jni.h>
#include <sys/stat.h>
#include <unistd.h>

#include <fstream>
#include <sstream>
#include <string>
#include <unordered_map>
#include <utility>
#include <vector>

#include "tensorflow/lite/tools/benchmark/benchmark_tflite_model.h"

#ifdef __ANDROID__
#include <android/log.h>
#endif

namespace tflite {
namespace benchmark {
namespace {

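// On-device directory where the CSV benchmark results are written.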
const char kOutputDir[] = "/sdcard/benchmark_output";

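// Listener that formats benchmark results as the JSON expected by Firebase
// Test Lab Game Loop tests and writes them to the file descriptor provided by
// the test harness, mirroring the report to the local log. If no descriptor is
// open, the report goes to the local log only.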
class FirebaseReportingListener : public BenchmarkListener {
 public:
  explicit FirebaseReportingListener(std::string tag, int report_fd)
      : tag_(tag), report_fd_(report_fd) {
    if (report_fd < 0) {
#ifdef __ANDROID__
      __android_log_print(
          ANDROID_LOG_ERROR, "tflite",
          "The report will be streamed only to the local log, not to "
          "Firebase, since the Firebase log file is not opened.");
#else
      fprintf(stderr,
              "The report will be streamed only to the local log, not to "
              "Firebase, since the Firebase log file is not opened.\n");
#endif
    }
  }

  void OnBenchmarkEnd(const BenchmarkResults& results) override {
    ReportResult(results);
  }

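  // Reports a failed benchmark run, mapping the TfLiteStatus to a short
  // human-readable status string and attaching no result contents.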
  void ReportFailure(TfLiteStatus status) {
    std::string status_msg =
        status == kTfLiteError
            ? "TFLite error"
            : (status == kTfLiteDelegateError ? "TFLite delegate error"
                                              : "Unknown error code");
    Report(status_msg, std::vector<std::pair<std::string, std::string>>());
  }

 private:
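  // Serializes the status string and the given key/value pairs as a single
  // JSON object, writes it to the Firebase log file descriptor (if open), and
  // mirrors it to the local log.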
  void Report(
      const std::string& status,
      const std::vector<std::pair<std::string, std::string>>& contents) {
    // The output format of a Firebase Game Loop test is JSON:
    // https://firebase.google.com/docs/test-lab/android/game-loop#output-example
    std::stringstream report;
    report << "{\n"
           << " \"name\": \"TFLite benchmark\",\n"
           << " \"benchmark config\": \"" << tag_ << "\",\n"
           << " \"status\": \"" << status << "\"";
    for (const auto& content : contents) {
      report << ",\n"
             << " \"" << content.first << "\": \"" << content.second << "\"";
    }
    report << "\n}\n";

    auto report_str = report.str();
    if (report_fd_ >= 0) {
      write(report_fd_, report_str.c_str(), report_str.size());
    }

#ifdef __ANDROID__
    __android_log_print(ANDROID_LOG_ERROR, "tflite", "%s", report_str.c_str());
#else
    fprintf(stderr, "%s", report_str.c_str());
#endif
  }

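  // Converts successful benchmark results (latencies and memory usage) into
  // key/value pairs and reports them with an "OK" status.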
  void ReportResult(const BenchmarkResults& results) {
    std::vector<std::pair<std::string, std::string>> contents;
    std::stringstream avg_time;
    avg_time << "init: " << results.startup_latency_us() << ", "
             << "warmup: " << results.warmup_time_us().avg() << ", "
             << "inference: " << results.inference_time_us().avg();
    contents.emplace_back("average time in us", avg_time.str());
    std::stringstream overall_mem_usage;
    overall_mem_usage << results.overall_mem_usage();
    contents.emplace_back("overall memory usage", overall_mem_usage.str());

    Report("OK", contents);
  }

  std::string tag_;
  int report_fd_;
};

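// Listener that exports benchmark results as a single-row CSV file under
// kOutputDir, named after the scenario tag.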
class CsvExportingListener : public BenchmarkListener {
 public:
  explicit CsvExportingListener(std::string tag) : tag_(tag) {}

  void OnBenchmarkEnd(const BenchmarkResults& results) override {
    if (!CreateOutputDir()) {
#ifdef __ANDROID__
      __android_log_print(ANDROID_LOG_ERROR, "tflite",
                          "Failed to create output directory %s.", kOutputDir);
#else
      fprintf(stderr, "Failed to create output directory %s.\n", kOutputDir);
#endif
      return;
    }
    WriteBenchmarkResultCsv(results);
  }

 private:
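  // Ensures kOutputDir exists; returns false if it cannot be created or if the
  // path exists but is not a directory.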
  bool CreateOutputDir() {
    struct stat st;
    if (stat(kOutputDir, &st) != 0) {
      if (mkdir(kOutputDir, 0777) != 0 && errno != EEXIST) {
        return false;
      }
    } else if (!S_ISDIR(st.st_mode)) {
      errno = ENOTDIR;
      return false;
    }
    return true;
  }

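  // Writes a CSV header plus one row of latency (in microseconds) and memory
  // metrics (max RSS and allocation sizes converted to MB) for this run.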
  void WriteBenchmarkResultCsv(const BenchmarkResults& results) {
    auto init_us = results.startup_latency_us();
    auto warmup_us = results.warmup_time_us();
    auto inference_us = results.inference_time_us();
    auto init_mem_usage = results.init_mem_usage();
    auto overall_mem_usage = results.overall_mem_usage();

    std::stringstream file_name;
    file_name << kOutputDir << "/benchmark_result_" << tag_;

    std::ofstream file;
    file.open(file_name.str().c_str());
    file << "config_key,model_size,init_time,"
         << "warmup_avg,warmup_min,warmup_max,warmup_stddev,"
         << "inference_avg,inference_min,inference_max,inference_stddev,"
         << "init_max_rss,init_total_alloc,init_in_use_alloc,"
         << "overall_max_rss,overall_total_alloc,overall_in_use_alloc\n";
    file << tag_ << "," << results.model_size_mb() << "," << init_us << ","
         << warmup_us.avg() << "," << warmup_us.min() << "," << warmup_us.max()
         << "," << warmup_us.std_deviation() << "," << inference_us.avg() << ","
         << inference_us.min() << "," << inference_us.max() << ","
         << inference_us.std_deviation() << ","
         << (init_mem_usage.max_rss_kb / 1024.0) << ","
         << (init_mem_usage.total_allocated_bytes / 1024.0 / 1024.0) << ","
         << (init_mem_usage.in_use_allocated_bytes / 1024.0 / 1024.0) << ","
         << (overall_mem_usage.max_rss_kb / 1024.0) << ","
         << (overall_mem_usage.total_allocated_bytes / 1024.0 / 1024.0) << ","
         << (overall_mem_usage.in_use_allocated_bytes / 1024.0 / 1024.0)
         << "\n";
    file.close();
  }

  std::string tag_;
};

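// Maps a scenario index (as declared in the app's AndroidManifest.xml) to a
// human-readable tag and the corresponding benchmark command-line arguments,
// appending those arguments to `args`. Returns the tag, or an empty string for
// an unknown scenario.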
std::string GetScenarioConfig(const string& library_dir, int scenario,
                              std::vector<std::string>& args) {
  // The number of scenarios should equal the value specified in the
  // AndroidManifest.xml file.
  std::unordered_map<int, std::pair<std::string, std::vector<std::string>>>
      all_scenarios = {
          {1, {"cpu_1thread", {"--num_threads=1"}}},
          {2, {"cpu_2threads", {"--num_threads=2"}}},
          {3, {"cpu_4threads", {"--num_threads=4"}}},
          {4, {"xnnpack_1thread", {"--use_xnnpack=true", "--num_threads=1"}}},
          {5, {"xnnpack_2threads", {"--use_xnnpack=true", "--num_threads=2"}}},
          {6, {"xnnpack_4threads", {"--use_xnnpack=true", "--num_threads=4"}}},
          {7,
           {"gpu_default",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=false"}}},
          {8,
           {"gpu_fp16",
            {"--use_gpu=true", "--gpu_precision_loss_allowed=true"}}},
          {9, {"dsp_hexagon", {"--use_hexagon=true"}}},
          {10, {"nnapi", {"--use_nnapi=true"}}},
      };

  std::string tag;
  args.emplace_back("(BenchmarkModelAndroid)");
  args.emplace_back("--graph=/data/local/tmp/graph");

  auto it = all_scenarios.find(scenario);
  if (it != all_scenarios.end()) {
    const auto& scenario_info = it->second;
    tag = scenario_info.first;
    for (const auto& arg : scenario_info.second) {
      args.push_back(arg);
    }
  }
  if (scenario == 9) {
    std::stringstream hexagon_lib_path;
    hexagon_lib_path << "--hexagon_lib_path=" << library_dir;
    args.push_back(hexagon_lib_path.str());
  }
  return tag;
}

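// Runs the TFLite model benchmark for the given scenario and streams the
// results to Firebase (via report_fd) and to a CSV file on the device.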
void RunScenario(const string& library_dir, int scenario, int report_fd) {
  std::vector<std::string> args;
  std::string tag = GetScenarioConfig(library_dir, scenario, args);
  std::vector<char*> argv;
  argv.reserve(args.size());
  for (auto& arg : args) {
    argv.push_back(const_cast<char*>(arg.data()));
  }

  BenchmarkTfLiteModel benchmark;
  FirebaseReportingListener firebaseReporting(tag, report_fd);
  benchmark.AddListener(&firebaseReporting);
  CsvExportingListener csvExporting(tag);
  benchmark.AddListener(&csvExporting);
  auto status = benchmark.Run(static_cast<int>(argv.size()), argv.data());
  if (status != kTfLiteOk) {
    firebaseReporting.ReportFailure(status);
  }
}

}  // namespace
}  // namespace benchmark
}  // namespace tflite

extern "C" {

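// JNI entry point for BenchmarkModel.nativeRun() in the
// org.tensorflow.lite.benchmark.firebase package (per the JNI naming
// convention). `library_dir` is used as the Hexagon delegate library path and
// `report_fd` as the Firebase results file descriptor.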
JNIEXPORT void JNICALL
Java_org_tensorflow_lite_benchmark_firebase_BenchmarkModel_nativeRun(
    JNIEnv* env, jclass clazz, jstring library_dir, jint scenario,
    jint report_fd) {
  const char* lib_dir = env->GetStringUTFChars(library_dir, nullptr);

  tflite::benchmark::RunScenario(lib_dir, static_cast<int>(scenario),
                                 static_cast<int>(report_fd));

  env->ReleaseStringUTFChars(library_dir, lib_dir);
}

}  // extern "C"