
Searched refs:inference_time_us (Results 1 – 12 of 12) sorted by relevance

/external/tensorflow/tensorflow/lite/tools/benchmark/android/jni/
benchmark_model_jni.cc
    33  auto inference_us = results.inference_time_us();  // in OnBenchmarkEnd()
    43  << "Inference time us:" << results.inference_time_us();  // in OnBenchmarkEnd()
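
These JNI matches are where the Android benchmark harness logs the stat once a run finishes. A minimal sketch of a listener doing the same, assuming the BenchmarkListener and BenchmarkResults interfaces from benchmark_model.h (next group of results); avg() and count() are the Stat accessors used elsewhere in this listing:

    #include <iostream>

    #include "tensorflow/lite/tools/benchmark/benchmark_model.h"

    // Sketch: log the aggregated inference-time stat when a benchmark run
    // ends, mirroring the OnBenchmarkEnd() matches above.
    class LoggingListener : public tflite::benchmark::BenchmarkListener {
     public:
      void OnBenchmarkEnd(
          const tflite::benchmark::BenchmarkResults& results) override {
        // inference_time_us() returns a tensorflow::Stat<int64_t> aggregated
        // over all timed runs.
        auto inference_us = results.inference_time_us();
        std::cout << "Inference time us: avg=" << inference_us.avg()
                  << " count=" << inference_us.count() << std::endl;
      }
    };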
/external/tensorflow/tensorflow/lite/tools/benchmark/
benchmark_model.h
    47  tensorflow::Stat<int64_t> inference_time_us,  // in BenchmarkResults() (argument)
    54  inference_time_us_(inference_time_us),  // in BenchmarkResults()
    59  tensorflow::Stat<int64_t> inference_time_us() const {  // in inference_time_us() (function)
benchmark_performance_options.h
    67  return i.metrics.inference_time_us().avg() <  // in operator()
    68         j.metrics.inference_time_us().avg();  // in operator()
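
The operator() above is the sort criterion benchmark_performance_options uses to rank runs by mean latency. A sketch of the same comparison with std::sort; BenchmarkRun and its metrics member are hypothetical stand-ins for the actual struct:

    #include <algorithm>
    #include <vector>

    #include "tensorflow/lite/tools/benchmark/benchmark_model.h"

    // Hypothetical container for one run's results.
    struct BenchmarkRun {
      tflite::benchmark::BenchmarkResults metrics;
    };

    // Order runs by average inference latency, fastest first.
    void SortByAvgLatency(std::vector<BenchmarkRun>& runs) {
      std::sort(runs.begin(), runs.end(),
                [](const BenchmarkRun& i, const BenchmarkRun& j) {
                  return i.metrics.inference_time_us().avg() <
                         j.metrics.inference_time_us().avg();
                });
    }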
benchmark_model.cc
    50  auto inference_us = results.inference_time_us();  // in OnBenchmarkEnd()
   233  Stat<int64_t> inference_time_us =  // in Run() (local)
   240  warmup_time_us, inference_time_us, init_mem_usage,  // in Run()
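
Lines 233 and 240 are where Run() accumulates the per-run latencies into a Stat<int64_t> and forwards it to the BenchmarkResults constructor. A hedged sketch of that pattern; NowMicros() from tensorflow/lite/profiling/time.h and the stats_calculator.h header path are assumptions, and the real loop carries more bookkeeping:

    #include <cstdint>
    #include <functional>

    #include "tensorflow/core/util/stats_calculator.h"  // tensorflow::Stat (path may vary by TF version)
    #include "tensorflow/lite/profiling/time.h"         // tflite::profiling::time::NowMicros()

    // Sketch: time each inference and fold the latency into the stat that
    // Run() later hands to BenchmarkResults.
    tensorflow::Stat<int64_t> MeasureInferenceTimeUs(
        int num_runs, const std::function<void()>& run_once) {
      tensorflow::Stat<int64_t> inference_time_us;
      for (int i = 0; i < num_runs; ++i) {
        const uint64_t start_us = tflite::profiling::time::NowMicros();
        run_once();  // one inference
        inference_time_us.UpdateStat(static_cast<int64_t>(
            tflite::profiling::time::NowMicros() - start_us));
      }
      return inference_time_us;
    }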
benchmark_test.cc
   170  const auto current_avg_latency = result.metrics.inference_time_us().avg();  // in OutputStats()
   175  EXPECT_EQ(0, result.metrics.inference_time_us().count());  // in OutputStats()
   378  const int64_t num_actual_runs = results.inference_time_us().count();  // in OnBenchmarkEnd()
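
benchmark_test.cc reads the stat's run counter directly. A small sketch of that assertion style with googletest; that a default-constructed Stat reports zero samples is an assumption about stats_calculator.h:

    #include <gtest/gtest.h>

    #include "tensorflow/core/util/stats_calculator.h"

    // Sketch: with no samples recorded, count() should be zero, matching the
    // EXPECT_EQ at line 175 above.
    TEST(InferenceTimeStat, EmptyBeforeRuns) {
      tensorflow::Stat<int64_t> stat;
      EXPECT_EQ(0, stat.count());
    }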
benchmark_performance_options.cc
   100  run_stats.metrics.inference_time_us().OutputToStream(&stream);  // in OutputStats()
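
OutputToStream() is the stat's own formatter; a sketch of calling it outside the tool, assuming only that accessor and a standard ostream:

    #include <iostream>
    #include <sstream>

    #include "tensorflow/lite/tools/benchmark/benchmark_model.h"

    // Sketch: print the stat's summary the way OutputStats() does above.
    void PrintInferenceStat(const tflite::benchmark::BenchmarkResults& results) {
      std::ostringstream stream;
      results.inference_time_us().OutputToStream(&stream);
      std::cout << stream.str() << std::endl;
    }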
/external/tensorflow/tensorflow/tools/benchmark/
benchmark_model.h
    42  StatSummarizer* stats, int64* inference_time_us);
benchmark_model.cc
   287  StatSummarizer* stats, int64* inference_time_us) {  // in RunBenchmark() (argument)
   305  *inference_time_us = end_time - start_time;  // in RunBenchmark()
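
Unlike the TFLite tool, this TensorFlow-side RunBenchmark() reports a single wall-clock figure through an int64 out-parameter (line 305). A sketch of that contract with std::chrono standing in for the clock helper the real code uses:

    #include <chrono>
    #include <cstdint>
    #include <functional>

    // Sketch: the caller owns the int64 and the function fills it with
    // end-to-end wall time in microseconds, as at line 305 above.
    void TimeWholeRun(const std::function<void()>& run_all,
                      int64_t* inference_time_us) {
      const auto start_time = std::chrono::steady_clock::now();
      run_all();
      const auto end_time = std::chrono::steady_clock::now();
      *inference_time_us = std::chrono::duration_cast<std::chrono::microseconds>(
                               end_time - start_time)
                               .count();
    }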
/external/tensorflow/tensorflow/lite/tools/benchmark/experimental/firebase/android/jni/
benchmark_model_jni.cc
   105  << "inference: " << results.inference_time_us().avg();  // in ReportResult()
   152  auto inference_us = results.inference_time_us();  // in WriteBenchmarkResultCsv()
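
The Firebase variant additionally dumps the stat to CSV in WriteBenchmarkResultCsv(). A hypothetical sketch of such a row; the column order and file path are assumptions, not the tool's actual format:

    #include <fstream>
    #include <string>

    #include "tensorflow/lite/tools/benchmark/benchmark_model.h"

    // Hypothetical: append one CSV row of min/max/avg inference latency.
    void AppendLatencyCsvRow(const tflite::benchmark::BenchmarkResults& results,
                             const std::string& csv_path) {
      auto inference_us = results.inference_time_us();
      std::ofstream csv(csv_path, std::ios::app);
      csv << inference_us.min() << "," << inference_us.max() << ","
          << inference_us.avg() << "\n";
    }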
/external/tensorflow/tensorflow/lite/tools/benchmark/experimental/c/
benchmark_c_api.cc
    41  return ConvertStat(results->results->inference_time_us());  // in TfLiteBenchmarkResultsGetInferenceTimeMicroseconds()
/external/tensorflow/tensorflow/lite/experimental/acceleration/configuration/
configuration_generated.h
  1730  std::vector<int64_t> inference_time_us;  // (member)
  1752  const flatbuffers::Vector<int64_t> *inference_time_us() const {  // in inference_time_us() (function)
  1769  verifier.VerifyVector(inference_time_us()) &&  // in Verify()
  1788  void add_inference_time_us(flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us) {  // in add_inference_time_us()
  1789  fbb_.AddOffset(BenchmarkResult::VT_INFERENCE_TIME_US, inference_time_us);  // in add_inference_time_us()
  1815  flatbuffers::Offset<flatbuffers::Vector<int64_t>> inference_time_us = 0,
  1822  builder_.add_inference_time_us(inference_time_us);
  1831  const std::vector<int64_t> *inference_time_us = nullptr,
  1836  auto inference_time_us__ = inference_time_us ? _fbb.CreateVector<int64_t>(*inference_time_us) : 0;
  2585  …inference_time_us(); if (_e) { _o->inference_time_us.resize(_e->size()); for (flatbuffers::uoffset…  // in UnPackTo()
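
The generated FlatBuffers code stores the raw per-run latencies as a vector rather than a pre-aggregated stat. A sketch of reading them back from a BenchmarkResult table, assuming only the accessor at line 1752 above; the tflite namespace for the generated type is an assumption:

    #include <cstdint>
    #include <iostream>

    #include "tensorflow/lite/experimental/acceleration/configuration/configuration_generated.h"

    // Sketch: iterate the per-run latencies in a received BenchmarkResult table.
    void PrintPerRunLatencies(const tflite::BenchmarkResult* result) {
      const flatbuffers::Vector<int64_t>* latencies = result->inference_time_us();
      if (latencies == nullptr) return;  // optional field may be absent
      for (int64_t us : *latencies) {
        std::cout << us << " us\n";
      }
    }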
configuration.proto
   400  repeated int64 inference_time_us = 2 [packed = true];  // (field)
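
The proto schema declares the same data as a packed repeated field. A sketch of the accessors protobuf generates for such a field in C++; the tflite::proto namespace and header path are assumptions:

    #include <cstdint>

    #include "tensorflow/lite/experimental/acceleration/configuration/configuration.pb.h"

    // Sketch: sum the per-run latencies via the standard repeated-field
    // accessors (inference_time_us_size() / inference_time_us(i)).
    int64_t SumLatenciesUs(const tflite::proto::BenchmarkResult& result) {
      int64_t total_us = 0;
      for (int i = 0; i < result.inference_time_us_size(); ++i) {
        total_us += result.inference_time_us(i);
      }
      return total_us;
    }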