Home
last modified time | relevance | path

Searched refs:BenchmarkModel (Results 1 – 3 of 3) sorted by relevance

/test/mlts/benchmark/jni/
Drun_tflite.cpp 65 BenchmarkModel* BenchmarkModel::create(const char* modelfile, bool use_nnapi, in create()
68 BenchmarkModel* model = new BenchmarkModel(); in create()
77 bool BenchmarkModel::init(const char* modelfile, bool use_nnapi, in init()
131 BenchmarkModel::BenchmarkModel() {} in BenchmarkModel() function in BenchmarkModel
132 BenchmarkModel::~BenchmarkModel() {} in ~BenchmarkModel()
134 bool BenchmarkModel::setInput(const uint8_t* dataPtr, size_t length) { in setInput()
152 void BenchmarkModel::saveInferenceOutput(InferenceResult* result, in saveInferenceOutput()
161 void BenchmarkModel::getOutputError(const uint8_t* expected_data, size_t length, in getOutputError()
202 bool BenchmarkModel::resizeInputTensors(std::vector<int> shape) { in resizeInputTensors()
214 bool BenchmarkModel::runInference() { in runInference()
[all …]
Drun_tflite.h 64 class BenchmarkModel {
66 ~BenchmarkModel();
68 static BenchmarkModel* create(const char* modelfile, bool use_nnapi,
86 BenchmarkModel();
Dbenchmark_jni.cpp 47 BenchmarkModel::create(modelFileName, _useNnApi, in Java_com_android_nn_benchmark_core_NNTestBase_initModel()
64 BenchmarkModel* model = (BenchmarkModel *) _modelHandle; in Java_com_android_nn_benchmark_core_NNTestBase_destroyModel()
76 BenchmarkModel* model = (BenchmarkModel *) _modelHandle; in Java_com_android_nn_benchmark_core_NNTestBase_resizeInputTensors()
274 BenchmarkModel* model = reinterpret_cast<BenchmarkModel*>(_modelHandle); in Java_com_android_nn_benchmark_core_NNTestBase_runBenchmark()
367 BenchmarkModel* model = reinterpret_cast<BenchmarkModel*>(_modelHandle); in Java_com_android_nn_benchmark_core_NNTestBase_dumpAllLayers()