Home
last modified time | relevance | path

Searched refs:BenchmarkModel (Results 1 – 3 of 3) sorted by relevance

/test/mlts/benchmark/jni/
run_tflite.cpp:65 BenchmarkModel* BenchmarkModel::create(const char* modelfile, bool use_nnapi, in create()
68 BenchmarkModel* model = new BenchmarkModel(); in create()
77 bool BenchmarkModel::init(const char* modelfile, bool use_nnapi, in init()
133 BenchmarkModel::BenchmarkModel() {} in BenchmarkModel() function in BenchmarkModel
134 BenchmarkModel::~BenchmarkModel() {} in ~BenchmarkModel()
136 bool BenchmarkModel::setInput(const uint8_t* dataPtr, size_t length) { in setInput()
154 void BenchmarkModel::saveInferenceOutput(InferenceResult* result, in saveInferenceOutput()
163 void BenchmarkModel::getOutputError(const uint8_t* expected_data, size_t length, in getOutputError()
204 bool BenchmarkModel::resizeInputTensors(std::vector<int> shape) { in resizeInputTensors()
216 bool BenchmarkModel::runInference() { in runInference()
[all …]
run_tflite.h:64 class BenchmarkModel {
66 ~BenchmarkModel();
68 static BenchmarkModel* create(const char* modelfile, bool use_nnapi,
86 BenchmarkModel();
benchmark_jni.cpp:49 BenchmarkModel::create(modelFileName, _useNnApi, in Java_com_android_nn_benchmark_core_NNTestBase_initModel()
66 BenchmarkModel* model = (BenchmarkModel *) _modelHandle; in Java_com_android_nn_benchmark_core_NNTestBase_destroyModel()
78 BenchmarkModel* model = (BenchmarkModel *) _modelHandle; in Java_com_android_nn_benchmark_core_NNTestBase_resizeInputTensors()
276 BenchmarkModel* model = reinterpret_cast<BenchmarkModel*>(_modelHandle); in Java_com_android_nn_benchmark_core_NNTestBase_runBenchmark()
369 BenchmarkModel* model = reinterpret_cast<BenchmarkModel*>(_modelHandle); in Java_com_android_nn_benchmark_core_NNTestBase_dumpAllLayers()