1 // clang-format off
2 // Generated file (from: dequantize.mod.py). Do not edit
CreateModel(Model * model)3 void CreateModel(Model *model) {
4   OperandType type0(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 1}, 1.0f, 0);
5   OperandType type1(Type::TENSOR_FLOAT32, {1, 2, 2, 1});
6   // Phase 1, operands
7   auto op1 = model->addOperand(&type0);
8   auto op2 = model->addOperand(&type1);
9   // Phase 2, operations
10   model->addOperation(ANEURALNETWORKS_DEQUANTIZE, {op1}, {op2});
11   // Phase 3, inputs and outputs
12   model->identifyInputsAndOutputs(
13     {op1},
14     {op2});
15   assert(model->isValid());
16 }
17 
// Returns true iff example index i is in the generated ignore list
// (empty for this model, so every index is checked).
inline bool is_ignored(int i) {
  static const std::set<int> ignore = {};
  return ignore.count(i) > 0;
}
22 
CreateModel_dynamic_output_shape(Model * model)23 void CreateModel_dynamic_output_shape(Model *model) {
24   OperandType type0(Type::TENSOR_QUANT8_ASYMM, {1, 2, 2, 1}, 1.0f, 0);
25   OperandType type2(Type::TENSOR_FLOAT32, {0, 0, 0, 0});
26   // Phase 1, operands
27   auto op1 = model->addOperand(&type0);
28   auto op2 = model->addOperand(&type2);
29   // Phase 2, operations
30   model->addOperation(ANEURALNETWORKS_DEQUANTIZE, {op1}, {op2});
31   // Phase 3, inputs and outputs
32   model->identifyInputsAndOutputs(
33     {op1},
34     {op2});
35   assert(model->isValid());
36 }
37 
// Returns true iff example index i is in the generated ignore list for
// the dynamic-output-shape variant (empty, so nothing is skipped).
inline bool is_ignored_dynamic_output_shape(int i) {
  static const std::set<int> ignore = {};
  return ignore.count(i) > 0;
}
42 
43