Home
last modified time | relevance | path

Searched refs: kRelu1 (Results 1 – 25 of 26) sorted by relevance

1 2

/external/tensorflow/tensorflow/lite/toco/graph_transformations/
Dfuse_activation_functions.cc36 ac_op->type != OperatorType::kRelu1 && in Run()
94 } else if (ac_op->type == OperatorType::kRelu1) { in Run()
95 op->fused_activation_function = FusedActivationFunctionType::kRelu1; in Run()
Dremove_trivial_quantized_activation_func.cc43 case OperatorType::kRelu1: in IsTrivialUnfusedActivationFunc()
73 case FusedActivationFunctionType::kRelu1: in IsTrivialFusedActivationFunc()
Dpropagate_activation_function_into_constants.cc35 ac_op->type != OperatorType::kRelu1 && in Run()
98 case OperatorType::kRelu1: { in Run()
Dresolve_constant_unary.cc135 case OperatorType::kRelu1: in Run()
320 unary_op->type == OperatorType::kRelu1 || in Run()
331 case OperatorType::kRelu1: { in Run()
Dunfuse_activation_functions.cc48 case FusedActivationFunctionType::kRelu1: in Run()
Dreorder_elementwise_unary.cc38 case OperatorType::kRelu1: in IsElementwiseOperator()
Dpropagate_fake_quant_num_bits.cc110 case OperatorType::kRelu1: in DoesOpBlockBackwardPropagation()
Dquantize.cc87 OperatorType::kRelu1, in SupportsQuantization()
413 op.type == OperatorType::kRelu || op.type == OperatorType::kRelu1 || in ChooseQuantizationForOperatorOutput()
Dpropagate_fixed_sizes.cc2167 case OperatorType::kRelu1: in Run()
/external/tensorflow/tensorflow/lite/toco/tflite/
Dtypes.cc227 case FusedActivationFunctionType::kRelu1: in Serialize()
244 return FusedActivationFunctionType::kRelu1; in Deserialize()
Dtypes_test.cc213 {FusedActivationFunctionType::kRelu1, in TEST()
Dop_version.cc238 {{OperatorType::kRelu1, 1}, "1.5.0"}, in GetMinimumRuntimeVersionForModel()
Doperator_test.cc118 CheckSimpleOperator<Relu1Operator>("RELU_N1_TO_1", OperatorType::kRelu1); in TEST_F()
Doperator.cc2005 ::tflite::BuiltinOperator_RELU_N1_TO_1, OperatorType::kRelu1)); in BuildOperatorList()
/external/tensorflow/tensorflow/core/kernels/neon/
Ddepthwiseconv_float.h560 Ac == FusedActivationFunctionType::kRelu1,
677 } else if (Ac == FusedActivationFunctionType::kRelu1) {
695 } else if (Ac == FusedActivationFunctionType::kRelu1) {
710 } else if (Ac == FusedActivationFunctionType::kRelu1) {
Dtypes.h23 enum class FusedActivationFunctionType { kNone, kRelu6, kRelu1, kRelu }; enumerator
/external/tensorflow/tensorflow/lite/kernels/internal/
Ddepthwiseconv_float_test.cc90 FusedActivationFunctionType::kRelu1, in TryTestOneDepthwiseConv()
Dtypes.h30 kRelu1, enumerator
Dcommon.h47 case FusedActivationFunctionType::kRelu1: in GetActivationMinMax()
/external/tensorflow/tensorflow/compiler/mlir/lite/transforms/
Doptimize.cc64 constexpr char kRelu1[] = "RELU_N1_TO_1"; variable
966 FuseFullyConnectedAndReluX<TFL::Relu1Op, kRelu1>, in runOnFunction()
978 FuseFullyConnectedAndReluX<TFL::Relu1Op, kRelu1>, in runOnFunction()
/external/tensorflow/tensorflow/lite/kernels/internal/reference/
Dlegacy_reference_ops.h345 Ac == FusedActivationFunctionType::kRelu1, in Conv()
556 Ac == FusedActivationFunctionType::kRelu1, in FullyConnected()
1268 Ac == FusedActivationFunctionType::kRelu1, in Add()
1324 Ac == FusedActivationFunctionType::kRelu1, in BroadcastAdd()
1391 Ac == FusedActivationFunctionType::kRelu1, in BroadcastAddFivefold()
1445 Ac == FusedActivationFunctionType::kRelu1, in Add()
1615 Ac == FusedActivationFunctionType::kRelu1, in AveragePool()
1706 Ac == FusedActivationFunctionType::kRelu1, in MaxPool()
/external/tensorflow/tensorflow/lite/micro/kernels/ceva/
Dtypes.h109 kRelu1, enumerator
214 enum class FusedActivationFunctionType : uint8_t { kNone, kRelu6, kRelu1, kRelu };
/external/tensorflow/tensorflow/lite/toco/
Dmodel.h76 kRelu1, enumerator
773 Relu1Operator() : Operator(OperatorType::kRelu1) {}
Ddump_graphviz.cc367 case FusedActivationFunctionType::kRelu1: in GetOpAttributes()
/external/tensorflow/tensorflow/lite/kernels/internal/optimized/
Dlegacy_optimized_ops.h1783 Ac == FusedActivationFunctionType::kRelu1, in FullyConnected()
2797 Ac == FusedActivationFunctionType::kRelu1, in Conv()
2824 Ac == FusedActivationFunctionType::kRelu1, in Conv()
2877 Ac == FusedActivationFunctionType::kRelu1, in ConvAsGemm()
3520 Ac == FusedActivationFunctionType::kRelu1, in Add()
3589 Ac == FusedActivationFunctionType::kRelu1, in BroadcastAdd()
3628 Ac == FusedActivationFunctionType::kRelu1, in BroadcastAddFivefold()
3683 Ac == FusedActivationFunctionType::kRelu1, in Add()
3837 Ac == FusedActivationFunctionType::kRelu1, in AveragePool()
3928 Ac == FusedActivationFunctionType::kRelu1, in MaxPool()

1 2