
Searched for refs:kRelu6 (results 1 – 25 of 33), sorted by relevance


/external/tensorflow/tensorflow/lite/toco/graph_transformations/
fuse_activation_functions.cc:
    35: if (ac_op->type != OperatorType::kRelu6 &&  [in Run()]
    92: if (ac_op->type == OperatorType::kRelu6) {  [in Run()]
    93: op->fused_activation_function = FusedActivationFunctionType::kRelu6;  [in Run()]
remove_trivial_quantized_activation_func.cc:
    47: case OperatorType::kRelu6:  [in IsTrivialUnfusedActivationFunc()]
    77: case FusedActivationFunctionType::kRelu6:  [in IsTrivialFusedActivationFunc()]
propagate_activation_function_into_constants.cc:
    34: if (ac_op->type != OperatorType::kRelu6 &&  [in Run()]
    104: case OperatorType::kRelu6: {  [in Run()]
resolve_constant_unary.cc:
    134: case OperatorType::kRelu6:  [in Run()]
    319: } else if (unary_op->type == OperatorType::kRelu6 ||  [in Run()]
    337: case OperatorType::kRelu6: {  [in Run()]
unfuse_activation_functions.cc:
    45: case FusedActivationFunctionType::kRelu6:  [in Run()]
identify_hardswish.cc:
    44: FusedActivationFunctionType::kRelu6)) {  [in Run()]
reorder_elementwise_unary.cc:
    39: case OperatorType::kRelu6:  [in IsElementwiseOperator()]
propagate_fake_quant_num_bits.cc:
    111: case OperatorType::kRelu6:  [in DoesOpBlockBackwardPropagation()]
quantize.cc:
    88: OperatorType::kRelu6,  [in SupportsQuantization()]
    414: op.type == OperatorType::kRelu6 || op.type == OperatorType::kPRelu ||  [in ChooseQuantizationForOperatorOutput()]
/external/tensorflow/tensorflow/lite/toco/tflite/
types.cc:
    225: case FusedActivationFunctionType::kRelu6:  [in Serialize()]
    242: return FusedActivationFunctionType::kRelu6;  [in Deserialize()]
op_version.cc:
    146: {{OperatorType::kRelu6, 1}, "1.5.0"},  [in GetMinimumRuntimeVersionForModel()]
    147: {{OperatorType::kRelu6, 2}, "1.14.0"},  [in GetMinimumRuntimeVersionForModel()]
operator_test.cc:
    119: CheckSimpleOperator<Relu6Operator>("RELU6", OperatorType::kRelu6);  [in TEST_F()]
    163: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    231: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    349: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    371: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    387: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    400: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
    409: op.fused_activation_function = FusedActivationFunctionType::kRelu6;  [in TEST_F()]
types_test.cc:
    211: {FusedActivationFunctionType::kRelu6,  [in TEST()]
/external/tensorflow/tensorflow/core/kernels/neon/
depthwiseconv_float.h:
    559: Ac == FusedActivationFunctionType::kRelu6 ||
    672: } else if (Ac == FusedActivationFunctionType::kRelu6) {
    693: } else if (Ac == FusedActivationFunctionType::kRelu6) {
    708: } else if (Ac == FusedActivationFunctionType::kRelu6) {
types.h:
    23: enum class FusedActivationFunctionType { kNone, kRelu6, kRelu1, kRelu };  [enumerator]
/external/tensorflow/tensorflow/lite/kernels/internal/
depthwiseconv_float_test.cc:
    91: FusedActivationFunctionType::kRelu6}));  [in TryTestOneDepthwiseConv()]
/external/tensorflow/tensorflow/stream_executor/
dnn.proto:
    56: kRelu6 = 3;  [enumerator]
dnn.cc:
    103: case ActivationMode::kRelu6:  [in ActivationModeString()]
/external/tensorflow/tensorflow/compiler/mlir/xla/
attribute_exporter.cc:
    78: return stream_executor::dnn::kRelu6;  [in ConvertConvActivationMode()]
/external/tensorflow/tensorflow/compiler/mlir/lite/transforms/
optimize.cc:
    63: constexpr char kRelu6[] = "RELU6";  [variable]
    965: FuseFullyConnectedAndReluX<TFL::Relu6Op, kRelu6>,  [in runOnFunction()]
    977: FuseFullyConnectedAndReluX<TFL::Relu6Op, kRelu6>,  [in runOnFunction()]
/external/tensorflow/tensorflow/lite/kernels/internal/reference/
legacy_reference_ops.h:
    344: Ac == FusedActivationFunctionType::kRelu6 ||  [in Conv()]
    555: Ac == FusedActivationFunctionType::kRelu6 ||  [in FullyConnected()]
    1267: Ac == FusedActivationFunctionType::kRelu6 ||  [in Add()]
    1323: Ac == FusedActivationFunctionType::kRelu6 ||  [in BroadcastAdd()]
    1390: Ac == FusedActivationFunctionType::kRelu6 ||  [in BroadcastAddFivefold()]
    1444: Ac == FusedActivationFunctionType::kRelu6 ||  [in Add()]
    1614: Ac == FusedActivationFunctionType::kRelu6 ||  [in AveragePool()]
    1705: Ac == FusedActivationFunctionType::kRelu6 ||  [in MaxPool()]
/external/tensorflow/tensorflow/lite/micro/kernels/ceva/
types.h:
    108: kRelu6,  [enumerator]
    214: enum class FusedActivationFunctionType : uint8_t { kNone, kRelu6, kRelu1, kRelu };
/external/tensorflow/tensorflow/lite/toco/
model.h:
    77: kRelu6,  [enumerator]
    784: Relu6Operator() : Operator(OperatorType::kRelu6) {}
dump_graphviz.cc:
    364: case FusedActivationFunctionType::kRelu6:  [in GetOpAttributes()]
/external/tensorflow/tensorflow/lite/kernels/internal/optimized/
legacy_optimized_ops.h:
    1782: Ac == FusedActivationFunctionType::kRelu6 ||  [in FullyConnected()]
    2796: Ac == FusedActivationFunctionType::kRelu6 ||  [in Conv()]
    2823: Ac == FusedActivationFunctionType::kRelu6 ||  [in Conv()]
    2876: Ac == FusedActivationFunctionType::kRelu6 ||  [in ConvAsGemm()]
    3519: Ac == FusedActivationFunctionType::kRelu6 ||  [in Add()]
    3588: Ac == FusedActivationFunctionType::kRelu6 ||  [in BroadcastAdd()]
    3627: Ac == FusedActivationFunctionType::kRelu6 ||  [in BroadcastAddFivefold()]
    3682: Ac == FusedActivationFunctionType::kRelu6 ||  [in Add()]
    3836: Ac == FusedActivationFunctionType::kRelu6 ||  [in AveragePool()]
    3927: Ac == FusedActivationFunctionType::kRelu6 ||  [in MaxPool()]
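
Note: kRelu6 in the hits above names the ReLU6 activation, i.e. clamping a value to [0, 6]; the enumerator is declared in the two types.h results (core/kernels/neon and lite/micro/kernels/ceva). The snippet below is a minimal, self-contained sketch of how such a fused activation is conventionally applied to a float value. ApplyFusedActivation is a hypothetical helper written only for illustration, not TensorFlow's implementation; the [-1, 1] range for kRelu1 is assumed from the usual TFLite convention.

    #include <algorithm>

    enum class FusedActivationFunctionType { kNone, kRelu6, kRelu1, kRelu };

    // Hypothetical helper (illustration only): apply a fused activation to x.
    inline float ApplyFusedActivation(FusedActivationFunctionType ac, float x) {
      switch (ac) {
        case FusedActivationFunctionType::kRelu:   // clamp to [0, +inf)
          return std::max(x, 0.0f);
        case FusedActivationFunctionType::kRelu1:  // assumed clamp to [-1, 1]
          return std::min(std::max(x, -1.0f), 1.0f);
        case FusedActivationFunctionType::kRelu6:  // clamp to [0, 6]
          return std::min(std::max(x, 0.0f), 6.0f);
        case FusedActivationFunctionType::kNone:
        default:
          return x;
      }
    }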
