/external/tensorflow/tensorflow/core/api_def/base_api/
  api_def_ReluGrad.pbtxt
    7:  The backpropagated gradients to the corresponding Relu operation.
    13: The features passed as input to the corresponding Relu operation, OR
    23: summary: "Computes rectified linear gradients for a Relu operation."
    (a sketch of these gradient semantics follows this directory's entries)
  api_def_Relu.pbtxt
    2:  graph_op_name: "Relu"
  api_def_SparseMatMul.pbtxt
    13: in the input gradient when that gradient comes from a Relu.
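The api_def_ReluGrad.pbtxt entry above describes the ReluGrad op: given the backpropagated gradients and the features that were fed to the original Relu, it lets the gradient through only where the feature was positive. A minimal sketch of that behavior, assuming TensorFlow 2.x and its generated tf.raw_ops.ReluGrad wrapper:

    import tensorflow as tf

    features = tf.constant([-2.0, -0.5, 0.0, 0.5, 2.0])
    gradients = tf.constant([1.0, 1.0, 1.0, 1.0, 1.0])

    # ReluGrad zeroes the incoming gradient wherever the original input was <= 0.
    grad = tf.raw_ops.ReluGrad(gradients=gradients, features=features)
    print(grad.numpy())  # -> [0. 0. 0. 1. 1.]

    # The same values fall out of autodiff through tf.nn.relu.
    with tf.GradientTape() as tape:
        tape.watch(features)
        y = tf.nn.relu(features)
    print(tape.gradient(y, features).numpy())  # -> [0. 0. 0. 1. 1.]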
/external/tensorflow/tensorflow/core/api_def/java_api/
  api_def_Relu.pbtxt
    2: graph_op_name: "Relu"
    4: name: "nn.Relu"
/external/tensorflow/tensorflow/core/kernels/
  relu_op.cc
    82:  void Relu<GPUDevice, T>::operator()( \
    85:  extern template struct Relu<GPUDevice, T>; \
    147: void Relu<GPUDevice, qint8>::operator()(
    150: extern template struct Relu<GPUDevice, qint8>;
    203: functor::Relu<Device, qint8> func;  (in Operate())
  relu_op_gpu.cu.cc
    124: struct Relu<Device, qint8> {  (struct)
    149: template struct functor::Relu<GPUDevice, T>; \
    162: template struct functor::Relu<GPUDevice, qint8>;  (member in tensorflow::functor)
  unary_ops_composition.cc
    176: auto relu = functor::Relu<Eigen::DefaultDevice, T>(); \
    252: REGISTER_COMPUTE_FN(Relu);  (in UnaryOpsCompositionSupport())
    317: REGISTER_COMPUTE_FN(Relu);  (in UnaryOpsCompositionSupport())
    381: REGISTER_COMPUTE_FN(Relu);  (in UnaryOpsCompositionSupport())
  relu_op_functor.h
    28:  struct Relu {  (struct)
    (see the sketch after this directory's entries)
  conv_ops_fused_impl.h
    128: struct Relu {  (struct)
    233: using WithBiasAddAndRelu = BiasAddOutputKernel<T, Relu>;
    237: using WithFusedBatchNormAndRelu = FusedBatchNormOutputKernel<T, Relu>;
  mkl_fused_ops_test.cc
    166: auto with_relu = ops::Relu(root.WithOpName("with_relu"), with_bias);  (in RunConv2DWithBiasAndRelu())
    590: auto with_relu = ops::Relu(root.WithOpName("with_relu"), with_bias);  (in RunMklPadWithFusedConv2DAndBiasRelu())
  conv_ops_test.cc
    643: ops::Relu with_relu = ops::Relu(root.WithOpName("with_relu"), with_bias);  (in RunConv2DWithBiasAndRelu())
    701: ops::Relu with_relu =  (in RunConv2DWithBatchNormAndRelu())
    702: ops::Relu(root.WithOpName("with_relu"), with_fused_batch_norm.y);  (in RunConv2DWithBatchNormAndRelu())
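relu_op_functor.h and conv_ops_fused_impl.h above each define a Relu functor: elementwise, both clamp negative values to zero, and conv_ops_fused_impl.h plugs its Relu into the BiasAddOutputKernel and FusedBatchNormOutputKernel aliases (WithBiasAddAndRelu, WithFusedBatchNormAndRelu) listed above. A rough Python equivalent of the elementwise computation, for illustration only and not the device code itself:

    import numpy as np
    import tensorflow as tf

    x = np.array([[-3.0, -0.1, 0.0], [0.1, 2.5, 7.0]], dtype=np.float32)

    # Elementwise max(x, 0), which is what the Relu functor computes.
    reference = np.maximum(x, 0.0)
    tf_result = tf.nn.relu(x).numpy()

    assert np.array_equal(reference, tf_result)
    print(tf_result)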
/external/tensorflow/tensorflow/core/graph/
  quantize_training_test.cc
    92:  Node* relu = test::graph::Relu(g, a);  (in TEST_F())
    143: Node* relu = test::graph::Relu(g, a);  (in TEST_F())
    192: Node* relu = test::graph::Relu(g, a);  (in TEST_F())
    246: Node* relu = test::graph::Relu(g, a);  (in TEST_F())
    294: Node* relu = test::graph::Relu(graph, const_a);  (in TEST_F())
    331: Node* relu = test::graph::Relu(graph, const_a);  (in TEST_F())
    373: Node* relu = test::graph::Relu(g, a);  (in TEST_F())
    467: Node* relu = test::graph::Relu(g, a);  (in TEST_F())
  testlib.h
    199: Node* Relu(Graph* g, Node* in);
/external/tensorflow/tensorflow/core/grappler/costs/graph_properties_testdata/
  large_function_graph.pbtxt
    443: name: "InceptionV2/InceptionV2/Conv2d_1a_7x7/Relu"
    470: name: "InceptionV2/InceptionV2/Conv2d_1a_7x7/Relu"
    471: op: "Relu"
    586: key: "InceptionV2/InceptionV2/Conv2d_1a_7x7/Relu"
    587: value: "InceptionV2/InceptionV2/Conv2d_1a_7x7/Relu:activations:0"
/external/tensorflow/tensorflow/lite/g3doc/convert/
  cmdline_examples.md
    160: …Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b…
    180: …InceptionV1/Mixed_3b/Branch_1/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_…
    201: …Relu,InceptionV1/InceptionV1/Mixed_3b/Branch_2/Conv2d_0a_1x1/Relu,InceptionV1/InceptionV1/Mixed_3b…
    209: MatMul, BiasAdd, Relu...), it is typically represented as a single "fused" op
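Line 209 of cmdline_examples.md notes that a MatMul/BiasAdd/Relu chain typically becomes a single fused op after conversion. A hedged sketch of how that shows up with the Python converter, assuming TensorFlow 2.x and tf.lite.TFLiteConverter (the command-line flow in that doc is the older toco path):

    import tensorflow as tf

    # A Dense layer is MatMul + BiasAdd + Relu in the TensorFlow graph.
    model = tf.keras.Sequential([
        tf.keras.layers.Dense(8, activation="relu", input_shape=(4,)),
    ])

    converter = tf.lite.TFLiteConverter.from_keras_model(model)
    tflite_model = converter.convert()

    # In the flatbuffer this usually lands as one FULLY_CONNECTED op with a
    # fused RELU activation rather than three separate ops.
    interpreter = tf.lite.Interpreter(model_content=tflite_model)
    interpreter.allocate_tensors()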
/external/tensorflow/tensorflow/contrib/receptive_field/
  README.md
    85:  g.as_graph_def(), 'input_image', 'InceptionResnetV2/Conv2d_7b_1x1/Relu')
    90:  the node `'InceptionResnetV2/Conv2d_7b_1x1/Relu'` is computed from a region
    101: `'InceptionResnetV2/Conv2d_7b_1x1/Relu'` is centered in the original image in
    171: --output_node InceptionResnetV2/Conv2d_7b_1x1/Relu
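The README.md matches above come from the receptive-field walkthrough that measures the receptive field of the 'InceptionResnetV2/Conv2d_7b_1x1/Relu' node relative to 'input_image'. A hedged reconstruction of the call line 85 belongs to, assuming the TF 1.x tf.contrib.receptive_field API (which returned the six values unpacked below) and that the Inception-ResNet-v2 graph has already been built into g:

    import tensorflow as tf
    from tensorflow.contrib import receptive_field

    g = tf.Graph()
    with g.as_default():
        # Build Inception-ResNet-v2 here so that 'input_image' and
        # 'InceptionResnetV2/Conv2d_7b_1x1/Relu' exist; omitted for brevity.
        pass

    # With an empty graph this call fails; it needs the real model above.
    rf_x, rf_y, eff_stride_x, eff_stride_y, eff_pad_x, eff_pad_y = (
        receptive_field.compute_receptive_field_from_graph_def(
            g.as_graph_def(), 'input_image',
            'InceptionResnetV2/Conv2d_7b_1x1/Relu'))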
/external/tensorflow/tensorflow/core/api_def/python_api/
  api_def_Relu.pbtxt
    2: graph_op_name: "Relu"
/external/tensorflow/tensorflow/tools/graph_transforms/
  quantize_nodes_test.cc
    404:  Output relu_op = Relu(root.WithOpName("relu_op"), constant_op);  (in TestQuantizeRelu())
    767:  Output relu_op = Relu(root.WithOpName("relu_op"), dequantize_op);  (in TestRemoveRedundantQuantizationWithMultipleOutputs())
    791:  Output relu_op = Relu(root.WithOpName("relu_op"), placeholder_op);  (in TestQuantizePlaceholders())
    1093: Output relu_op = Relu(root.WithOpName("relu_op"), bias_add_op);  (in TestHoistFakeQuants())
    1284: Output a_relu_op = Relu(root.WithOpName("a_relu_op"), a_op);  (in TestMergeDuplicatesNested())
    1290: Output b_relu_op = Relu(root.WithOpName("b_relu_op"), b_op);  (in TestMergeDuplicatesNested())
    1334: Output a_relu_op = Relu(root.WithOpName("a_relu_op"), a_op);  (in TestMergeDuplicatesInOut())
    1340: Output b_relu_op = Relu(root.WithOpName("b_relu_op"), b_op);  (in TestMergeDuplicatesInOut())
    1407: Relu(root.WithOpName("excluded_relu_op"), excluded_reshape_op);  (in TestExcludeNonFloat())
    1413: Relu(root.WithOpName("included_relu_op"), included_reshape_op);  (in TestExcludeNonFloat())
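quantize_nodes_test.cc above feeds Relu nodes through the graph_transforms quantization passes (TestQuantizeRelu, TestQuantizePlaceholders, and friends). A hedged sketch of invoking the same transform from Python through the TransformGraph wrapper, assuming TF 1.x; the tiny graph below just mirrors the placeholder → Relu shape used in TestQuantizePlaceholders:

    import tensorflow as tf
    from tensorflow.tools.graph_transforms import TransformGraph

    # A minimal float graph ending in a Relu.
    with tf.Graph().as_default() as g:
        inp = tf.placeholder(tf.float32, shape=[1, 4], name="input")
        tf.nn.relu(inp, name="relu_op")

    transformed_graph_def = TransformGraph(
        g.as_graph_def(),
        ["input"],            # graph inputs
        ["relu_op"],          # graph outputs
        ["quantize_nodes"])   # transforms to apply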
/external/tensorflow/tensorflow/contrib/fused_conv/ops/
  fused_conv2d_bias_activation_op.cc
    147: Must be "Relu" or "None".
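Line 147 above is from the attribute documentation of the contrib FusedConv2DBiasActivation op: its activation_mode must be "Relu" or "None". Ignoring the op's side-input and scaling attributes, the unfused equivalent of activation_mode="Relu" is just conv2d → bias_add → relu; a minimal sketch of that composition (the fused op's own Python wrapper is not shown here):

    import tensorflow as tf

    x = tf.random.normal([1, 8, 8, 3])
    filters = tf.random.normal([3, 3, 3, 16])
    bias = tf.zeros([16])

    # Roughly what FusedConv2DBiasActivation with activation_mode="Relu"
    # computes, expressed with the separate ops.
    conv = tf.nn.conv2d(x, filters, strides=[1, 1, 1, 1], padding="SAME")
    out = tf.nn.relu(tf.nn.bias_add(conv, bias))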
/external/tensorflow/tensorflow/contrib/specs/
  README.md
    15: - built-in layers are capitalized, not CamelCase (Relu, Fs, etc.)
    17: - less common operations are longer (Relu, Conc, etc.)
    50: - `Relu` = tf.nn.relu
/external/tensorflow/tensorflow/core/grappler/costs/
  analytical_cost_estimator_test.cc
    70: auto relu = ops::Relu(s.WithOpName("relu"), bias);  (in CreateMiniGraph())
/external/tensorflow/tensorflow/cc/gradients/
  nn_grad_test.cc
    43:  using ops::Relu;
    144: auto y = Relu(scope_, x);  (in TEST_F())
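nn_grad_test.cc above checks the registered C++ Relu gradient against a numeric estimate. A rough Python analogue of that check, assuming TensorFlow 2.x and tf.test.compute_gradient; like the C++ test, it keeps inputs away from zero, where the relu derivative is undefined:

    import numpy as np
    import tensorflow as tf

    # Avoid x == 0, where a numeric gradient check of relu would be noisy.
    x = tf.constant([[-1.3, -0.7, 0.5], [1.2, 2.0, -2.5]], dtype=tf.float64)

    theoretical, numerical = tf.test.compute_gradient(tf.nn.relu, [x])
    max_error = np.max(np.abs(theoretical[0] - numerical[0]))
    assert max_error < 1e-6, max_error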
/external/tensorflow/tensorflow/contrib/specs/python/
  specs_ops.py
    118: Relu = Fun(nn_ops.relu)  (variable)
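specs_ops.py line 118 defines Relu = Fun(nn_ops.relu): the specs mini-language wraps ordinary TensorFlow ops into composable atoms. The sketch below is a simplified stand-in for that idea, not the actual Fun class from tensorflow.contrib.specs, and the "|" chaining is only meant to suggest how specs composes layers:

    import tensorflow as tf

    class Fun:
        """Wrap an op so instances can be called and chained like specs atoms."""
        def __init__(self, fn, **kwargs):
            self.fn = fn
            self.kwargs = kwargs
        def __call__(self, *args, **kwargs):
            return self.fn(*args, **{**self.kwargs, **kwargs})
        def __or__(self, other):
            # "a | b" pipes the output of a into b.
            return Fun(lambda x: other(self(x)))

    Relu = Fun(tf.nn.relu)

    x = tf.constant([-1.0, 2.0])
    print(Relu(x).numpy())  # -> [0. 2.]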
/external/tensorflow/tensorflow/core/profiler/g3doc/
  profile_memory.md
    32: Relu  8462.80MB (30.83%, 7.03%)
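profile_memory.md line 32 comes from a tfprof report in which Relu activations account for roughly 8.4 GB. A loosely hedged sketch of requesting a comparable time-and-memory breakdown from the TF 1.x profiler API; in practice run_meta would be captured from a session.run call with full tracing, which is omitted here:

    import tensorflow as tf

    # Placeholder; normally populated via session.run(..., run_metadata=run_meta)
    # with trace_level=FULL_TRACE.
    run_meta = tf.RunMetadata()

    opts = tf.profiler.ProfileOptionBuilder.time_and_memory()
    tf.profiler.profile(
        tf.get_default_graph(),
        run_meta=run_meta,
        cmd='scope',
        options=opts)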
/external/tensorflow/tensorflow/core/grappler/optimizers/
  remapper_test.cc
    185: auto relu = ops::Relu(s.WithOpName("relu"), bias_add);  (in TEST_F())
    333: auto relu = ops::Relu(s.WithOpName("relu"), batch_norm.y);  (in TEST_F())
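remapper_test.cc above builds Conv2D → BiasAdd → Relu and FusedBatchNorm → Relu patterns and verifies that Grappler's remapper collapses them into fused kernels. A sketch of the same pattern written with the Python ops; whether it actually gets remapped into a fused op depends on the build and the device:

    import tensorflow as tf

    @tf.function
    def conv_bias_relu(x, filters, bias):
        # The pattern the remapper looks for: Conv2D -> BiasAdd -> Relu.
        y = tf.nn.conv2d(x, filters, strides=[1, 1, 1, 1], padding="SAME")
        y = tf.nn.bias_add(y, bias)
        return tf.nn.relu(y)

    x = tf.random.normal([1, 32, 32, 3])
    filters = tf.random.normal([3, 3, 3, 8])
    bias = tf.random.normal([8])
    out = conv_bias_relu(x, filters, bias)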