Searched refs:softmax (Results 1 – 25 of 148) sorted by relevance


/external/tensorflow/tensorflow/core/kernels/
softmax_op_functor.h
35 typename TTypes<T>::Matrix softmax, const bool log);
45 typename TTypes<T>::Matrix softmax, const bool log) { in Compute()
73 softmax.device(d) = shifted_logits; in Compute()
75 softmax.device(d) = (softmax - softmax.exp() in Compute()
86 softmax.device(d) = shifted_logits.exp(); in Compute()
88 softmax.device(d) = (softmax * softmax.sum(along_class) in Compute()
softmax_op.cc
39 typename TTypes<T>::Matrix softmax, const bool log) { in operator ()()
40 SoftmaxEigenImpl<Device, T>::Compute(d, logits, softmax, log); in operator ()()
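The softmax_op_functor.h fragments above subtract a per-row maximum ("shifted_logits") before exponentiating, then normalize by the row sum (or subtract its log for log-softmax). A minimal NumPy sketch of that max-shift pattern, as an illustration rather than the TensorFlow kernel itself:

    import numpy as np

    def softmax_2d(logits, log=False):
        """Row-wise (log-)softmax using the max-shift trick for numerical stability."""
        shifted = logits - logits.max(axis=-1, keepdims=True)   # the "shifted_logits" step
        if log:
            # log-softmax: shifted - log(sum(exp(shifted)))
            return shifted - np.log(np.exp(shifted).sum(axis=-1, keepdims=True))
        e = np.exp(shifted)
        return e / e.sum(axis=-1, keepdims=True)

Subtracting the maximum leaves the result unchanged mathematically but keeps exp() from overflowing, which is why the dense, sparse, and GPU kernels in these results all do it.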
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.keras.applications.pbtxt
77 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
81 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
85 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
89 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
93 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
97 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
101 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
105 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
109 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
113 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
[all …]
tensorflow.keras.applications.efficientnet.pbtxt
5 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
9 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
13 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
17 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
21 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
25 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
29 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
33 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
tensorflow.keras.applications.resnet_v2.pbtxt
5 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
9 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
13 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.keras.applications.pbtxt
77 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
81 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
85 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
89 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
93 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
97 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
101 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
105 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
109 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
113 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
[all …]
tensorflow.keras.applications.efficientnet.pbtxt
5 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
9 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
13 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
17 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
21 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
25 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
29 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
33 …s=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
tensorflow.keras.applications.resnet_v2.pbtxt
5 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
9 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
13 …rds=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
/external/libtextclassifier/native/lang_id/common/math/
softmax.cc
75 std::vector<float> softmax; in ComputeSoftmax() local
76 softmax.reserve(scores.size()); in ComputeSoftmax()
78 return softmax; in ComputeSoftmax()
97 softmax.push_back(exp_scores[i] / denominator); in ComputeSoftmax()
99 return softmax; in ComputeSoftmax()
/external/tensorflow/tensorflow/core/kernels/sparse/
kernels_gpu.cu.cc
348 T* softmax) { in CalculateRowSoftmax() argument
361 softmax[r_i] = exp_i; in CalculateRowSoftmax()
365 softmax[r_i] = softmax[r_i] / sum_exp; in CalculateRowSoftmax()
372 const T* logits, T* softmax) { in CSRSparseMatrixSoftmaxKernel2D() argument
379 softmax); in CSRSparseMatrixSoftmaxKernel2D()
397 const int* row_ptr, const T* logits, T* softmax) { in CSRSparseMatrixSoftmaxKernel3D() argument
414 softmax); in CSRSparseMatrixSoftmaxKernel3D()
481 const T* softmax, const int grad_softmax_begin, const int grad_softmax_end, in CalculateRowSoftmaxGrad() argument
501 sum_prod += ldg(softmax + i) * ldg(grad_softmax + j); in CalculateRowSoftmaxGrad()
528 gradient[i] = (ldg(grad_softmax + j) - sum_prod) * ldg(softmax + i); in CalculateRowSoftmaxGrad()
[all …]
softmax_op.cc
76 functor::CSRSparseMatrixSoftmax<Device, T> softmax; in Compute() local
78 ctx, softmax(ctx, *logits_matrix, output_matrix.values().vec<T>())); in Compute()
212 OpKernelContext* ctx, const CSRSparseMatrix& softmax, \
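CalculateRowSoftmax and CalculateRowSoftmaxGrad above work row by row over CSR storage: the forward pass exponentiates the shifted row values and divides by the row sum, and the gradient is (g - sum(g * s)) * s restricted to the stored entries. A rough SciPy sketch of the same row-wise idea (helper names are my own; this is not the CUDA kernel, and it assumes the gradient input shares the softmax's sparsity pattern):

    import numpy as np
    from scipy.sparse import csr_matrix

    def csr_row_softmax(m: csr_matrix) -> csr_matrix:
        """Softmax over the stored entries of each row of a CSR matrix."""
        out = m.astype(np.float64)
        for r in range(out.shape[0]):
            start, end = out.indptr[r], out.indptr[r + 1]
            if start == end:
                continue                         # empty row: nothing to normalize
            row = out.data[start:end]
            e = np.exp(row - row.max())          # shift by the row max, as in the kernel
            out.data[start:end] = e / e.sum()
        return out

    def csr_row_softmax_grad(softmax: csr_matrix, grad: csr_matrix) -> csr_matrix:
        """Gradient w.r.t. the logits: (g - sum(g * s)) * s per row, on the same sparsity."""
        out = softmax.copy()
        for r in range(out.shape[0]):
            start, end = out.indptr[r], out.indptr[r + 1]
            s, g = softmax.data[start:end], grad.data[start:end]
            out.data[start:end] = (g - np.dot(g, s)) * s
        return out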
/external/libtextclassifier/native/utils/math/
softmax.cc
77 std::vector<float> softmax; in ComputeSoftmax() local
80 softmax.reserve(scores_size); in ComputeSoftmax()
99 softmax.push_back(exp_scores[i] / denominator); in ComputeSoftmax()
101 return softmax; in ComputeSoftmax()
/external/tensorflow/tensorflow/python/kernel_tests/
softmax_op_test.py
50 softmax = e / np.reshape(np.sum(e, axis=dim), one_only_on_dim)
52 res = np.log(softmax)
54 res = softmax
77 tf_softmax = nn_ops.softmax(np_features, axis=dim, name=name)
240 op = nn_ops.softmax([[[1., 1., 1., 1.], [1., 2., 3., 4.]],
249 self.assertAllEqual(y, self.evaluate(nn_ops.softmax(x, axis=0)))
257 nn_ops.softmax([1., 2., 3., 4.], axis=dim).eval()
264 nn_ops.softmax(ones, axis=2).eval()
276 y = nn_ops.softmax(x)
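softmax_op_test.py above builds a NumPy reference (e divided by its sum along the chosen axis) and compares it with nn_ops.softmax. A hedged usage sketch of the same kind of check, assuming TensorFlow 2.x eager execution:

    import numpy as np
    import tensorflow as tf

    x = np.random.rand(2, 3, 4).astype(np.float32)
    axis = 1

    # NumPy reference, mirroring the pattern used in the test.
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    ref = e / e.sum(axis=axis, keepdims=True)

    np.testing.assert_allclose(tf.nn.softmax(x, axis=axis).numpy(), ref, rtol=1e-5)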
/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_Softmax.pbtxt
10 name: "softmax"
15 summary: "Computes softmax activations."
19 $$softmax[i, j] = exp(logits[i, j]) / sum_j(exp(logits[i, j]))$$
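In the formula above, the sum in the denominator runs over the class index for each fixed row i. A tiny worked example for one row of logits [1, 2, 3]:

    exp(1) ≈ 2.718, exp(2) ≈ 7.389, exp(3) ≈ 20.086
    denominator ≈ 30.193
    softmax ≈ [0.090, 0.245, 0.665]   (the row sums to 1)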
api_def_SparseMatrixSoftmax.pbtxt
9 name: "softmax"
12 summary: "Calculates the softmax of a CSRSparseMatrix."
14 Calculate the softmax of the innermost dimensions of a SparseMatrix.
api_def_SparseMatrixSoftmaxGrad.pbtxt
5 name: "softmax"
10 description: "The gradient of `softmax`."
api_def_SparseSoftmax.pbtxt
28 summary: "Applies softmax to a batched N-D `SparseTensor`."
33 This op is equivalent to applying the normal `tf.nn.softmax()` to each innermost
38 (1) Applies `tf.nn.softmax()` to a densified view of each innermost submatrix
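For the SparseSoftmax op described above, only the explicitly stored values of each innermost [B, C] submatrix take part in the normalization; implicitly-zero positions stay zero. A reference sketch of that behavior on a densified view (my own helper, assuming every row has at least one stored entry):

    import numpy as np

    def sparse_softmax_reference(values, mask):
        """values: dense [B, C] view; mask: True where the SparseTensor stores an entry."""
        filled = np.where(mask, values, -np.inf)             # treat missing entries as -inf
        shifted = filled - filled.max(axis=-1, keepdims=True)
        e = np.where(mask, np.exp(shifted), 0.0)             # missing positions contribute nothing
        return e / e.sum(axis=-1, keepdims=True)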
api_def_StopGradient.pbtxt
15 to pretend that the value was a constant. For example, the softmax function
20 def softmax(x):
39 However, when we backprop through the softmax to x, we don't want to backprop
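The StopGradient description above uses softmax as its motivating example: subtract the maximum before exponentiating so exp() cannot overflow, and wrap the max in stop_gradient so the backward pass treats it as a constant. A hedged TensorFlow sketch of that idiom (my paraphrase, not a verbatim copy of the doc's code):

    import tensorflow as tf

    def stable_softmax(x):
        # stop_gradient keeps the (locally constant) max out of the backward pass.
        z = x - tf.stop_gradient(tf.reduce_max(x, axis=-1, keepdims=True))
        numerator = tf.exp(z)
        return numerator / tf.reduce_sum(numerator, axis=-1, keepdims=True)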
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/
post-quantize.mlir
39 …%5 = "tfl.softmax"(%4) {beta = 1.000000e+00 : f32} : (tensor<1x401408x!quant.uniform<u8:f32, 0.023…
58 // CHECK-NEXT: %[[softmax:.*]] = "tfl.softmax"(%[[reshape]]) {beta = 1.000000e+00 : f32} : (tensor…
59 // CHECK-NEXT: return %[[softmax]] : tensor<1x401408x!quant.uniform<u8:f32, 3.906250e-03>>
71 // CHECK-NEXT: %[[softmax:.*]] = "tfl.softmax"(%arg0) {beta = 1.000000e+00 : f32} : (tensor<128x16…
72 %0 = "tfl.softmax"(%arg0) {beta = 1.000000e+00 : f32} : (tensor<128x16xf32>) -> tensor<128x16xf32>
75 // CHECK-NEXT: %[[argmax:.*]] = "tfl.arg_max"(%[[softmax]], %[[cst]]) : (tensor<128x16xf32>, tenso…
77 // CHECK-NEXT: return %[[softmax]], %[[argmax]] : tensor<128x16xf32>, tensor<128xi32>
/external/tensorflow/tensorflow/lite/toco/tflite/
op_version_test.cc
46 std::unique_ptr<SoftmaxOperator> softmax(new SoftmaxOperator()); in TEST() local
49 softmax->inputs.push_back(softmax_input); in TEST()
50 softmax->outputs.push_back(softmax_output); in TEST()
54 model.operators.push_back(std::move(softmax)); in TEST()
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
Softmax.pbtxt
8 name: "softmax"
30 name: "softmax"
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
Softmax.pbtxt
8 name: "softmax"
30 name: "softmax"
/external/tensorflow/tensorflow/core/ops/
sparse_csr_matrix_ops.cc
532 ShapeHandle softmax = sparse_matrix_shape_and_type.shape; in __anon5631e9dc0e02() local
533 TF_RETURN_IF_ERROR(c->WithRankAtLeast(softmax, 2, &softmax)); in __anon5631e9dc0e02()
534 TF_RETURN_IF_ERROR(c->WithRankAtMost(softmax, 3, &softmax)); in __anon5631e9dc0e02()
535 if (!c->RankKnown(softmax)) { in __anon5631e9dc0e02()
545 TF_RETURN_IF_ERROR(c->Merge(softmax, grad_softmax, &softmax)); in __anon5631e9dc0e02()
547 0, {ShapeAndType{softmax, sparse_matrix_shape_and_type.dtype}}); in __anon5631e9dc0e02()
/external/tensorflow/tensorflow/python/ops/
nn_grad.py
304 softmax = op.outputs[0]
305 sum_channels = math_ops.reduce_sum(grad_softmax * softmax, -1, keepdims=True)
306 return (grad_softmax - sum_channels) * softmax
323 softmax = math_ops.exp(op.outputs[0])
324 return grad - math_ops.reduce_sum(grad, -1, keepdims=True) * softmax
532 softmax = nn_ops.softmax(logits)
537 array_ops.expand_dims(softmax, 2)),
538 axis=1)) * softmax)
558 softmax = nn_ops.softmax(logits)
563 array_ops.expand_dims(softmax, 2)),
[all …]
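The softmax gradient registered in nn_grad.py above computes (grad_softmax - sum_channels) * softmax, with the reduction taken over the class axis. A small NumPy check of that identity against the explicit Jacobian, for a single example:

    import numpy as np

    s = np.array([0.2, 0.3, 0.5])      # softmax output for one example
    g = np.array([1.0, -2.0, 0.5])     # upstream gradient dL/ds

    fast = (g - np.sum(g * s)) * s     # the registered gradient formula

    J = np.diag(s) - np.outer(s, s)    # explicit Jacobian ds/dlogits
    assert np.allclose(fast, J @ g)    # J is symmetric, so J @ g == J.T @ g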
/external/libtextclassifier/native/lang_id/common/flatbuffers/
embedding-network.fbs
84 // hidden layer or the final (output / softmax) layer.
93 // is generally used for softmax classification. That's why we say that the
94 // last layer is the "softmax layer".
113 // Hidden layers, followed by the final (softmax) layer.
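The embedding-network.fbs comments above call the final classification layer the "softmax layer": a last linear layer whose outputs are pushed through softmax to give class probabilities. A toy sketch of that final step (hypothetical sizes, unrelated to the flatbuffer schema itself):

    import numpy as np

    hidden = np.random.rand(1, 16).astype(np.float32)    # output of the last hidden layer
    weights = np.random.rand(16, 4).astype(np.float32)   # final ("softmax") layer, 4 classes
    bias = np.zeros(4, dtype=np.float32)

    logits = hidden @ weights + bias
    e = np.exp(logits - logits.max(axis=-1, keepdims=True))
    probs = e / e.sum(axis=-1, keepdims=True)             # rows sum to 1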
