
Searched refs:num_heads (Results 1 – 5 of 5) sorted by relevance

/external/tensorflow/tensorflow/python/keras/layers/
multi_head_attention_test.py
    43  num_heads=12,
    56  num_heads=12, key_dim=64)
    65  num_heads=12, key_dim=64)
    75  num_heads=12, key_dim=64)
    87  num_heads=2, key_dim=2, use_bias=use_bias)
   137  num_heads=12,
   148  num_heads=2, key_dim=2)
   201  num_heads=2, key_dim=2, attention_axes=attention_axes)
   230  num_heads=2, key_dim=2, dropout=0.5)
   264  test_layer = SubclassAttention(num_heads=12, key_dim=64)
[all …]
multi_head_attention.py
   218  num_heads,  [argument]
   234  self._num_heads = num_heads
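The test hits above exercise tf.keras.layers.MultiHeadAttention with num_heads/key_dim pairs plus optional dropout, use_bias, and attention_axes. A minimal usage sketch against the public TF 2.x API (batch and sequence shapes chosen for illustration only):

    import numpy as np
    import tensorflow as tf

    # Same constructor pattern as the test hits: num_heads + key_dim,
    # with optional dropout and use_bias.
    layer = tf.keras.layers.MultiHeadAttention(
        num_heads=2, key_dim=2, dropout=0.5, use_bias=True)

    query = np.random.rand(3, 8, 16).astype("float32")  # (batch, target_seq, features)
    value = np.random.rand(3, 4, 16).astype("float32")  # (batch, source_seq, features)

    # Cross-attention call: attends from query positions over value positions;
    # the projection weights are built lazily on this first call.
    output = layer(query, value)
    print(output.shape)  # (3, 8, 16): projected back to the query feature size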
/external/tensorflow/tensorflow/python/keras/benchmarks/keras_examples_benchmarks/
text_classification_transformer_benchmark_test.py
    42  num_heads = 2
    48  transformer_block = TransformerBlock(embed_dim, num_heads, ff_dim)
   145  def __init__(self, embed_dim, num_heads=8):  [argument]
   148  self.num_heads = num_heads
   149  if embed_dim % num_heads != 0:
   152  self.projection_dim = embed_dim // num_heads
   167  x = tf.reshape(x, (batch_size, -1, self.num_heads, self.projection_dim))
   197  def __init__(self, embed_dim, num_heads, ff_dim, rate=0.1):  [argument]
   199  self.att = MultiHeadSelfAttention(embed_dim, num_heads)
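Lines 149–167 of the benchmark show the manual head split that its MultiHeadSelfAttention performs. A condensed sketch of that arithmetic (the transpose step is assumed from the standard Keras transformer example; the dimension values are illustrative):

    import tensorflow as tf

    embed_dim, num_heads = 32, 2
    # Guard seen at line 149: the feature axis must split evenly across heads.
    if embed_dim % num_heads != 0:
        raise ValueError(
            f"embedding dimension = {embed_dim} should be divisible "
            f"by number of heads = {num_heads}")
    projection_dim = embed_dim // num_heads  # line 152: per-head feature size

    batch_size = 4
    x = tf.random.normal((batch_size, 10, embed_dim))

    # Line 167: split features into (num_heads, projection_dim), then move the
    # heads axis ahead of the sequence axis so each head attends independently.
    x = tf.reshape(x, (batch_size, -1, num_heads, projection_dim))
    x = tf.transpose(x, perm=[0, 2, 1, 3])
    print(x.shape)  # (4, 2, 10, 16)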
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.keras.layers.-multi-head-attention.pbtxt
   132  …argspec: "args=[\'self\', \'num_heads\', \'key_dim\', \'value_dim\', \'dropout\', \'use_bias\', \'…
/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.keras.layers.-multi-head-attention.pbtxt
   132  …argspec: "args=[\'self\', \'num_heads\', \'key_dim\', \'value_dim\', \'dropout\', \'use_bias\', \'…
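Both golden files pin the same leading constructor arguments, so v1 and v2 expose an identical MultiHeadAttention signature prefix. A sketch passing those leading arguments explicitly (the trailing, truncated arguments are left at their defaults; in the public TF 2.x API, value_dim falls back to key_dim when omitted):

    import tensorflow as tf

    # Leading args from the golden argspec: num_heads, key_dim, value_dim,
    # dropout, use_bias (the rest is truncated in the hits above).
    layer = tf.keras.layers.MultiHeadAttention(
        num_heads=8,
        key_dim=64,
        value_dim=64,   # None (the default) would fall back to key_dim
        dropout=0.1,
        use_bias=True)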