
Searched refs:attention_mask (Results 1 – 4 of 4) sorted by relevance

/external/tensorflow/tensorflow/python/keras/layers/
multi_head_attention.py
417  def _masked_softmax(self, attention_scores, attention_mask=None):
420      if attention_mask is not None:
424          for _ in range(len(attention_scores.shape) - len(attention_mask.shape)):
425              attention_mask = array_ops.expand_dims(
426                  attention_mask, axis=mask_expansion_axes)
427      return self._softmax(attention_scores, attention_mask)
433      attention_mask=None,
464      attention_scores = self._masked_softmax(attention_scores, attention_mask)
480      attention_mask=None,
500          query, key, value, attention_mask, training)
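
The _masked_softmax excerpt above shows the broadcasting pattern: attention_scores carries a per-head axis that the incoming attention_mask lacks, so the layer inserts singleton axes with expand_dims until the ranks match, then hands both tensors to its softmax. Below is a minimal standalone sketch of that pattern, assuming scores of shape (batch, num_heads, query_len, key_len), a boolean mask of shape (batch, query_len, key_len), and additive -1e9 masking in place of the Softmax layer the real code delegates to; mask_expansion_axes is not visible in the excerpt, so the head axis at position -3 is an assumption.

import tensorflow as tf

def masked_softmax(attention_scores, attention_mask=None):
    # Sketch only: attention_scores assumed (batch, heads, T, S),
    # attention_mask assumed (batch, T, S). Insert singleton axes until
    # the ranks match so the mask broadcasts across the head axis.
    if attention_mask is not None:
        while len(attention_mask.shape) < len(attention_scores.shape):
            attention_mask = tf.expand_dims(attention_mask, axis=-3)  # assumed head axis
        # Additive masking: drive masked (False/0) positions toward -inf
        # so they contribute ~0 probability after the softmax.
        adder = (1.0 - tf.cast(attention_mask, attention_scores.dtype)) * -1e9
        attention_scores = attention_scores + adder
    return tf.nn.softmax(attention_scores, axis=-1)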
multi_head_attention_test.py
93   output = test_layer(query=query, value=value, attention_mask=mask_tensor)
117  output = test_layer(query, value=value, key=key, attention_mask=mask_tensor)
154  output = test_layer(query=query, value=value, attention_mask=mask_tensor)
178      query=query, value=value, attention_mask=mask_tensor,
221      attention_mask=mask_tensor)
254      attention_mask=None,
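
The test hits above all call the public layer the same way, and the pattern is reproducible with the released tf.keras API. A small usage sketch follows; the names query, value, and mask_tensor mirror the tests, while the shapes are illustrative assumptions.

import numpy as np
import tensorflow as tf

batch, query_len, key_len, dim = 2, 4, 6, 8  # illustrative shapes
query = tf.random.normal((batch, query_len, dim))
value = tf.random.normal((batch, key_len, dim))
# Boolean mask, True = attend; shape (batch, query_len, key_len).
mask_tensor = np.random.randint(0, 2, (batch, query_len, key_len)).astype(bool)

test_layer = tf.keras.layers.MultiHeadAttention(num_heads=2, key_dim=dim)
output = test_layer(query=query, value=value, attention_mask=mask_tensor)
print(output.shape)  # (2, 4, 8): output keeps query's sequence length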
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.keras.layers.-multi-head-attention.pbtxt
164  …argspec: "args=[\'self\', \'query\', \'value\', \'key\', \'attention_mask\', \'return_attention_sc…
/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.keras.layers.-multi-head-attention.pbtxt
164  …argspec: "args=[\'self\', \'query\', \'value\', \'key\', \'attention_mask\', \'return_attention_sc…
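
Both golden files truncate the argspec, but the visible arguments (query, value, key, attention_mask, return_attention_sc…) match the layer's documented call signature, where return_attention_scores=True makes the call return the per-head probabilities alongside the output. Reusing the tensors from the sketch above:

output, scores = test_layer(
    query=query, value=value, attention_mask=mask_tensor,
    return_attention_scores=True)
print(scores.shape)  # (2, 2, 4, 6): (batch, num_heads, query_len, key_len)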