
Searched refs:all_reduce_alg (Results 1 – 2 of 2), sorted by relevance

/external/tensorflow/tensorflow/python/keras/benchmarks/
distribution_util.py
   32  def _collective_communication(all_reduce_alg):
   50    if all_reduce_alg not in collective_communication_options:
   54          all_reduce_alg))
   55    return collective_communication_options[all_reduce_alg]
   58  def _mirrored_cross_device_ops(all_reduce_alg, num_packs):
   71    if all_reduce_alg is None:
   77    if all_reduce_alg not in mirrored_all_reduce_options:
   81          all_reduce_alg))
   82    cross_device_ops_class = mirrored_all_reduce_options[all_reduce_alg]
   88      all_reduce_alg=None,
[all …]
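Read together, these fragments show a validate-and-dispatch pattern: the benchmark utility checks the `all_reduce_alg` string against a dictionary of supported options and returns the matching TensorFlow object. A minimal sketch of that pattern follows; the dictionary contents, error wording, and enum values are assumptions inferred from the fragments and the public tf.distribute API, not copied from the file.

import tensorflow as tf

def _collective_communication(all_reduce_alg):
  # Sketch: map an algorithm name to a CollectiveCommunication enum value.
  # The mapping below is an assumption based on the public TF API.
  collective_communication_options = {
      None: tf.distribute.experimental.CollectiveCommunication.AUTO,
      "ring": tf.distribute.experimental.CollectiveCommunication.RING,
      "nccl": tf.distribute.experimental.CollectiveCommunication.NCCL,
  }
  if all_reduce_alg not in collective_communication_options:
    raise ValueError(
        "When used with `multi_worker_mirrored`, valid values for "
        "all_reduce_alg are [`ring`, `nccl`]. Got: {}".format(all_reduce_alg))
  return collective_communication_options[all_reduce_alg]

def _mirrored_cross_device_ops(all_reduce_alg, num_packs):
  # Sketch: map an algorithm name to a tf.distribute CrossDeviceOps class,
  # then instantiate it with num_packs.
  if all_reduce_alg is None:
    return None  # let MirroredStrategy choose its default
  mirrored_all_reduce_options = {
      "nccl": tf.distribute.NcclAllReduce,
      "hierarchical_copy": tf.distribute.HierarchicalCopyAllReduce,
  }
  if all_reduce_alg not in mirrored_all_reduce_options:
    raise ValueError(
        "When used with `mirrored`, valid values for all_reduce_alg are "
        "[`nccl`, `hierarchical_copy`]. Got: {}".format(all_reduce_alg))
  cross_device_ops_class = mirrored_all_reduce_options[all_reduce_alg]
  return cross_device_ops_class(num_packs=num_packs)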
/external/tensorflow/tensorflow/python/distribute/
cross_device_ops.py
   822  def __init__(self, all_reduce_alg="nccl", num_packs=1):
   831    self._all_reduce_alg = all_reduce_alg
   969      all_reduce_alg="nccl", num_packs=num_packs)
  1011      all_reduce_alg="hierarchical_copy",
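These matches suggest that `AllReduceCrossDeviceOps.__init__` accepts `all_reduce_alg` directly (line 822), while the public subclasses pin it: `NcclAllReduce` forwards `all_reduce_alg="nccl"` (line 969) and `HierarchicalCopyAllReduce` forwards `all_reduce_alg="hierarchical_copy"` (line 1011). A hedged usage sketch is below; the device list and `num_packs` value are illustrative assumptions.

import tensorflow as tf

# Each subclass fixes all_reduce_alg internally, so a caller only chooses
# the class and, optionally, how many packs gradients are fused into.
nccl_ops = tf.distribute.NcclAllReduce(num_packs=1)              # "nccl"
hier_ops = tf.distribute.HierarchicalCopyAllReduce(num_packs=1)  # "hierarchical_copy"

# Illustrative: hand the chosen cross-device ops to MirroredStrategy.
strategy = tf.distribute.MirroredStrategy(
    devices=["/gpu:0", "/gpu:1"],  # assumed two-GPU setup
    cross_device_ops=nccl_ops)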