
Searched refs: global_batch_size (Results 1 – 8 of 8) sorted by relevance

/external/tensorflow/tensorflow/python/keras/engine/
distributed_training_utils.py 450 global_batch_size = min(num_samples, 32)
454 global_batch_size = batch_size
456 global_batch_size *= distribution_strategy.num_replicas_in_sync
458 steps = np.ceil(num_samples / global_batch_size).astype(int)
460 if num_samples % global_batch_size:
462 'batch size %s.' % (num_samples, global_batch_size))
463 steps = num_samples // global_batch_size
472 global_batch_size = num_samples // steps
476 global_batch_size = batch_size
478 global_batch_size *= distribution_strategy.num_replicas_in_sync
[all …]
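The hits above show the batching arithmetic used by the Keras distributed-training utilities: the per-replica batch size is scaled by `num_replicas_in_sync` to form the global batch, and the step count is derived from `num_samples`. The following is a minimal, standalone sketch of that arithmetic, assuming a free function with the same variable names as the excerpt; it is illustrative, not the actual Keras helper.

```python
import numpy as np

def infer_steps(num_samples, batch_size=None, num_replicas_in_sync=1):
    """Sketch of the global-batch/steps arithmetic visible in the excerpt."""
    if batch_size is None:
        # Default per-replica batch size, capped by the dataset size.
        global_batch_size = min(num_samples, 32)
    else:
        global_batch_size = batch_size
    # The global batch spans all replicas that train in sync.
    global_batch_size *= num_replicas_in_sync
    # Round up so a trailing partial batch still counts as a step.
    steps = np.ceil(num_samples / global_batch_size).astype(int)
    return global_batch_size, steps

# Example: 1000 samples, per-replica batch of 32, 4 replicas
# -> global batch 128, 8 steps per epoch.
print(infer_steps(1000, batch_size=32, num_replicas_in_sync=4))
```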
/external/tensorflow/tensorflow/python/tpu/
tpu_context.py 457 def global_batch_size(self): member in _InternalTPUContext
471 global_batch_size = self.global_batch_size
473 return global_batch_size
478 return global_batch_size // self.num_replicas
480 return global_batch_size // self.num_hosts
485 global_batch_size = self.global_batch_size
488 return global_batch_size
491 return global_batch_size // self.num_replicas
tpu_estimator.py 2796 ctx.global_batch_size,
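In the `_InternalTPUContext` hits, the global batch size is either returned as-is or sharded by replica (TPU core) or by host, depending on how the input pipeline is laid out. A rough sketch of that sharding follows; `PerShardBatchSize` and its constructor arguments are hypothetical names for illustration, and only the divisions by `num_replicas` and `num_hosts` come from the excerpt.

```python
class PerShardBatchSize:
    def __init__(self, global_batch_size, num_replicas, num_hosts):
        self.global_batch_size = global_batch_size
        self.num_replicas = num_replicas
        self.num_hosts = num_hosts

    def per_replica(self):
        # Each replica (TPU core) processes an equal slice of the global batch.
        return self.global_batch_size // self.num_replicas

    def per_host(self):
        # Used when the input pipeline is sharded per host instead of per core.
        return self.global_batch_size // self.num_hosts

shard = PerShardBatchSize(global_batch_size=1024, num_replicas=8, num_hosts=2)
print(shard.per_replica())  # 128
print(shard.per_host())     # 512
```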
/external/tensorflow/tensorflow/contrib/distribute/python/
keras_correctness_test_base.py 133 def get_batch_size(global_batch_size, distribution): argument
134 batch_size = global_batch_size
161 global_batch_size = _GLOBAL_BATCH_SIZE
162 batch_size = get_batch_size(global_batch_size, with_distribution)
204 'steps_per_epoch': training_data_size // global_batch_size,
476 global_batch_size = 64
478 ds_batch_size = get_batch_size(global_batch_size, distribution)
479 nods_batch_size = get_batch_size(global_batch_size, None)
keras_backward_compat_test.py 216 global_batch_size = 64
217 batch_size = global_batch_size
261 'steps_per_epoch': len(x_train) // global_batch_size,
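Both correctness tests start from a shared global batch size and derive the per-run batch size and `steps_per_epoch` from it. The sketch below models that pattern; `_GLOBAL_BATCH_SIZE`, the body of `get_batch_size`, and the replica adjustment are assumptions based on the excerpts, not the real test helpers.

```python
_GLOBAL_BATCH_SIZE = 64

def get_batch_size(global_batch_size, distribution):
    # The test begins with the global batch size and may adjust it when a
    # distribution strategy is in use (one plausible adjustment shown here:
    # give each replica an equal share).
    batch_size = global_batch_size
    if distribution is not None and hasattr(distribution, "num_replicas_in_sync"):
        batch_size //= distribution.num_replicas_in_sync
    return batch_size

training_data_size = 1000
steps_per_epoch = training_data_size // _GLOBAL_BATCH_SIZE  # 15 full global batches
```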
/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.distribute.-input-context.pbtxt 23 argspec: "args=[\'self\', \'global_batch_size\'], varargs=None, keywords=None, defaults=None"
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.distribute.-input-context.pbtxt 23 argspec: "args=[\'self\', \'global_batch_size\'], varargs=None, keywords=None, defaults=None"
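The v1 and v2 API golden files pin the same signature: a method whose only argument is `global_batch_size`. In the public API this corresponds to `tf.distribute.InputContext.get_per_replica_batch_size`. A short usage sketch follows; the constructor values and the batch size of 64 are just example numbers.

```python
import tensorflow as tf

ctx = tf.distribute.InputContext(
    num_input_pipelines=1,
    input_pipeline_id=0,
    num_replicas_in_sync=4)

# Split a global batch of 64 evenly across the 4 synchronized replicas.
per_replica = ctx.get_per_replica_batch_size(64)  # 64 // 4 == 16
print(per_replica)
```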
/external/tensorflow/tensorflow/python/distribute/
distribute_lib.py 269 def get_per_replica_batch_size(self, global_batch_size): argument
283 if global_batch_size % self._num_replicas_in_sync != 0:
286 (global_batch_size, self._num_replicas_in_sync))
287 return global_batch_size // self._num_replicas_in_sync
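The distribute_lib.py hit is the implementation behind that golden signature: the global batch must divide evenly across the replicas training in sync, otherwise an error is raised. A sketch mirroring the check visible in the excerpt, written as a free function (the excerpt uses the private `_num_replicas_in_sync` attribute instead of an argument):

```python
def get_per_replica_batch_size(global_batch_size, num_replicas_in_sync):
    # The global batch must split evenly across synchronized replicas.
    if global_batch_size % num_replicas_in_sync != 0:
        raise ValueError(
            "The `global_batch_size` %r is not divisible by "
            "`num_replicas_in_sync` %r" % (global_batch_size, num_replicas_in_sync))
    return global_batch_size // num_replicas_in_sync

print(get_per_replica_batch_size(64, 4))   # 16
# get_per_replica_batch_size(64, 3) would raise ValueError.
```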