/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/

make_batched_features_dataset_test.py
    45: for num_epochs in [1, 10]:
    51: num_epochs=num_epochs,
    54: batch_size, 0, num_epochs=num_epochs, label_key_provided=True)
    63: num_epochs=num_epochs,
    66: batch_size, 1, num_epochs=num_epochs, label_key_provided=True)
    75: num_epochs=num_epochs,
    78: batch_size, num_epochs=num_epochs, label_key_provided=True)
    85: num_epochs=num_epochs,
    87: self.verify_records(batch_size, num_epochs=num_epochs)
    112: num_epochs = 5
    [all …]

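A minimal sketch of the API these tests exercise, tf.data.experimental.make_batched_features_dataset, showing how num_epochs bounds the number of passes. The feature names and the temporary file are illustrative assumptions, not taken from the test.

    import tensorflow as tf

    # Write a tiny TFRecord file of tf.train.Example protos so the sketch is
    # self-contained; the path and feature keys are placeholders.
    path = "/tmp/examples-0.tfrecord"
    with tf.io.TFRecordWriter(path) as writer:
        for i in range(100):
            example = tf.train.Example(features=tf.train.Features(feature={
                "age": tf.train.Feature(int64_list=tf.train.Int64List(value=[i])),
                "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[i % 2])),
            }))
            writer.write(example.SerializeToString())

    features = {
        "age": tf.io.FixedLenFeature([], tf.int64),
        "label": tf.io.FixedLenFeature([], tf.int64),
    }

    dataset = tf.data.experimental.make_batched_features_dataset(
        file_pattern=path,
        batch_size=32,
        features=features,
        label_key="label",  # labels split out, as in the label_key_provided cases
        num_epochs=10,      # a finite number of passes; None repeats forever
        shuffle=False)

    for batch, labels in dataset.take(2):
        print(batch["age"].shape, labels.shape)
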
make_tf_record_dataset_test.py
    37: def _read_test(self, batch_size, num_epochs, file_index=None,
    52: num_epochs=num_epochs,
    62: num_epochs=num_epochs,
    72: for num_epochs in [1, 3]:
    74: self._read_test(batch_size, num_epochs, 0)
    77: self._read_test(batch_size, num_epochs, 1)
    80: self._read_test(batch_size, num_epochs)
    83: self._read_test(batch_size, num_epochs, num_parallel_reads=8)
    88: for num_epochs in [1, 3]:
    90: self._read_test(batch_size, num_epochs, 0, drop_final_batch=True)
    [all …]

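A sketch (not the test's code) of tf.data.experimental.make_tf_record_dataset with the options the tests sweep: a bounded num_epochs, parallel reads, and dropping a short final batch. The file and its contents are placeholders.

    import tensorflow as tf

    path = "/tmp/data-0.tfrecord"
    with tf.io.TFRecordWriter(path) as writer:
        for i in range(10):
            writer.write(("record %d" % i).encode())

    dataset = tf.data.experimental.make_tf_record_dataset(
        file_pattern=path,
        batch_size=4,
        num_epochs=3,            # the tests use num_epochs in [1, 3]
        num_parallel_reads=8,    # interleave reads across matching files
        shuffle=False,
        drop_final_batch=True)   # drop a short trailing batch, as one case does

    for batch in dataset:
        print(batch.shape)       # each element is a batch of serialized records
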
make_csv_dataset_test.py
    40: def _make_csv_dataset(self, filenames, batch_size, num_epochs=1, **kwargs):
    42: filenames, batch_size=batch_size, num_epochs=num_epochs, **kwargs)
    65: num_epochs):
    67: for _ in range(num_epochs):
    81: num_epochs,
    92: num_epochs,
    113: num_epochs=1,
    123: num_epochs=num_epochs,
    126: self._verify_output(dataset, batch_size, num_epochs, label_name,
    155: num_epochs=1,
    [all …]

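A sketch of tf.data.experimental.make_csv_dataset with num_epochs, using a throwaway CSV so it runs end to end. The column and file names are invented for illustration.

    import tensorflow as tf

    path = "/tmp/toy.csv"
    with open(path, "w") as f:
        f.write("x,y,label\n")
        for i in range(8):
            f.write("%d,%d,%d\n" % (i, 2 * i, i % 2))

    dataset = tf.data.experimental.make_csv_dataset(
        path,
        batch_size=4,
        label_name="label",  # returned separately as the second tuple element
        num_epochs=1,        # a single pass; None would repeat indefinitely
        shuffle=False)

    for features, label in dataset:
        print({k: v.numpy() for k, v in features.items()}, label.numpy())
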
reader_dataset_ops_test_base.py
    74: num_epochs,
    83: self.num_epochs = num_epochs
    97: num_epochs=self.num_epochs,
    195: num_epochs,
    214: for _ in range(num_epochs):
    252: num_epochs=1,
    263: num_epochs,
    336: def _next_expected_batch(self, file_indices, batch_size, num_epochs,
    350: for _ in range(num_epochs):
    368: def _verify_records(self, outputs, batch_size, file_index, num_epochs,
    [all …]

shuffle_and_repeat_test.py
    123: num_epochs = 1000 * 1000
    128: buffer_size=5 * num_epochs, count=num_epochs))
    141: num_epochs = 2
    145: buffer_size=num_elements, count=num_epochs))

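Sketch of the fused transformation these tests drive: tf.data.experimental.shuffle_and_repeat shuffles across epoch boundaries while repeating the input, with count playing the role of num_epochs. The sizes below are small illustrative values, not the test's.

    import tensorflow as tf

    num_elements = 100
    num_epochs = 2

    dataset = tf.data.Dataset.range(num_elements).apply(
        tf.data.experimental.shuffle_and_repeat(
            buffer_size=num_elements,  # a buffer covering one full epoch
            count=num_epochs))

    # The pipeline yields exactly num_elements * num_epochs values.
    assert sum(1 for _ in dataset) == num_elements * num_epochs
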
stats_dataset_ops_test.py
    406: num_epochs = 5
    407: total_records = num_epochs * self._num_records
    413: num_epochs=num_epochs,
    456: self._sum_keywords(1) * num_epochs + 3 * total_records)

auto_shard_dataset_test.py
    251: num_epochs, index, batch_size, parallel_reads = params
    254: num_epochs=num_epochs,
    266: num_epochs=num_epochs,
    558: num_epochs=1)

/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/serialization/

tf_record_dataset_serialization_test.py
    40: num_epochs,
    68: buffer_size=buffer_size).repeat(num_epochs).batch(batch_size)
    72: num_epochs = 5
    73: batch_size = num_epochs
    74: num_outputs = num_epochs * self._num_files * self._num_records // batch_size
    77: lambda: self._build_iterator_graph(num_epochs, batch_size,
    81: lambda: self._build_iterator_graph(num_epochs, buffer_size=0),
    87: num_epochs = 5
    88: num_outputs = num_epochs * self._num_files * self._num_records
    89: self.run_core_tests(lambda: self._build_iterator_graph(num_epochs),
    [all …]

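A rough reconstruction of the pipeline shape these serialization tests checkpoint: TFRecords, repeated num_epochs times, then batched, so the expected element count is num_epochs * num_files * num_records // batch_size. The shard file names are placeholders.

    import tensorflow as tf

    filenames = ["/tmp/shard-0.tfrecord", "/tmp/shard-1.tfrecord"]  # assumed to exist
    num_epochs = 5
    batch_size = 5

    dataset = (tf.data.TFRecordDataset(filenames)
               .repeat(num_epochs)
               .batch(batch_size))
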
fixed_length_record_dataset_serialization_test.py
    35: def _build_iterator_graph(self, num_epochs, compression_type=None):
    39: self._footer_bytes).repeat(num_epochs)
    43: num_epochs = 5
    44: num_outputs = num_epochs * self._num_files * self._num_records
    45: self.run_core_tests(lambda: self._build_iterator_graph(num_epochs),

parse_example_dataset_serialization_test.py
    37: num_epochs=num_repeat,

/external/tensorflow/tensorflow/python/data/benchmarks/

from_tensor_slices_benchmark.py
    67: num_epochs = 100
    68: num_elements = input_size * num_epochs // batch_size
    74: num_epochs).batch(batch_size))
    84: num_epochs = 100
    86: num_elements = num_epochs * reshape_dim[0]
    92: input_data.reshape(*reshape_dim)).repeat(num_epochs))
    134: num_epochs = 100
    135: num_elements = input_size * num_epochs // batch_size
    141: batch_size).cache().repeat(num_epochs))

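Sketch of the two pipeline shapes being benchmarked: repeat-then-batch versus batch-cache-then-repeat over an in-memory array. The sizes are illustrative; only num_epochs = 100 comes from the benchmark.

    import numpy as np
    import tensorflow as tf

    input_size = 10000
    batch_size = 100
    num_epochs = 100
    data = np.random.rand(input_size).astype(np.float32)

    # Variant 1: repeat the slices, then batch.
    ds_repeat_batch = (tf.data.Dataset.from_tensor_slices(data)
                       .repeat(num_epochs)
                       .batch(batch_size))

    # Variant 2: batch once, cache the batches, then repeat the cache.
    ds_batch_cache_repeat = (tf.data.Dataset.from_tensor_slices(data)
                             .batch(batch_size)
                             .cache()
                             .repeat(num_epochs))

    # Both produce input_size * num_epochs // batch_size batches per run.
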
/external/tensorflow/tensorflow/python/data/experimental/ops/

readers.py
    214: dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed):
    218: if num_epochs != 1:
    219: dataset = dataset.repeat(num_epochs)
    226: num_epochs=None,
    300: dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed)
    306: drop_final_batch = drop_final_batch or num_epochs is None
    333: num_epochs=None,
    593: dataset, num_epochs, shuffle, shuffle_buffer_size, shuffle_seed)
    602: drop_remainder=num_epochs is None)
    622: num_epochs=None,
    [all …]

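The num_epochs convention these reader helpers follow, restated as a small standalone sketch (simplified, not the private implementation): None means repeat forever, 1 means a single pass with no repeat op inserted, and an infinite stream is what makes dropping the final partial batch lossless.

    import tensorflow as tf

    def repeat_for_epochs(dataset, num_epochs):
        # None -> infinite repeat; 1 -> unchanged; k -> repeat k times.
        if num_epochs != 1:
            dataset = dataset.repeat(num_epochs)
        return dataset

    dataset = repeat_for_epochs(tf.data.Dataset.range(10), num_epochs=None)
    # With num_epochs=None the stream never ends, so a short final batch can
    # never occur, mirroring `drop_remainder=num_epochs is None` above.
    dataset = dataset.batch(4, drop_remainder=True)
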
/external/tensorflow/tensorflow/python/training/

input.py
    85: def limit_epochs(tensor, num_epochs=None, name=None):
    103: if num_epochs is None:
    105: if num_epochs <= 0:
    106: raise ValueError("num_epochs must be > 0 not %d." % num_epochs)
    112: counter = epochs.count_up_to(num_epochs)
    125: num_epochs=None,
    189: input_tensor = limit_epochs(input_tensor, num_epochs)
    212: num_epochs=None,
    270: num_epochs=num_epochs,
    285: def range_input_producer(limit, num_epochs=None, shuffle=True, seed=None,
    [all …]

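A hedged sketch of the legacy v1 API defined here: tf.compat.v1.train.limit_epochs wraps a tensor so it can be evaluated at most num_epochs times before raising OutOfRangeError, keeping its counter in a local variable. Graph mode only.

    import tensorflow as tf

    tf.compat.v1.disable_eager_execution()

    values = tf.constant([1, 2, 3])
    limited = tf.compat.v1.train.limit_epochs(values, num_epochs=2)

    with tf.compat.v1.Session() as sess:
        # The epoch counter lives in the local-variables collection.
        sess.run(tf.compat.v1.local_variables_initializer())
        sess.run(limited)      # epoch 1
        sess.run(limited)      # epoch 2
        try:
            sess.run(limited)  # a third evaluation exhausts the counter
        except tf.errors.OutOfRangeError:
            print("num_epochs reached")
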
input_test.py
    86: love_me_two_times = inp.limit_epochs(love_me, num_epochs=2)
    102: num_epochs = 2
    104: input_tensor, num_epochs=num_epochs, shuffle=False)
    105: dequeue_many = queue.dequeue_many(len(input_tensor) * num_epochs)
    112: self.assertAllEqual(input_tensor * num_epochs,
    128: num_epochs = 2
    130: input_tensor, element_shape=[4], num_epochs=num_epochs, shuffle=False)
    131: dequeue_many = queue.dequeue_many(len(input_value) * num_epochs)
    138: self.assertAllEqual(input_value * num_epochs, self.evaluate(dequeue_many))
    158: num_epochs = 3
    [all …]

/external/tensorflow/tensorflow/tools/api/golden/v1/

tensorflow.estimator.inputs.pbtxt
    5: …argspec: "args=[\'x\', \'y\', \'batch_size\', \'num_epochs\', \'shuffle\', \'queue_capacity\', \'n…
    9: …argspec: "args=[\'x\', \'y\', \'batch_size\', \'num_epochs\', \'shuffle\', \'queue_capacity\', \'n…

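The truncated argspecs above look like the v1 estimator input helpers (numpy_input_fn and pandas_input_fn). Assuming numpy_input_fn, whose signature starts with the x / y / batch_size / num_epochs / shuffle / queue_capacity arguments shown, a usage sketch:

    import numpy as np
    import tensorflow as tf

    x = {"feature": np.arange(100, dtype=np.float32)}
    y = np.arange(100, dtype=np.float32)

    # Returns an input_fn; the underlying queue raises OutOfRangeError once
    # num_epochs passes over the data have been produced.
    input_fn = tf.compat.v1.estimator.inputs.numpy_input_fn(
        x=x, y=y,
        batch_size=16,
        num_epochs=2,
        shuffle=False)
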
tensorflow.train.pbtxt
    349: …argspec: "args=[\'input_tensor\', \'element_shape\', \'num_epochs\', \'shuffle\', \'seed\', \'capa…
    361: …argspec: "args=[\'tensor\', \'num_epochs\', \'name\'], varargs=None, keywords=None, defaults=[\'No…
    421: …argspec: "args=[\'limit\', \'num_epochs\', \'shuffle\', \'seed\', \'capacity\', \'shared_name\', \…
    453: …argspec: "args=[\'tensor_list\', \'num_epochs\', \'shuffle\', \'seed\', \'capacity\', \'shared_nam…
    461: …argspec: "args=[\'string_tensor\', \'num_epochs\', \'shuffle\', \'seed\', \'capacity\', \'shared_n…

/external/tensorflow/tensorflow/python/data/kernel_tests/

checkpoint_test.py
    213: def _build_graph(start, stop, num_epochs):
    215: dataset_ops.Dataset.range(start, stop).repeat(num_epochs))
    224: num_epochs = 5
    229: start, stop, num_epochs)
    246: init_op, get_next, _, restore_op = _build_graph(start, stop, num_epochs)
    252: for _ in range(break_epoch, num_epochs):
    261: def _build_graph(start, stop, num_epochs):
    263: dataset_ops.Dataset.range(start, stop).repeat(num_epochs))
    272: num_epochs = 5
    275: start, stop, num_epochs)
    [all …]

tf_record_dataset_test.py
    48: num_epochs=1,
    52: filenames, compression_type).repeat(num_epochs)
    107: dataset = self._dataset_factory(self.test_filenames, num_epochs=10)
    117: self.test_filenames, num_epochs=10, batch_size=self._num_records)

text_line_dataset_test.py
    88: def dataset_fn(filenames, num_epochs, batch_size=None):
    90: filenames, compression_type=compression_type).repeat(num_epochs)

/external/tensorflow/tensorflow/python/keras/distribute/

mirrored_strategy_test.py
    130: num_epochs = 4
    132: for _ in range(num_epochs):
    138: self.assertEqual(optimizer.iterations.numpy(), num_epochs * num_steps)

/external/tensorflow/tensorflow/python/compiler/tensorrt/test/

quantization_mnist_test.py
    165: def _Run(self, is_training, use_trt, batch_size, num_epochs, model_dir):
    205: dataset = dataset.repeat(count=num_epochs)
    271: num_epochs=None,
    283: num_epochs=None,

/external/tensorflow/tensorflow/python/keras/integration_test/

tpu_strategy_test.py
    186: num_epochs = 4
    188: for _ in range(num_epochs):
    194: self.assertEqual(optimizer.iterations.numpy(), num_epochs * num_steps)

/external/tensorflow/tensorflow/python/tpu/

datasets.py
    58: num_epochs: Optional[int] = None,
    154: source_dataset = source_dataset.repeat(num_epochs)

/external/tensorflow/tensorflow/python/distribute/

input_ops_test.py
    166: num_epochs = 5
    173: dataset = dataset.repeat(num_epochs)
    185: num_iterations = (self._num_files * self._num_records * num_epochs) // (
    196: expected *= num_epochs

/external/tensorflow/tensorflow/python/keras/layers/

local_test.py
    287: num_epochs = 2
    341: epochs=num_epochs,
    347: epochs=num_epochs,
    353: epochs=num_epochs,