/external/tensorflow/tensorflow/python/data/ops/
  dataset_ops.py
    146:   …[input_dataset._has_captured_ref() for input_dataset in self._inputs()])  # pylint: disable=protec…
    165:   for input_dataset in self._inputs():
    166:   input_options = input_dataset.options()
    2041:  def __init__(self, input_dataset, variant_tensor):
    2042:  self._input_dataset = input_dataset
    2052:  def __init__(self, input_dataset, variant_tensor):
    2053:  self._input_dataset = input_dataset
    2055:  input_dataset, variant_tensor)
    2550:  def __init__(self, input_dataset, dataset_to_concatenate):
    2552:  self._input_dataset = input_dataset
    [all …]
/external/tensorflow/tensorflow/python/data/experimental/ops/
  optimization.py
    106:  def __init__(self, input_dataset, transformations):
    108:  self._input_dataset = input_dataset
    118:  super(_AssertNextDataset, self).__init__(input_dataset, variant_tensor)
    124:  def __init__(self, input_dataset):
    126:  self._input_dataset = input_dataset
    131:  super(_NonSerializableDataset, self).__init__(input_dataset, variant_tensor)
    185:  input_dataset,
    254:  input_dataset.output_types, input_dataset.output_shapes,
    255:  input_dataset.output_classes)))
    279:  input_dataset._variant_tensor,  # pylint: disable=protected-access
    [all …]
  grouping.py
    249:  def __init__(self, input_dataset, key_func, reducer):
    251:  self._input_dataset = input_dataset
    252:  self._make_key_func(key_func, input_dataset)
    254:  self._make_reduce_func(reducer.reduce_func, input_dataset)
    267:  super(_GroupByReducerDataset, self).__init__(input_dataset, variant_tensor)
    269:  def _make_key_func(self, key_func, input_dataset):
    272:  key_func, self._transformation_name(), dataset=input_dataset)
    287:  def _make_reduce_func(self, reduce_func, input_dataset):
    303:  … (self._state_structure, input_dataset._element_structure)),  # pylint: disable=protected-access
    373:  def __init__(self, input_dataset, key_func, reduce_func, window_size_func):
    [all …]
  batching.py
    363:  def __init__(self, input_dataset):
    365:  input_shapes = dataset_ops.get_legacy_output_shapes(input_dataset)
    376:  self._input_dataset = input_dataset
    379:  dataset_ops.get_legacy_output_types(input_dataset),
    381:  dataset_ops.get_legacy_output_classes(input_dataset))
    386:  super(_UnbatchDataset, self).__init__(input_dataset, variant_tensor)
    451:  def __init__(self, input_dataset, batch_size, row_shape):
    454:  dataset_ops.get_legacy_output_types(input_dataset), dtypes.DType):
    457:  dataset_ops.get_legacy_output_types(input_dataset))
    458:  self._input_dataset = input_dataset
    [all …]
  distribute.py
    43:  def __init__(self, input_dataset, num_workers, index):
    44:  self._input_dataset = input_dataset
    46:  self._structure = input_dataset._element_structure  # pylint: disable=protected-access
    52:  super(_AutoShardDataset, self).__init__(input_dataset, variant_tensor)
  unique.py
    54:  def __init__(self, input_dataset):
    56:  self._input_dataset = input_dataset
    57:  if dataset_ops.get_legacy_output_types(input_dataset) not in (
    65:  super(_UniqueDataset, self).__init__(input_dataset, variant_tensor)
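The _UniqueDataset wrapper above backs the public tf.data.experimental.unique() transformation; the type check at line 57 restricts it to integer and string elements. A minimal usage sketch against the TF 1.x-era public API:

    import tensorflow as tf

    dataset = tf.data.Dataset.from_tensor_slices([1, 2, 2, 3, 1])
    # unique() wraps its input in _UniqueDataset; yields 1, 2, 3.
    dataset = dataset.apply(tf.data.experimental.unique())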
  prefetching_ops.py
    88:   def __init__(self, input_dataset, target_device, source_device="/cpu:0"):
    96:   self._input_dataset = input_dataset
    212:  super(_CopyToDeviceDataset, self).__init__(input_dataset, variant_tensor)
    231:  def __init__(self, input_dataset, map_func, use_inter_op_parallelism=True):
    233:  self._input_dataset = input_dataset
    239:  dataset=input_dataset,
    247:  super(_MapOnGpuDataset, self).__init__(input_dataset, variant_tensor)
  error_ops.py
    58:  def __init__(self, input_dataset):
    60:  self._input_dataset = input_dataset
    65:  super(_IgnoreErrorsDataset, self).__init__(input_dataset, variant_tensor)
  sleep.py
    27:  def __init__(self, input_dataset, sleep_microseconds):
    28:  self._input_dataset = input_dataset
    34:  super(_SleepDataset, self).__init__(input_dataset, variant_tensor)
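The sleep.py matches show, in its simplest form, the unary wrapper pattern shared by nearly every class in this listing: stash the upstream dataset, build a variant tensor from a generated op, and hand both to the base-class constructor. Below is a hedged reconstruction, not verbatim source: the op wrapper name (gen_experimental_dataset_ops.sleep_dataset), the flat_structure helper, and the UnaryUnchangedStructureDataset base are assumptions about this TF snapshot.

    from tensorflow.python.data.ops import dataset_ops
    from tensorflow.python.ops import gen_experimental_dataset_ops  # assumed module

    class _SleepDataset(dataset_ops.UnaryUnchangedStructureDataset):
      """Sleeps before producing each element of `input_dataset` (sketch)."""

      def __init__(self, input_dataset, sleep_microseconds):
        self._input_dataset = input_dataset  # line 28 above
        variant_tensor = gen_experimental_dataset_ops.sleep_dataset(  # assumed op name
            input_dataset._variant_tensor,  # pylint: disable=protected-access
            sleep_microseconds,
            **dataset_ops.flat_structure(self))
        super(_SleepDataset, self).__init__(input_dataset, variant_tensor)  # line 34 above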
  take_while_ops.py
    30:  def __init__(self, input_dataset, predicate):
    33:  self._input_dataset = input_dataset
    49:  super(_TakeWhileDataset, self).__init__(input_dataset, var_tensor)
  stats_ops.py
    103:  def __init__(self, input_dataset, op_function, tag):
    104:  self._input_dataset = input_dataset
    111:  super(_StatsDataset, self).__init__(input_dataset, variant_tensor)
  parsing_ops.py
    33:  def __init__(self, input_dataset, features, num_parallel_calls):
    34:  self._input_dataset = input_dataset
    35:  if not input_dataset._element_structure.is_compatible_with(  # pylint: disable=protected-access
    91:  super(_ParseExampleDataset, self).__init__(input_dataset, variant_tensor)
/external/tensorflow/tensorflow/python/data/kernel_tests/
  concatenate_test.py
    44:   input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    47:   concatenated = input_dataset.concatenate(dataset_to_concatenate)
    75:   input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    78:   concatenated = input_dataset.concatenate(dataset_to_concatenate)
    106:  input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    111:  input_dataset.concatenate(dataset_to_concatenate)
    123:  input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    128:  input_dataset.concatenate(dataset_to_concatenate)
    138:  input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
    143:  input_dataset.concatenate(dataset_to_concatenate)
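This test exercises Dataset.concatenate in both its success and failure modes. A minimal sketch of the public behavior under test:

    import tensorflow as tf

    a = tf.data.Dataset.from_tensor_slices([1, 2, 3])
    b = tf.data.Dataset.from_tensor_slices([4, 5])
    concatenated = a.concatenate(b)  # yields 1, 2, 3, 4, 5

    # Incompatible element types raise TypeError, which is what the
    # later cases (lines 106-143) assert.
    strings = tf.data.Dataset.from_tensor_slices(["x", "y"])
    try:
      a.concatenate(strings)
    except TypeError as e:
      print("incompatible:", e)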
  dataset_test.py
    145:  input_dataset = input_dataset_fn()
    146:  self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
    149:  input_dataset = dataset_ops.Dataset.range(0)
    151:  self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
    163:  input_dataset = input_dataset_fn()
    165:  self.assertEqual([input_dataset], dataset_fn(input_dataset)._inputs())
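These assertions check the internal _inputs() hook: every unary transformation is expected to report the dataset it wraps. A hedged illustration; _inputs() is internal API and the exact V1/V2 adapter wrapping of the returned list varies across releases, so this prints rather than asserts:

    import tensorflow as tf

    input_dataset = tf.data.Dataset.range(10)
    cached = input_dataset.cache()
    # Internal hook used by the tests above; expected to be [input_dataset].
    print(cached._inputs())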
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
  tf_record_writer_test.py
    42:   input_dataset = readers.TFRecordDataset([filename], compression_type)
    44:   compression_type).write(input_dataset)
    89:   input_dataset = dataset_ops.Dataset.from_tensors(10)
    91:   writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
    94:   input_dataset = dataset_ops.Dataset.from_tensors([["hello"], ["world"]])
    96:   writers.TFRecordWriter(self._outputFilename(), "").write(input_dataset)
    100:  input_dataset = readers.TFRecordDataset(self._createFile())
    101:  return writers.TFRecordWriter(self._outputFilename()).write(input_dataset)
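This test drives tf.data.experimental.TFRecordWriter, which consumes a dataset of scalar strings; the from_tensors(10) and nested-string cases above assert that other element types and shapes are rejected. A minimal TF 1.x usage sketch (the output path is illustrative):

    import tensorflow as tf

    input_dataset = tf.data.Dataset.from_tensor_slices([b"hello", b"world"])
    writer = tf.data.experimental.TFRecordWriter("/tmp/out.tfrecord")
    write_op = writer.write(input_dataset)  # elements must be scalar strings

    with tf.Session() as sess:
      sess.run(write_op)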
/external/tensorflow/tensorflow/core/api_def/base_api/
  api_def_ExperimentalDatasetCardinality.pbtxt
    5:   name: "input_dataset"
    13:  The cardinality of `input_dataset`. Named constants are used to represent
    17:  summary: "Returns the cardinality of `input_dataset`."
    19:  Returns the cardinality of `input_dataset`.
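This op surfaces in Python as tf.data.experimental.cardinality, assuming a release where that symbol is exported. A sketch of the named-constant behavior the api_def mentions:

    import tensorflow as tf

    finite = tf.data.experimental.cardinality(tf.data.Dataset.range(5).batch(2))
    infinite = tf.data.experimental.cardinality(tf.data.Dataset.range(5).repeat())

    with tf.Session() as sess:
      print(sess.run(finite))  # 3: ceil(5 / 2) batches
      # INFINITE_CARDINALITY is the named constant for endless datasets.
      print(sess.run(infinite) == tf.data.experimental.INFINITE_CARDINALITY)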
  api_def_ExperimentalGroupByReducerDataset.pbtxt
    5:   name: "input_dataset"
    20:  A function mapping an element of `input_dataset`, concatenated
    48:  A function mapping the current reducer state and an element of `input_dataset`,
    65:  summary: "Creates a dataset that computes a group-by on `input_dataset`."
    67:  Creates a dataset that computes a group-by on `input_dataset`.
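The Python entry point for this op is tf.data.experimental.group_by_reducer, taking a Reducer(init_func, reduce_func, finalize_func) whose pieces correspond to the key and reduce functions the api_def describes. A small sketch summing elements by parity (the int64 state matches Dataset.range's element type):

    import tensorflow as tf

    reducer = tf.data.experimental.Reducer(
        init_func=lambda key: tf.constant(0, dtype=tf.int64),
        reduce_func=lambda state, x: state + x,
        finalize_func=lambda state: state)

    # Groups range(10) by parity and sums each group: yields 20 and 25.
    dataset = tf.data.Dataset.range(10).apply(
        tf.data.experimental.group_by_reducer(
            key_func=lambda x: x % 2, reducer=reducer))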
  api_def_TakeDataset.pbtxt
    7:   A scalar representing the number of elements from the `input_dataset`
    8:   that should be taken. A value of `-1` indicates that all of `input_dataset`
    12:  summary: "Creates a dataset that contains `count` elements from the `input_dataset`."
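TakeDataset backs the public Dataset.take. A one-liner illustrating the -1 convention described above:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(10)
    first_three = dataset.take(3)   # count = 3
    everything = dataset.take(-1)   # count = -1: take all of input_dataset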
  api_def_DatasetToGraph.pbtxt
    5:   name: "input_dataset"
    16:  summary: "Returns a serialized GraphDef representing `input_dataset`."
    18:  Returns a graph representation for `input_dataset`.
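DatasetToGraph has a generated Python wrapper that takes a dataset's variant tensor and returns a scalar string holding a serialized GraphDef. A hedged sketch; reaching the variant tensor is internal API, and the accessor name differs between DatasetV1 (_as_variant_tensor()) and DatasetV2 (_variant_tensor):

    import tensorflow as tf
    from tensorflow.python.ops import gen_dataset_ops

    dataset = tf.data.Dataset.range(5)
    serialized = gen_dataset_ops.dataset_to_graph(dataset._as_variant_tensor())

    with tf.Session() as sess:
      graph_def = tf.GraphDef()
      graph_def.ParseFromString(sess.run(serialized))
      print(len(graph_def.node))  # nodes of the dataset's serialized graph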
  api_def_OptimizeDataset.pbtxt
    5:   name: "input_dataset"
    16:  summary: "Creates a dataset by applying optimizations to `input_dataset`."
    18:  Creates a dataset by applying optimizations to `input_dataset`.
  api_def_ExperimentalMapAndBatchDataset.pbtxt
    5:   name: "input_dataset"
    22:  elements from `input_dataset` in parallel.
    43:  A function to apply to the outputs of `input_dataset`.
    48:  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
  api_def_ExperimentalNumaMapAndBatchDataset.pbtxt
    5:   name: "input_dataset"
    22:  elements from `input_dataset` in parallel.
    43:  A function to apply to the outputs of `input_dataset`.
    48:  Creates a dataset that applies `f` to the outputs of `input_dataset` and then
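Both fused kernels back tf.data.experimental.map_and_batch; the NUMA variant appears to be an opt-in alternative in this snapshot, and the selection mechanism is outside these snippets. A usage sketch of the map+batch fusion the api_defs describe:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(100)
    # Fuses map and batch: applies `f` to batches of elements from
    # input_dataset in parallel, per the api_def above.
    dataset = dataset.apply(
        tf.data.experimental.map_and_batch(
            map_func=lambda x: x * 2, batch_size=10, num_parallel_calls=4))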
/external/tensorflow/tensorflow/contrib/data/python/ops/
  sliding.py
    30:  def __init__(self, input_dataset, window_size, window_shift, window_stride):
    32:  self._input_dataset = input_dataset
    40:  input_structure = dataset_ops.get_structure(input_dataset)
    48:  super(_SlideDataset, self).__init__(input_dataset, variant_tensor)
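contrib's _SlideDataset predates the public windowing API. A hedged sketch of the core counterpart, assuming a release with Dataset.window, whose size/shift/stride arguments map onto window_size/window_shift/window_stride above:

    import tensorflow as tf

    dataset = tf.data.Dataset.range(10)
    # Sliding windows of 3 elements advancing by 1, roughly
    # _SlideDataset(input_dataset, window_size=3, window_shift=1, window_stride=1).
    windows = dataset.window(size=3, shift=1, stride=1, drop_remainder=True)
    # Each element is itself a dataset; flatten each window to one tensor.
    batched = windows.flat_map(lambda w: w.batch(3))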
/external/tensorflow/tensorflow/python/data/util/
  traverse_test.py
    31:  def __init__(self, input_dataset):
    32:  self._input_dataset = input_dataset
    34:  input_dataset._variant_tensor,
    39:  super(_TestDataset, self).__init__(input_dataset, variant_tensor)
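traverse_test.py defines this toy _TestDataset to exercise the traverse utility, which walks the ops feeding a dataset's variant tensor. A hedged sketch of the call under test; the function name is an assumption about this snapshot's traverse.py:

    import tensorflow as tf
    from tensorflow.python.data.util import traverse

    dataset = tf.data.Dataset.range(10).map(lambda x: x + 1)
    # Walks the graph upstream of the dataset's variant tensor and returns
    # the dataset ops found (e.g. RangeDataset, MapDataset).
    ops = traverse.obtain_all_variant_tensor_ops(dataset)
    print([op.name for op in ops])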
/external/tensorflow/tensorflow/core/grappler/optimizers/data/
  map_vectorization_test.cc
    71:   NodeDef* AddMapNode(MutableGraphView* graph, const string& input_dataset,  [in AddMapNode()]
    79:   /*inputs=*/{input_dataset, num_parallel_calls_node->name()},  [in AddMapNode()]
    91:   /*inputs=*/{input_dataset},  [in AddMapNode()]
    105:  NodeDef* AddPrefetchNode(MutableGraphView* graph, const string& input_dataset,  [in AddPrefetchNode()]
    110:  /*inputs=*/{input_dataset, buffer_size_node->name()},  [in AddPrefetchNode()]
    118:  NodeDef* AddBatchNode(MutableGraphView* graph, const string& input_dataset,  [in AddBatchNode()]
    129:  {input_dataset, batch_size_node->name(), drop_remainder->name()},  [in AddBatchNode()]
    136:  /*inputs=*/{input_dataset, batch_size_node->name()},  [in AddBatchNode()]
    285:  const string& input_dataset, const string& map_fn,  [in AddMapAndBatchNode()]
    297:  {input_dataset, batch_size_node->name(),  [in AddMapAndBatchNode()]