/external/grpc-grpc/tools/gcp/utils/ |
D | big_query_utils.py |
   38: def create_dataset(biq_query, project_id, dataset_id):   [argument]
   43: 'datasetId': dataset_id
   53: print 'Warning: The dataset %s already exists' % dataset_id
   56: print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
   62: def create_table(big_query, project_id, dataset_id, table_id, table_schema,   [argument]
   69: return create_table2(big_query, project_id, dataset_id, table_id, fields,
   75: dataset_id,   [argument]
   89: return create_table2(big_query, project_id, dataset_id, table_id, fields,
   95: dataset_id,   [argument]
  109: 'datasetId': dataset_id,
  [all …]
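These helpers drive the BigQuery v2 REST API through google-api-python-client. Below is a minimal sketch of the create_dataset pattern above, assuming a discovery-built client and default credentials; the 409 handling that downgrades "already exists" to a warning is an assumption mirroring the excerpt, and the project/dataset names are placeholders.

# Sketch of the create_dataset flow above (google-api-python-client).
# Assumes default application credentials; the 409 check is an assumption
# that mirrors the "already exists" warning in the excerpt.
from googleapiclient import discovery
from googleapiclient.errors import HttpError


def create_dataset(big_query, project_id, dataset_id):
    """Creates dataset_id in project_id; returns False only on a real error."""
    body = {
        'datasetReference': {
            'projectId': project_id,
            'datasetId': dataset_id,
        }
    }
    try:
        req = big_query.datasets().insert(projectId=project_id, body=body)
        req.execute(num_retries=3)
        return True
    except HttpError as http_error:
        if http_error.resp.status == 409:  # dataset already exists
            print('Warning: The dataset %s already exists' % dataset_id)
            return True
        print('Error in creating dataset: %s. Err: %s' % (dataset_id,
                                                          http_error))
        return False


if __name__ == '__main__':
    bq = discovery.build('bigquery', 'v2')  # placeholder project below
    create_dataset(bq, 'my-project', 'my_dataset')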
|
/external/tensorflow/tensorflow/core/data/service/ |
D | dispatcher_state_test.cc |
   59: Status CreateAnonymousJob(int64 job_id, int64 dataset_id,   [in CreateAnonymousJob(), argument]
   64: create_job->set_dataset_id(dataset_id);   [in CreateAnonymousJob()]
   70: Status CreateNamedJob(int64 job_id, int64 dataset_id, NamedJobKey named_job_key,   [in CreateNamedJob(), argument]
   75: create_job->set_dataset_id(dataset_id);   [in CreateNamedJob()]
  136: EXPECT_EQ(dataset->dataset_id, id);   [in TEST()]
  212: int64 dataset_id = 10;   [in TEST(), local]
  215: TF_EXPECT_OK(RegisterDataset(dataset_id, state));   [in TEST()]
  216: TF_EXPECT_OK(CreateAnonymousJob(job_id, dataset_id, state));   [in TEST()]
  220: EXPECT_EQ(job->dataset_id, dataset_id);   [in TEST()]
  229: int64 dataset_id = 10;   [in TEST(), local]
  [all …]
|
D | dispatcher_state.h |
   65: explicit Dataset(int64 dataset_id, int64 fingerprint)   [in Dataset()]
   66:     : dataset_id(dataset_id), fingerprint(fingerprint) {}   [in Dataset()]
   68: const int64 dataset_id;   [member]
  126: explicit Job(int64 job_id, int64 dataset_id, ProcessingMode processing_mode,   [in Job()]
  130: dataset_id(dataset_id),   [in Job()]
  140: const int64 dataset_id;   [member]
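The header keeps one immutable Dataset record per registered graph (identified by dataset_id and deduplicated by fingerprint) and one Job record that points back at its dataset through dataset_id. The following is a toy Python model of those relationships, purely illustrative: the field names follow the header, but the bookkeeping is a simplification, not the TensorFlow implementation.

# Toy, simplified model of the dispatcher state sketched in the header above.
from dataclasses import dataclass, field
from typing import Dict


@dataclass(frozen=True)
class Dataset:
    dataset_id: int
    fingerprint: int


@dataclass(frozen=True)
class Job:
    job_id: int
    dataset_id: int
    processing_mode: str  # e.g. "parallel_epochs"


@dataclass
class DispatcherState:
    datasets_by_id: Dict[int, Dataset] = field(default_factory=dict)
    datasets_by_fingerprint: Dict[int, Dataset] = field(default_factory=dict)
    jobs: Dict[int, Job] = field(default_factory=dict)

    def register_dataset(self, dataset_id: int, fingerprint: int) -> Dataset:
        # Reuse the existing record when the same graph is registered twice.
        existing = self.datasets_by_fingerprint.get(fingerprint)
        if existing is not None:
            return existing
        dataset = Dataset(dataset_id, fingerprint)
        self.datasets_by_id[dataset_id] = dataset
        self.datasets_by_fingerprint[fingerprint] = dataset
        return dataset

    def create_job(self, job_id: int, dataset_id: int, mode: str) -> Job:
        # Jobs may only reference datasets that were registered first.
        assert dataset_id in self.datasets_by_id, "unknown dataset_id"
        job = Job(job_id, dataset_id, mode)
        self.jobs[job_id] = job
        return job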
|
D | dispatcher_impl.cc |
  194: TF_RETURN_IF_ERROR(MakeSplitProvider(job.dataset_id, split_provider));   [in RestoreSplitProvider()]
  240: TF_RETURN_IF_ERROR(state_.DatasetFromId(task->job->dataset_id, dataset));   [in WorkerHeartbeat()]
  242: DatasetKey(dataset->dataset_id, dataset->fingerprint);   [in WorkerHeartbeat()]
  252: task_def->set_dataset_id(task->job->dataset_id);   [in WorkerHeartbeat()]
  301: TF_RETURN_IF_ERROR(state_.DatasetFromId(request->dataset_id(), dataset));   [in GetDatasetDef()]
  341: MakeSplitProvider(job->dataset_id, split_providers_[job_id]));   [in GetSplit()]
  350: int64 dataset_id, std::unique_ptr<SplitProvider>& split_provider)   [in MakeSplitProvider(), argument]
  353: TF_RETURN_IF_ERROR(state_.DatasetFromId(dataset_id, dataset));   [in MakeSplitProvider()]
  390: int64 id = dataset->dataset_id;   [in GetOrRegisterDataset()]
  408: int64& dataset_id)   [in RegisterDataset(), argument]
  [all …]
|
D | dispatcher_impl.h |
  159: Status MakeSplitProvider(int64 dataset_id,
  165: int64& dataset_id) TF_EXCLUSIVE_LOCKS_REQUIRED(mu_);
  174: Status CreateJob(int64 dataset_id, ProcessingMode processing_mode,
  215: ProcessingMode processing_mode, int64 dataset_id)
  234: Status GetDatasetDef(int64 dataset_id,
|
D | data_service.cc |
  104: Status DataServiceDispatcherClient::GetDatasetDef(int64 dataset_id,   [in GetDatasetDef(), argument]
  108: req.set_dataset_id(dataset_id);   [in GetDatasetDef()]
  142: int64& dataset_id) {   [in RegisterDataset(), argument]
  152: dataset_id = resp.dataset_id();   [in RegisterDataset()]
  157: int64 dataset_id, ProcessingMode processing_mode,   [in GetOrCreateJob(), argument]
  162: req.set_dataset_id(dataset_id);   [in GetOrCreateJob()]
  176: dataset_id),   [in GetOrCreateJob()]
|
D | data_service.h |
  102: Status GetDatasetDef(int64 dataset_id, DatasetDef& dataset_def);
  110: Status RegisterDataset(GraphDef dataset, int64& dataset_id);
  115: Status GetOrCreateJob(int64 dataset_id, ProcessingMode processing_mode,
|
D | journal.proto |
  26: int64 dataset_id = 1;   [field]
  42: int64 dataset_id = 2;   [field]
|
D | dispatcher.proto |
  34: int64 dataset_id = 1;   [field]
  64: int64 dataset_id = 1;   [field]
  78: int64 dataset_id = 1;   [field]
|
/external/rust/crates/grpcio-sys/grpc/tools/gcp/utils/ |
D | big_query_utils.py |
   42: def create_dataset(biq_query, project_id, dataset_id):   [argument]
   47: 'datasetId': dataset_id
   57: print('Warning: The dataset %s already exists' % dataset_id)
   61: (dataset_id, http_error))
   66: def create_table(big_query, project_id, dataset_id, table_id, table_schema,   [argument]
   73: return create_table2(big_query, project_id, dataset_id, table_id, fields,
   79: dataset_id,   [argument]
   93: return create_table2(big_query, project_id, dataset_id, table_id, fields,
   99: dataset_id,   [argument]
  113: 'datasetId': dataset_id,
  [all …]
|
/external/protobuf/benchmarks/util/ |
D | big_query_utils.py |
  26: def create_dataset(biq_query, project_id, dataset_id):   [argument]
  31: 'datasetId': dataset_id
  41: print('Warning: The dataset %s already exists' % dataset_id)
  44: print('Error in creating dataset: %s. Err: %s' % (dataset_id,
  50: def create_table(big_query, project_id, dataset_id, table_id, table_schema,   [argument]
  57: return create_table2(big_query, project_id, dataset_id, table_id, fields,
  63: dataset_id,   [argument]
  77: return create_table2(big_query, project_id, dataset_id, table_id, fields,
  83: dataset_id,   [argument]
  97: 'datasetId': dataset_id,
  [all …]
|
/external/grpc-grpc/tools/run_tests/performance/ |
D | bq_upload_result.py |
  37: def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):   [argument]
  56: _create_results_table(bq, dataset_id, table_id)
  59: bq, dataset_id, table_id, scenario_result, flatten=False):
  64: def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):   [argument]
  69: _create_results_table(bq, dataset_id, table_id)
  71: if not _insert_result(bq, dataset_id, table_id, scenario_result):
  76: def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):   [argument]
  81: return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
  85: def _create_results_table(bq, dataset_id, table_id):   [argument]
  90: return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
  [all …]
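_insert_result hands the scenario result to big_query_utils.insert_rows, which streams rows through the BigQuery v2 tabledata.insertAll endpoint. Here is a hedged sketch of that call; the row payload, project, and table names are illustrative, not the real upload schema.

# Sketch of the streaming-insert path behind big_query_utils.insert_rows.
# The payload shape is illustrative; real scenario results carry many fields.
from googleapiclient import discovery


def insert_rows(big_query, project_id, dataset_id, table_id, rows_list):
    """Streams rows_list into dataset_id.table_id; returns True on success."""
    resp = big_query.tabledata().insertAll(
        projectId=project_id,
        datasetId=dataset_id,
        tableId=table_id,
        body={'rows': rows_list}).execute(num_retries=3)
    if 'insertErrors' in resp:
        print('Error inserting rows: %s' % resp['insertErrors'])
        return False
    return True


if __name__ == '__main__':
    bq = discovery.build('bigquery', 'v2')  # placeholder names below
    row = {'insertId': 'result-0', 'json': {'metrics': {'qps': 1234.5}}}
    insert_rows(bq, 'my-project', 'perf_results', 'scenario_results', [row])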
|
D | patch_scenario_results_schema.py |
  36: def _patch_results_table(dataset_id, table_id):   [argument]
  42: return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
  57: dataset_id, table_id = args.bq_result_table.split('.', 2)   [variable]
  59: _patch_results_table(dataset_id, table_id)
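The script takes a "dataset.table" argument, splits it as on line 57, and widens the results schema through the BigQuery v2 tables.patch endpoint. A sketch under those assumptions follows; the field list and all names are illustrative, and note that patching a schema only allows additive changes.

# Sketch of the schema-patch flow; names and the field list are placeholders.
from googleapiclient import discovery


def patch_table(big_query, project_id, dataset_id, table_id, fields_schema):
    """Patches the table with an updated (superset) schema."""
    body = {'schema': {'fields': fields_schema}}
    return big_query.tables().patch(
        projectId=project_id,
        datasetId=dataset_id,
        tableId=table_id,
        body=body).execute(num_retries=3)


if __name__ == '__main__':
    bq = discovery.build('bigquery', 'v2')
    # Mirrors line 57 above: "dataset.table" splits into the two ids.
    dataset_id, table_id = 'perf_results.scenario_results'.split('.', 2)
    new_fields = [{'name': 'qps', 'type': 'FLOAT', 'mode': 'NULLABLE'}]
    patch_table(bq, 'my-project', dataset_id, table_id, new_fields)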
|
/external/rust/crates/grpcio-sys/grpc/tools/run_tests/performance/ |
D | bq_upload_result.py |
  37: def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file):   [argument]
  56: _create_results_table(bq, dataset_id, table_id)
  59: bq, dataset_id, table_id, scenario_result, flatten=False):
  64: def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file):   [argument]
  69: _create_results_table(bq, dataset_id, table_id)
  71: if not _insert_result(bq, dataset_id, table_id, scenario_result):
  76: def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True):   [argument]
  81: return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
  85: def _create_results_table(bq, dataset_id, table_id):   [argument]
  90: return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
  [all …]
|
D | patch_scenario_results_schema.py |
  36: def _patch_results_table(dataset_id, table_id):   [argument]
  42: return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
  56: dataset_id, table_id = args.bq_result_table.split('.', 2)   [variable]
  58: _patch_results_table(dataset_id, table_id)
|
/external/tensorflow/tensorflow/python/data/experimental/ops/ |
D | data_service_ops.py |
   58: dataset_id,   [argument]
  119: dataset_id, dtype=dtypes.int64, name="dataset_id")
  150: dataset_id=self._dataset_id,
  163: dataset_id=self._dataset_id,
  187: def __init__(self, dataset_id, processing_mode, address, protocol,   [argument]
  192: dataset_id=dataset_id,
  240: dataset_id,
  306: dataset_id=dataset_id,
  379: dataset_id = register_dataset(service, dataset)
  383: dataset_id,
  [all …]
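Line 379 shows the split form of the distribute transformation: register the dataset once with the dispatcher, then construct readers by id. A minimal sketch against a running tf.data service cluster follows, using the public tf.data.experimental.service API; the grpc address is a placeholder for a real dispatcher.

# Minimal sketch of the register/read-by-id flow surfaced by this module.
# "grpc://localhost:5000" is a placeholder for a running dispatcher.
import tensorflow as tf

ds = tf.data.Dataset.range(10)
service = "grpc://localhost:5000"

# Step 1: upload the dataset graph to the dispatcher; returns an int64 id.
dataset_id = tf.data.experimental.service.register_dataset(service, ds)

# Step 2: any client can now read by id. The element_spec must be supplied,
# since only the dispatcher holds the dataset definition.
ds_remote = tf.data.experimental.service.from_dataset_id(
    processing_mode="parallel_epochs",
    service=service,
    dataset_id=dataset_id,
    element_spec=ds.element_spec)

for elem in ds_remote:
    print(elem)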
|
/external/tensorflow/tensorflow/core/kernels/data/experimental/ |
D | data_service_ops.cc |
  62: int64 dataset_id;   [in Compute(), local]
  66: [&]() { return client.RegisterDataset(graph_def, dataset_id); },   [in Compute()]
  74: output_dataset_id() = dataset_id;   [in Compute()]
|
D | data_service_dataset_op.cc |
   86: Dataset(OpKernelContext* ctx, int op_version, int64 dataset_id,   [in Dataset(), argument]
   98: dataset_id_(dataset_id),   [in Dataset()]
  157: Node* dataset_id;   [in AsGraphDefInternal(), local]
  158: TF_RETURN_IF_ERROR(b->AddScalar(dataset_id_, &dataset_id));   [in AsGraphDefInternal()]
  159: inputs.push_back(dataset_id);   [in AsGraphDefInternal()]
  884: int64 dataset_id;   [in MakeDataset(), local]
  885: OP_REQUIRES_OK(ctx, ParseScalarArgument(ctx, kDatasetId, &dataset_id));   [in MakeDataset()]
  964: *output = new Dataset(ctx, op_version_, dataset_id, processing_mode, address,   [in MakeDataset()]
|
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/ |
D | DataServiceDataset.pbtxt |
   4: name: "dataset_id"
  59: name: "dataset_id"
|
D | DataServiceDatasetV2.pbtxt |
   4: name: "dataset_id"
  67: name: "dataset_id"
|
D | RegisterDataset.pbtxt |
  16: name: "dataset_id"
|
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/ |
D | RegisterDataset.pbtxt |
  16: name: "dataset_id"
|
/external/tensorflow/tensorflow/tools/api/golden/v1/ |
D | tensorflow.data.experimental.service.pbtxt |
  17: …argspec: "args=[\'processing_mode\', \'service\', \'dataset_id\', \'element_spec\', \'job_name\', …
|
/external/tensorflow/tensorflow/tools/api/golden/v2/ |
D | tensorflow.data.experimental.service.pbtxt |
  25: …argspec: "args=[\'processing_mode\', \'service\', \'dataset_id\', \'element_spec\', \'job_name\', …
|
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/ |
D | data_service_ops_test.py |
  716: dataset_id = data_service_ops.register_dataset(cluster.target, ds)
  718: "parallel_epochs", cluster.target, dataset_id, ds.element_spec)
  728: dataset_id = data_service_ops.register_dataset(cluster.target, ds)
  730: "parallel_epochs", cluster.target, dataset_id, ds.element_spec)
  743: dataset_id = data_service_ops.register_dataset(cluster.target, ds)
  746: "parallel_epochs", cluster.target, dataset_id, wrong_spec)
  755: dataset_id = 0
  758: "parallel_epochs", cluster.target, dataset_id, element_spec)
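These tests pair register_dataset with from_dataset_id and probe the failure modes: a mismatched element_spec (line 746) and a never-registered id (line 755). A sketch of the happy path plus the mismatch the tests reject; the address is a placeholder for cluster.target, which the test harness supplies.

# Sketch mirroring the tests above; requires a running dispatcher at `target`.
import tensorflow as tf
from tensorflow.python.data.experimental.ops import data_service_ops

target = "grpc://localhost:5000"  # placeholder for cluster.target
ds = tf.data.Dataset.range(10)

dataset_id = data_service_ops.register_dataset(target, ds)

# Correct element_spec: reading works, as on lines 716-718.
ds_ok = data_service_ops.from_dataset_id(
    "parallel_epochs", target, dataset_id, ds.element_spec)

# A wrong spec (tf.string for an int64 dataset), or an unregistered id such
# as 0, is what lines 743-758 expect the service to reject.
wrong_spec = tf.TensorSpec(shape=(), dtype=tf.string)
# data_service_ops.from_dataset_id(
#     "parallel_epochs", target, dataset_id, wrong_spec)  # expected to fail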
|