
Searched refs:dataset_id (Results 1 – 25 of 32) sorted by relevance


/external/grpc-grpc/tools/gcp/utils/
big_query_utils.py
38 def create_dataset(biq_query, project_id, dataset_id): argument
43 'datasetId': dataset_id
53 print 'Warning: The dataset %s already exists' % dataset_id
56 print 'Error in creating dataset: %s. Err: %s' % (dataset_id,
62 def create_table(big_query, project_id, dataset_id, table_id, table_schema, argument
69 return create_table2(big_query, project_id, dataset_id, table_id, fields,
75 dataset_id, argument
89 return create_table2(big_query, project_id, dataset_id, table_id, fields,
95 dataset_id, argument
109 'datasetId': dataset_id,
[all …]
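
These copies of big_query_utils.py thread dataset_id into a BigQuery v2 'datasetReference' body, which is what the 'datasetId': dataset_id lines above belong to. A minimal sketch of the same call pattern, assuming the google-api-python-client discovery client and default credentials; the helper bodies are elided in these results, and the project/dataset names below are placeholders:

    # Hedged sketch of the create_dataset pattern excerpted above.
    from googleapiclient import discovery
    from googleapiclient.errors import HttpError

    big_query = discovery.build('bigquery', 'v2')
    project_id, dataset_id = 'my-project', 'test_results'  # placeholders
    body = {
        'id': '%s:%s' % (project_id, dataset_id),
        'datasetReference': {
            'projectId': project_id,
            'datasetId': dataset_id,  # the field this search matched
        },
    }
    try:
        big_query.datasets().insert(projectId=project_id, body=body).execute()
    except HttpError as http_error:
        print('Error in creating dataset: %s. Err: %s' % (dataset_id, http_error))
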
/external/tensorflow/tensorflow/core/data/service/
dispatcher_state_test.cc
59 Status CreateAnonymousJob(int64 job_id, int64 dataset_id, in CreateAnonymousJob() argument
64 create_job->set_dataset_id(dataset_id); in CreateAnonymousJob()
70 Status CreateNamedJob(int64 job_id, int64 dataset_id, NamedJobKey named_job_key, in CreateNamedJob() argument
75 create_job->set_dataset_id(dataset_id); in CreateNamedJob()
136 EXPECT_EQ(dataset->dataset_id, id); in TEST()
212 int64 dataset_id = 10; in TEST() local
215 TF_EXPECT_OK(RegisterDataset(dataset_id, state)); in TEST()
216 TF_EXPECT_OK(CreateAnonymousJob(job_id, dataset_id, state)); in TEST()
220 EXPECT_EQ(job->dataset_id, dataset_id); in TEST()
229 int64 dataset_id = 10; in TEST() local
[all …]
dispatcher_state.h
65 explicit Dataset(int64 dataset_id, int64 fingerprint) in Dataset()
66 : dataset_id(dataset_id), fingerprint(fingerprint) {} in Dataset()
68 const int64 dataset_id; member
126 explicit Job(int64 job_id, int64 dataset_id, ProcessingMode processing_mode, in Job()
130 dataset_id(dataset_id), in Job()
140 const int64 dataset_id; member
dispatcher_impl.cc
194 TF_RETURN_IF_ERROR(MakeSplitProvider(job.dataset_id, split_provider)); in RestoreSplitProvider()
240 TF_RETURN_IF_ERROR(state_.DatasetFromId(task->job->dataset_id, dataset)); in WorkerHeartbeat()
242 DatasetKey(dataset->dataset_id, dataset->fingerprint); in WorkerHeartbeat()
252 task_def->set_dataset_id(task->job->dataset_id); in WorkerHeartbeat()
301 TF_RETURN_IF_ERROR(state_.DatasetFromId(request->dataset_id(), dataset)); in GetDatasetDef()
341 MakeSplitProvider(job->dataset_id, split_providers_[job_id])); in GetSplit()
350 int64 dataset_id, std::unique_ptr<SplitProvider>& split_provider) in MakeSplitProvider() argument
353 TF_RETURN_IF_ERROR(state_.DatasetFromId(dataset_id, dataset)); in MakeSplitProvider()
390 int64 id = dataset->dataset_id; in GetOrRegisterDataset()
408 int64& dataset_id) in RegisterDataset() argument
[all …]
dispatcher_impl.h
159 Status MakeSplitProvider(int64 dataset_id,
165 int64& dataset_id) TF_EXCLUSIVE_LOCKS_REQUIRED(mu_);
174 Status CreateJob(int64 dataset_id, ProcessingMode processing_mode,
215 ProcessingMode processing_mode, int64 dataset_id)
234 Status GetDatasetDef(int64 dataset_id,
data_service.cc
104 Status DataServiceDispatcherClient::GetDatasetDef(int64 dataset_id, in GetDatasetDef() argument
108 req.set_dataset_id(dataset_id); in GetDatasetDef()
142 int64& dataset_id) { in RegisterDataset() argument
152 dataset_id = resp.dataset_id(); in RegisterDataset()
157 int64 dataset_id, ProcessingMode processing_mode, in GetOrCreateJob() argument
162 req.set_dataset_id(dataset_id); in GetOrCreateJob()
176 dataset_id), in GetOrCreateJob()
data_service.h
102 Status GetDatasetDef(int64 dataset_id, DatasetDef& dataset_def);
110 Status RegisterDataset(GraphDef dataset, int64& dataset_id);
115 Status GetOrCreateJob(int64 dataset_id, ProcessingMode processing_mode,
journal.proto
26 int64 dataset_id = 1; field
42 int64 dataset_id = 2; field
dispatcher.proto
34 int64 dataset_id = 1; field
64 int64 dataset_id = 1; field
78 int64 dataset_id = 1; field
/external/rust/crates/grpcio-sys/grpc/tools/gcp/utils/
big_query_utils.py
42 def create_dataset(biq_query, project_id, dataset_id): argument
47 'datasetId': dataset_id
57 print('Warning: The dataset %s already exists' % dataset_id)
61 (dataset_id, http_error))
66 def create_table(big_query, project_id, dataset_id, table_id, table_schema, argument
73 return create_table2(big_query, project_id, dataset_id, table_id, fields,
79 dataset_id, argument
93 return create_table2(big_query, project_id, dataset_id, table_id, fields,
99 dataset_id, argument
113 'datasetId': dataset_id,
[all …]
/external/protobuf/benchmarks/util/
big_query_utils.py
26 def create_dataset(biq_query, project_id, dataset_id): argument
31 'datasetId': dataset_id
41 print('Warning: The dataset %s already exists' % dataset_id)
44 print('Error in creating dataset: %s. Err: %s' % (dataset_id,
50 def create_table(big_query, project_id, dataset_id, table_id, table_schema, argument
57 return create_table2(big_query, project_id, dataset_id, table_id, fields,
63 dataset_id, argument
77 return create_table2(big_query, project_id, dataset_id, table_id, fields,
83 dataset_id, argument
97 'datasetId': dataset_id,
[all …]
/external/grpc-grpc/tools/run_tests/performance/
bq_upload_result.py
37 def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file): argument
56 _create_results_table(bq, dataset_id, table_id)
59 bq, dataset_id, table_id, scenario_result, flatten=False):
64 def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file): argument
69 _create_results_table(bq, dataset_id, table_id)
71 if not _insert_result(bq, dataset_id, table_id, scenario_result):
76 def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True): argument
81 return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
85 def _create_results_table(bq, dataset_id, table_id): argument
90 return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
[all …]
patch_scenario_results_schema.py
36 def _patch_results_table(dataset_id, table_id): argument
42 return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
57 dataset_id, table_id = args.bq_result_table.split('.', 2) variable
59 _patch_results_table(dataset_id, table_id)
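
Both performance scripts take one 'dataset.table' argument, split it into dataset_id and table_id, and pass the pair to the big_query_utils helpers matched above. A condensed sketch of the upload flow; create_table2 and insert_rows appear in the excerpts, while create_big_query and make_row are assumed from the same module, and the project id and schema are placeholders:

    # Hedged sketch of the bq_upload_result.py flow; helper internals are
    # elided in the results above, so treat this as an illustration only.
    import uuid

    import big_query_utils

    _PROJECT_ID = 'my-project'  # placeholder project id
    _SCHEMA = [{'name': 'metadata', 'type': 'STRING', 'mode': 'NULLABLE'}]  # placeholder

    def upload_result(bq_result_table, scenario_result):
        # One 'dataset.table' string in, two BigQuery identifiers out.
        dataset_id, table_id = bq_result_table.split('.', 2)
        bq = big_query_utils.create_big_query()  # assumed helper, not excerpted
        # Ensure the destination table exists (mirrors _create_results_table).
        big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
                                      _SCHEMA, 'Benchmark results.')
        row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result)  # assumed helper
        return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
                                           [row])
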
/external/rust/crates/grpcio-sys/grpc/tools/run_tests/performance/
bq_upload_result.py
37 def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file): argument
56 _create_results_table(bq, dataset_id, table_id)
59 bq, dataset_id, table_id, scenario_result, flatten=False):
64 def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file): argument
69 _create_results_table(bq, dataset_id, table_id)
71 if not _insert_result(bq, dataset_id, table_id, scenario_result):
76 def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True): argument
81 return big_query_utils.insert_rows(bq, _PROJECT_ID, dataset_id, table_id,
85 def _create_results_table(bq, dataset_id, table_id): argument
90 return big_query_utils.create_table2(bq, _PROJECT_ID, dataset_id, table_id,
[all …]
patch_scenario_results_schema.py
36 def _patch_results_table(dataset_id, table_id): argument
42 return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
56 dataset_id, table_id = args.bq_result_table.split('.', 2) variable
58 _patch_results_table(dataset_id, table_id)
/external/tensorflow/tensorflow/python/data/experimental/ops/
data_service_ops.py
58 dataset_id, argument
119 dataset_id, dtype=dtypes.int64, name="dataset_id")
150 dataset_id=self._dataset_id,
163 dataset_id=self._dataset_id,
187 def __init__(self, dataset_id, processing_mode, address, protocol, argument
192 dataset_id=dataset_id,
240 dataset_id,
306 dataset_id=dataset_id,
379 dataset_id = register_dataset(service, dataset)
383 dataset_id,
[all …]
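
data_service_ops.py is the public Python surface over the dispatcher code above: register_dataset() sends a dataset graph to the dispatcher and returns its dataset_id, and from_dataset_id() builds a reader from that id, as the argspec in the golden .pbtxt files below records. A minimal sketch, assuming TF 2.x with a tf.data service dispatcher and at least one worker already running at the placeholder address:

    # Hedged end-to-end sketch of the register/read-back cycle.
    import tensorflow as tf

    service = 'grpc://localhost:5000'  # placeholder dispatcher target

    ds = tf.data.Dataset.range(10)
    # Registration returns the dataset_id; the dispatcher can reuse an
    # existing registration (GetOrRegisterDataset in dispatcher_impl.cc above).
    dataset_id = tf.data.experimental.service.register_dataset(service, ds)
    # Readers pass element_spec explicitly; the tests below show that a
    # mismatched spec or an unregistered dataset_id is an error.
    ds2 = tf.data.experimental.service.from_dataset_id(
        processing_mode='parallel_epochs',
        service=service,
        dataset_id=dataset_id,
        element_spec=ds.element_spec)
    for element in ds2:
        print(element)
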
/external/tensorflow/tensorflow/core/kernels/data/experimental/
data_service_ops.cc
62 int64 dataset_id; in Compute() local
66 [&]() { return client.RegisterDataset(graph_def, dataset_id); }, in Compute()
74 output_dataset_id() = dataset_id; in Compute()
data_service_dataset_op.cc
86 Dataset(OpKernelContext* ctx, int op_version, int64 dataset_id, in Dataset() argument
98 dataset_id_(dataset_id), in Dataset()
157 Node* dataset_id; in AsGraphDefInternal() local
158 TF_RETURN_IF_ERROR(b->AddScalar(dataset_id_, &dataset_id)); in AsGraphDefInternal()
159 inputs.push_back(dataset_id); in AsGraphDefInternal()
884 int64 dataset_id; in MakeDataset() local
885 OP_REQUIRES_OK(ctx, ParseScalarArgument(ctx, kDatasetId, &dataset_id)); in MakeDataset()
964 *output = new Dataset(ctx, op_version_, dataset_id, processing_mode, address, in MakeDataset()
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v2/
DataServiceDataset.pbtxt
4 name: "dataset_id"
59 name: "dataset_id"
DataServiceDatasetV2.pbtxt
4 name: "dataset_id"
67 name: "dataset_id"
RegisterDataset.pbtxt
16 name: "dataset_id"
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
RegisterDataset.pbtxt
16 name: "dataset_id"
/external/tensorflow/tensorflow/tools/api/golden/v1/
tensorflow.data.experimental.service.pbtxt
17 …argspec: "args=[\'processing_mode\', \'service\', \'dataset_id\', \'element_spec\', \'job_name\', …
/external/tensorflow/tensorflow/tools/api/golden/v2/
tensorflow.data.experimental.service.pbtxt
25 …argspec: "args=[\'processing_mode\', \'service\', \'dataset_id\', \'element_spec\', \'job_name\', …
/external/tensorflow/tensorflow/python/data/experimental/kernel_tests/
data_service_ops_test.py
716 dataset_id = data_service_ops.register_dataset(cluster.target, ds)
718 "parallel_epochs", cluster.target, dataset_id, ds.element_spec)
728 dataset_id = data_service_ops.register_dataset(cluster.target, ds)
730 "parallel_epochs", cluster.target, dataset_id, ds.element_spec)
743 dataset_id = data_service_ops.register_dataset(cluster.target, ds)
746 "parallel_epochs", cluster.target, dataset_id, wrong_spec)
755 dataset_id = 0
758 "parallel_epochs", cluster.target, dataset_id, element_spec)
