/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_CORE_DATA_STANDALONE_H_
#define TENSORFLOW_CORE_DATA_STANDALONE_H_

#include <functional>
#include <memory>
#include <vector>

#include "tensorflow/core/common_runtime/device_mgr.h"
#include "tensorflow/core/framework/dataset.h"
#include "tensorflow/core/framework/function_handle_cache.h"
#include "tensorflow/core/lib/core/threadpool.h"
#include "tensorflow/core/public/session_options.h"

namespace tensorflow {
namespace data {
namespace standalone {

// The purpose of the API in this file is to facilitate standalone execution of
// a tf.data input pipeline graph.
//
// The API exposes two abstractions -- a `Dataset` and an `Iterator` -- which
// encapsulate the TensorFlow runtime.
//
// The `Dataset` abstraction represents an input pipeline as a collection of
// data sources and a logical plan of transformations that operate over the
// data.
//
// The `Iterator` abstraction represents an execution of an input pipeline that
// can be used to enumerate its elements.
//
// Example usage:
//
//   // Create a `Dataset` by running the `graph_def` graph.
//   tensorflow::data::standalone::Dataset::Params params;
//   std::unique_ptr<tensorflow::data::standalone::Dataset> dataset;
//   Status s = tensorflow::data::standalone::Dataset::FromGraph(
//      params, graph_def, &dataset);
//   if (!s.ok()) { /* error handling */ }
//
//   std::unique_ptr<tensorflow::data::standalone::Iterator> iterator;
//   s = dataset->MakeIterator(&iterator);
//   if (!s.ok()) { /* error handling */ }
//
//   bool end_of_input = false;
//   while (!end_of_input) {
//     std::vector<tensorflow::Tensor> outputs;
//     s = iterator->GetNext(&outputs, &end_of_input);
//     if (!s.ok()) { /* error handling */ }
//     if (!end_of_input) { /* output handling */ }
//   }
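//
//   // Iteration can also be driven by an explicit split provider, e.g. to
//   // control which portions of the input this consumer processes. A minimal
//   // sketch using `MakeSplitProvider` and the two-argument `MakeIterator`
//   // declared below (assuming `SplitProvider` resolves to
//   // `tensorflow::data::SplitProvider` from
//   // "tensorflow/core/framework/dataset.h"):
//   std::unique_ptr<tensorflow::data::SplitProvider> split_provider;
//   s = dataset->MakeSplitProvider(&split_provider);
//   if (!s.ok()) { /* error handling */ }
//   s = dataset->MakeIterator(std::move(split_provider), &iterator);
//   if (!s.ok()) { /* error handling */ }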

class Dataset;

// Represents an execution of an input pipeline that can be used to enumerate
// its elements.
class Iterator {
 public:
  // Returns the next element of the input pipeline (if there is one) and an
  // indication of whether the end of the input pipeline has been reached.
  Status GetNext(std::vector<Tensor>* outputs, bool* end_of_input);

 private:
  friend class Dataset;

  Iterator(IteratorBase* iterator, IteratorContext* ctx);

  std::unique_ptr<IteratorBase> iterator_;
  std::unique_ptr<IteratorContext> ctx_;
};

// Represents an input pipeline as a collection of data sources and a logical
// plan of transformations that operate over the data.
class Dataset {
 public:
  // Parameters for `Dataset` creation (e.g. TensorFlow runtime configuration).
  struct Params {
    SessionOptions session_options;
  };

  // Creates a new `Dataset` instance by running the given dataset graph.
  static Status FromGraph(Params params, const GraphDef& graph_def,
                          std::unique_ptr<Dataset>* result);

  ~Dataset();

  // Creates an iterator for this dataset.
  Status MakeIterator(std::unique_ptr<Iterator>* result);
  // Creates an iterator for this dataset that uses the given split provider
  // to determine which splits to process.
  Status MakeIterator(std::unique_ptr<SplitProvider> split_provider,
                      std::unique_ptr<Iterator>* result);

  // Creates a split provider for this dataset.
  Status MakeSplitProvider(std::unique_ptr<SplitProvider>* result);

  // Returns a pointer to the underlying dataset.
  const DatasetBase* Get() const;

 private:
  Dataset(DatasetBase* dataset, DeviceMgr* device_mgr,
          ProcessFunctionLibraryRuntime* pflr,
          FunctionLibraryDefinition* flib_def, thread::ThreadPool* pool);

  DatasetBase* dataset_;  // owned
  std::unique_ptr<DeviceMgr> device_mgr_;
  std::unique_ptr<FunctionLibraryDefinition> flib_def_;
  std::unique_ptr<ProcessFunctionLibraryRuntime> pflr_;
  std::unique_ptr<thread::ThreadPool> pool_;
  std::unique_ptr<FunctionHandleCache> function_handle_cache_;
  std::function<void(std::function<void()>)> runner_;
  ResourceMgr resource_mgr_;
  CancellationManager cancellation_manager_;
};
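
// Example: inspecting the dataset produced by `FromGraph`. A minimal sketch,
// assuming the `DatasetBase` accessors `output_dtypes()` and `output_shapes()`
// declared in "tensorflow/core/framework/dataset.h":
//
//   const tensorflow::data::DatasetBase* base = dataset->Get();
//   const tensorflow::DataTypeVector& dtypes = base->output_dtypes();
//   const std::vector<tensorflow::PartialTensorShape>& shapes =
//       base->output_shapes();
//   // `dtypes` and `shapes` describe, per component, the elements that
//   // `Iterator::GetNext` will produce.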

}  // namespace standalone
}  // namespace data
}  // namespace tensorflow

#endif  // TENSORFLOW_CORE_DATA_STANDALONE_H_