/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/core/grappler/optimizers/data/map_fusion.h"

#include "absl/container/flat_hash_set.h"
#include "tensorflow/core/framework/attr_value.pb.h"
#include "tensorflow/core/framework/node_def.pb.h"
#include "tensorflow/core/grappler/clusters/cluster.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/grappler/mutable_graph_view.h"
#include "tensorflow/core/grappler/op_types.h"
#include "tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.h"
#include "tensorflow/core/grappler/optimizers/data/fusion_utils.h"
#include "tensorflow/core/grappler/optimizers/data/graph_utils.h"
#include "tensorflow/core/grappler/utils.h"
#include "tensorflow/core/grappler/utils/topological_sort.h"
#include "tensorflow/core/lib/gtl/map_util.h"
#include "tensorflow/core/platform/protobuf.h"

namespace tensorflow {
namespace grappler {
namespace {

// Sets basic function parameters and copies attributes from the parent and
// map nodes.
NodeDef MakeFusedNode(const NodeDef& parent_map_node, const NodeDef& map_node,
                      const FunctionDef& fused_function,
                      MutableGraphView* graph) {
  NodeDef fused_node;
  graph_utils::SetUniqueGraphNodeName("fused_map", graph->graph(), &fused_node);
  fused_node.set_op("MapDataset");
  fused_node.add_input(parent_map_node.input(0));

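  // Reuse the parent map's "f" attr as a template, but point it at the newly
  // created fused function.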
  auto attr = parent_map_node.attr().at("f");
  *attr.mutable_func()->mutable_name() = fused_function.signature().name();
  (*fused_node.mutable_attr())["f"] = std::move(attr);

  graph_utils::CopyAttribute("Targuments", parent_map_node, &fused_node);
  for (auto key : {"output_shapes", "output_types"})
    graph_utils::CopyAttribute(key, map_node, &fused_node);

  auto value_or_false = [](const AttrValue* attr) {
    if (!attr) return false;
    return attr->b();
  };

  const auto* first_parallelism =
      gtl::FindOrNull(parent_map_node.attr(), "use_inter_op_parallelism");
  const auto* second_parallelism =
      gtl::FindOrNull(map_node.attr(), "use_inter_op_parallelism");
  // Some graphs cannot execute with use_inter_op_parallelism=False, so we need
  // to set it to true if either of the ops has it set to true.
  (*fused_node.mutable_attr())["use_inter_op_parallelism"].set_b(
      value_or_false(first_parallelism) || value_or_false(second_parallelism));

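  // The fused map can only guarantee to preserve cardinality if both of the
  // original maps do.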
  const auto* first_cardinality =
      gtl::FindOrNull(parent_map_node.attr(), "preserve_cardinality");
  const auto* second_cardinality =
      gtl::FindOrNull(map_node.attr(), "preserve_cardinality");
  (*fused_node.mutable_attr())["preserve_cardinality"].set_b(
      value_or_false(first_cardinality) && value_or_false(second_cardinality));

  return fused_node;
}

}  // namespace

Status MapFusion::OptimizeAndCollectStats(Cluster* cluster,
                                          const GrapplerItem& item,
                                          GraphDef* output,
                                          OptimizationStats* stats) {
  GraphDef sorted_old_graph = item.graph;
  TF_RETURN_IF_ERROR(TopologicalSort(&sorted_old_graph));
  *output = sorted_old_graph;

  MutableGraphView graph(output);
  absl::flat_hash_set<string> nodes_to_delete;
  FunctionLibraryDefinition function_library(OpRegistry::Global(),
                                             item.graph.library());

  auto get_map_node = [](const NodeDef& node) -> const NodeDef* {
    // TODO(prazek): we could also handle ParallelMapDataset and
    // MapAndBatchDataset.
    if (node.op() == "MapDataset") return &node;
    return nullptr;
  };

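  // Composes the functions of two chained map nodes into a single function in
  // the output graph's library, or returns nullptr if their signatures cannot
  // be composed.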
  auto get_fused_function = [&function_library, &output](
                                const NodeDef* parent_map_node,
                                const NodeDef* map_node) -> FunctionDef* {
    const auto& parent_fun = parent_map_node->attr().at("f");
    const FunctionDef* parent_func =
        function_library.Find(parent_fun.func().name());
    const auto& fun = map_node->attr().at("f");
    const FunctionDef* func = function_library.Find(fun.func().name());

    if (!fusion_utils::CanCompose(parent_func->signature(),
                                  func->signature())) {
      VLOG(1) << "Can't fuse two maps because the output signature of the "
                 "first map function does not match the input signature of the "
                 "second function\n";
      return nullptr;
    }
    return fusion_utils::FuseFunctions(
        *parent_func, *func, "fused_map", fusion_utils::ComposeSignature,
        fusion_utils::ComposeInput, fusion_utils::ComposeOutput,
        fusion_utils::MergeNodes, output->mutable_library());
  };

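  // Scan the graph in topological order for a MapDataset whose input is
  // another MapDataset, and replace each such pair with a single fused map.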
  for (const NodeDef& node : sorted_old_graph.node()) {
    const NodeDef* map_node = get_map_node(node);
    if (!map_node) continue;

    const NodeDef* parent_map_node =
        get_map_node(*graph_utils::GetInputNode(*map_node, graph));
    if (!parent_map_node) continue;

    const auto* fused_function = get_fused_function(parent_map_node, map_node);
    if (fused_function == nullptr) continue;
    const auto* fused_maps_node = graph.AddNode(
        MakeFusedNode(*parent_map_node, *map_node, *fused_function, &graph));

    TF_RETURN_IF_ERROR(
        graph.UpdateFanouts(map_node->name(), fused_maps_node->name()));

    // TODO(prazek): we should run some optimizations on the fused map
    // functions, or make sure that optimization passes run after map
    // fusion.
    TF_RETURN_IF_ERROR(function_library.AddFunctionDef(*fused_function));

    // TODO(b/116285210): we could also remove map functions from library if
    // they are not used anymore.
    nodes_to_delete.insert(parent_map_node->name());
    nodes_to_delete.insert(map_node->name());
    stats->num_changes++;
  }

  TF_RETURN_IF_ERROR(graph.DeleteNodes(nodes_to_delete));
  return Status::OK();
}

void MapFusion::Feedback(Cluster* cluster, const GrapplerItem& item,
                         const GraphDef& optimize_output, double result) {
  // no-op
}

REGISTER_GRAPH_OPTIMIZER_AS(MapFusion, "map_fusion");

}  // namespace grappler
}  // namespace tensorflow