/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/compiler/xla/service/defuser.h"

#include <algorithm>
#include <memory>
#include <numeric>
#include <string>
#include <utility>
#include <vector>

#include "absl/container/flat_hash_map.h"
#include "tensorflow/compiler/xla/service/call_graph.h"
#include "tensorflow/compiler/xla/service/hlo_computation.h"
#include "tensorflow/compiler/xla/service/hlo_instruction.h"
#include "tensorflow/compiler/xla/service/hlo_opcode.h"
#include "tensorflow/compiler/xla/status_macros.h"
#include "tensorflow/compiler/xla/types.h"
#include "tensorflow/compiler/xla/util.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/platform/logging.h"
#include "tensorflow/core/platform/types.h"

namespace xla {

namespace {

// Copies every instruction of the given fusion instruction's fused computation
// into the fusion instruction's parent computation, and replaces all uses of
// the fusion instruction with the clone of the fused expression root.
Status Defuse(HloInstruction* fusion_instruction) {
  VLOG(2) << "Defusing instruction: " << fusion_instruction->ToString();

  HloComputation* fused_computation =
      fusion_instruction->fused_instructions_computation();

  // A map from fused instruction to its defused clone.
  absl::flat_hash_map<const HloInstruction*, HloInstruction*>
      defused_instructions;
  // Initialize the map so that each parameter of the fused computation maps to
  // the corresponding operand of the fusion instruction.
  for (int64 i = 0; i < fusion_instruction->operand_count(); ++i) {
    defused_instructions[fused_computation->parameter_instruction(i)] =
        fusion_instruction->mutable_operand(i);
  }

  // Create a clone of each instruction of the fused computation in the same
  // computation as the fusion instruction itself.
  // TODO(b/68227302): Move instructions to the new computation rather than
  // cloning and deleting.
  for (HloInstruction* fused_instruction :
       fused_computation->MakeInstructionPostOrder()) {
    if (fused_instruction->opcode() == HloOpcode::kParameter) {
      continue;
    }
    std::vector<HloInstruction*> new_operands;
    for (HloInstruction* operand : fused_instruction->operands()) {
      new_operands.push_back(defused_instructions.at(operand));
    }
    HloInstruction* defused_instruction =
        fusion_instruction->parent()->AddInstruction(
            fused_instruction->CloneWithNewOperands(fused_instruction->shape(),
                                                    new_operands));
    defused_instructions[fused_instruction] = defused_instruction;
  }

  TF_RETURN_IF_ERROR(fusion_instruction->ReplaceAllUsesWith(
      defused_instructions.at(fusion_instruction->fused_expression_root())));

  HloModule* module = fusion_instruction->parent()->parent();
  TF_RETURN_IF_ERROR(
      fusion_instruction->parent()->RemoveInstruction(fusion_instruction));
  return module->RemoveEmbeddedComputation(fused_computation);
}

}  // namespace

StatusOr<bool> Defuser::Run(HloModule* module) {
  VLOG(1) << "Defusing module " << module->name();
  XLA_VLOG_LINES(2, "Before defusion:\n" + module->ToString());

  bool changed = false;
  std::unique_ptr<CallGraph> call_graph = CallGraph::Build(module);
  TF_RETURN_IF_ERROR(call_graph->VisitNodes(
      [&](const CallGraphNode& call_graph_node) -> Status {
        if (call_graph_node.computation()->IsFusionComputation()) {
          TF_RET_CHECK(call_graph_node.caller_callsites().size() == 1);
          HloInstruction* fusion_instruction =
              call_graph_node.caller_callsites()[0].instruction();
          TF_RETURN_IF_ERROR(Defuse(fusion_instruction));
          changed = true;
        }
        return Status::OK();
      },
      /*visit_unreachable_nodes=*/true));

  XLA_VLOG_LINES(2, "After defusion:\n" + module->ToString());

  return changed;
}

}  // namespace xla
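
// A minimal usage sketch (illustrative only, not part of this translation
// unit), assuming a hypothetical, previously constructed
// std::unique_ptr<HloModule> named `module`. The pass is invoked like any
// other HLO pass:
//
//   xla::Defuser defuser;
//   xla::StatusOr<bool> changed = defuser.Run(module.get());
//
// Run returns true iff at least one fusion instruction was expanded back into
// its parent computation.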