/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_COMPILER_XLA_SERVICE_BFLOAT16_NORMALIZATION_H_
#define TENSORFLOW_COMPILER_XLA_SERVICE_BFLOAT16_NORMALIZATION_H_

#include "tensorflow/compiler/xla/service/bfloat16_support.h"
#include "tensorflow/compiler/xla/service/hlo_module.h"
#include "tensorflow/compiler/xla/service/hlo_pass_interface.h"

namespace xla {

// A pass which adds F32 <-> BF16 conversions for HLO instructions that do not
// support BF16 input/output or mixed precision, according to the passed-in
// backend-specific BF16 support rules.
class BFloat16Normalization : public HloModulePass {
 public:
  explicit BFloat16Normalization(const BFloat16Support* bfloat16_support)
      : bfloat16_support_(bfloat16_support) {}

  ~BFloat16Normalization() override = default;
  absl::string_view name() const override { return "bf16-normalization"; }

  // Runs BF16 normalization on the given module. Returns whether the module
  // was changed.
  StatusOr<bool> Run(HloModule* module) override;

 private:
  const BFloat16Support* bfloat16_support_;
};
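
// Example usage (a minimal sketch, not part of this header): `backend_support`
// stands in for a backend-provided BFloat16Support implementation, and
// TF_ASSIGN_OR_RETURN comes from tensorflow/compiler/xla/status_macros.h:
//
//   BFloat16Normalization normalization(&backend_support);
//   TF_ASSIGN_OR_RETURN(bool changed, normalization.Run(module));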

// A pass that unconditionally removes the mixed F32/BF16 uses in HLO
// instructions (excluding convert) by adding F32 <-> BF16 conversions. Unlike
// BFloat16Normalization, this pass does not use a backend-specific
// BFloat16Support, and does not change HLOs that have BF16 data if they do not
// use mixed precision; it removes mixed precision even if the backend supports
// it. This pass is used to make the HLO module valid for other HLO passes
// which do not support mixed precision.
class BFloat16MixedPrecisionRemoval : public HloModulePass {
 public:
  BFloat16MixedPrecisionRemoval() {}

  ~BFloat16MixedPrecisionRemoval() override = default;

  absl::string_view name() const override {
    return "bf16-mixed-precision-removal";
  }

  // Runs mixed precision removal on the given module. Returns whether the
  // module was changed.
  StatusOr<bool> Run(HloModule* module) override {
    BFloat16Normalization normalization(&no_mixed_precision_support_);
    return normalization.Run(module);
  }

 private:
  class BFloat16SupportForMixedPrecisionRemoval : public BFloat16Support {
   public:
    BFloat16SupportForMixedPrecisionRemoval() {}

    ~BFloat16SupportForMixedPrecisionRemoval() override = default;

    // Claim BF16 support for every operand and output so that the wrapped
    // BFloat16Normalization pass leaves pure-BF16 HLOs unchanged.
    bool SupportsBF16Operand(const HloInstruction& hlo,
                             int64 operand_index) const override {
      return true;
    }

    bool SupportsBF16Output(const HloInstruction& hlo) const override {
      return true;
    }

    // Report no mixed-precision support so that the pass removes all mixed
    // F32/BF16 uses.
    bool SupportsMixedPrecisions(const HloInstruction& hlo) const override {
      return false;
    }
  } no_mixed_precision_support_;
};
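
// Example usage (a minimal sketch): running the removal ahead of passes that
// cannot handle mixed precision, via HloPassPipeline from
// tensorflow/compiler/xla/service/hlo_pass_pipeline.h; the pipeline name and
// the assumption that `module` is in scope are illustrative only:
//
//   HloPassPipeline pipeline("remove-mixed-precision");
//   pipeline.AddPass<BFloat16MixedPrecisionRemoval>();
//   TF_ASSIGN_OR_RETURN(bool changed, pipeline.Run(module));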

}  // namespace xla

#endif  // TENSORFLOW_COMPILER_XLA_SERVICE_BFLOAT16_NORMALIZATION_H_