1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
18 #define ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
19 
20 #include <atomic>
21 #include <iomanip>
22 #include <string>
23 #include <type_traits>
24 
25 #include "base/atomic.h"
26 #include "base/globals.h"
27 #include "base/logging.h"  // For VLOG_IS_ON.
28 
29 namespace art {
30 
// Per-method events counted during compilation. Each enumerator is the index
// of one counter in OptimizingCompilerStats::compile_stats_; kLastStat is a
// sentinel used to size that array, not a real statistic.
enum class MethodCompilationStat {
  kAttemptBytecodeCompilation = 0,
  kAttemptIntrinsicCompilation,
  kCompiledNativeStub,
  kCompiledIntrinsic,
  kCompiledBytecode,
  kCHAInline,
  kInlinedInvoke,
  kReplacedInvokeWithSimplePattern,
  kInstructionSimplifications,
  kInstructionSimplificationsArch,
  kUnresolvedMethod,
  kUnresolvedField,
  kUnresolvedFieldNotAFastAccess,
  kRemovedCheckedCast,
  kRemovedDeadInstruction,
  kRemovedNullCheck,
  // kNotCompiled*: reasons a compilation attempt was abandoned.
  kNotCompiledSkipped,
  kNotCompiledInvalidBytecode,
  kNotCompiledThrowCatchLoop,
  kNotCompiledAmbiguousArrayOp,
  kNotCompiledHugeMethod,
  kNotCompiledLargeMethodNoBranches,
  kNotCompiledMalformedOpcode,
  kNotCompiledNoCodegen,
  kNotCompiledPathological,
  kNotCompiledSpaceFilter,
  kNotCompiledUnhandledInstruction,
  kNotCompiledUnsupportedIsa,
  kNotCompiledVerificationError,
  kNotCompiledVerifyAtRuntime,
  kInlinedMonomorphicCall,
  kInlinedPolymorphicCall,
  kMonomorphicCall,
  kPolymorphicCall,
  kMegamorphicCall,
  kBooleanSimplified,
  kIntrinsicRecognized,
  kLoopInvariantMoved,
  kLoopVectorized,
  kLoopVectorizedIdiom,
  kSelectGenerated,
  kRemovedInstanceOf,
  kInlinedInvokeVirtualOrInterface,
  kImplicitNullCheckGenerated,
  kExplicitNullCheckGenerated,
  kSimplifyIf,
  kSimplifyThrowingInvoke,
  kInstructionSunk,
  // kNotInlined*: reasons the inliner rejected a call site.
  kNotInlinedUnresolvedEntrypoint,
  kNotInlinedDexCache,
  kNotInlinedStackMaps,
  kNotInlinedEnvironmentBudget,
  kNotInlinedInstructionBudget,
  kNotInlinedLoopWithoutExit,
  kNotInlinedIrreducibleLoop,
  kNotInlinedAlwaysThrows,
  kNotInlinedInfiniteLoop,
  kNotInlinedTryCatch,
  kNotInlinedRegisterAllocator,
  kNotInlinedCannotBuild,
  kNotInlinedNotVerified,
  kNotInlinedCodeItem,
  kNotInlinedWont,
  kNotInlinedRecursiveBudget,
  kNotInlinedProxy,
  kConstructorFenceGeneratedNew,
  kConstructorFenceGeneratedFinal,
  kConstructorFenceRemovedLSE,
  kConstructorFenceRemovedPFRA,
  kConstructorFenceRemovedCFRE,
  kJitOutOfMemoryForCommit,
  kLastStat  // Sentinel: number of statistics above. Must stay last.
};
105 std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs);
106 
107 class OptimizingCompilerStats {
108  public:
OptimizingCompilerStats()109   OptimizingCompilerStats() {
110     // The std::atomic<> default constructor leaves values uninitialized, so initialize them now.
111     Reset();
112   }
113 
114   void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
115     size_t stat_index = static_cast<size_t>(stat);
116     DCHECK_LT(stat_index, arraysize(compile_stats_));
117     compile_stats_[stat_index] += count;
118   }
119 
GetStat(MethodCompilationStat stat)120   uint32_t GetStat(MethodCompilationStat stat) const {
121     size_t stat_index = static_cast<size_t>(stat);
122     DCHECK_LT(stat_index, arraysize(compile_stats_));
123     return compile_stats_[stat_index];
124   }
125 
Log()126   void Log() const {
127     if (!kIsDebugBuild && !VLOG_IS_ON(compiler)) {
128       // Log only in debug builds or if the compiler is verbose.
129       return;
130     }
131 
132     uint32_t compiled_intrinsics = GetStat(MethodCompilationStat::kCompiledIntrinsic);
133     uint32_t compiled_native_stubs = GetStat(MethodCompilationStat::kCompiledNativeStub);
134     uint32_t bytecode_attempts =
135         GetStat(MethodCompilationStat::kAttemptBytecodeCompilation);
136     if (compiled_intrinsics == 0u && compiled_native_stubs == 0u && bytecode_attempts == 0u) {
137       LOG(INFO) << "Did not compile any method.";
138     } else {
139       uint32_t compiled_bytecode_methods =
140           GetStat(MethodCompilationStat::kCompiledBytecode);
141       // Successful intrinsic compilation preempts other compilation attempts but failed intrinsic
142       // compilation shall still count towards bytecode or native stub compilation attempts.
143       uint32_t num_compilation_attempts =
144           compiled_intrinsics + compiled_native_stubs + bytecode_attempts;
145       uint32_t num_successful_compilations =
146           compiled_intrinsics + compiled_native_stubs + compiled_bytecode_methods;
147       float compiled_percent = num_successful_compilations * 100.0f / num_compilation_attempts;
148       LOG(INFO) << "Attempted compilation of "
149           << num_compilation_attempts << " methods: " << std::fixed << std::setprecision(2)
150           << compiled_percent << "% (" << num_successful_compilations << ") compiled.";
151 
152       for (size_t i = 0; i < arraysize(compile_stats_); ++i) {
153         if (compile_stats_[i] != 0) {
154           LOG(INFO) << "OptStat#" << static_cast<MethodCompilationStat>(i) << ": "
155               << compile_stats_[i];
156         }
157       }
158     }
159   }
160 
AddTo(OptimizingCompilerStats * other_stats)161   void AddTo(OptimizingCompilerStats* other_stats) {
162     for (size_t i = 0; i != arraysize(compile_stats_); ++i) {
163       uint32_t count = compile_stats_[i];
164       if (count != 0) {
165         other_stats->RecordStat(static_cast<MethodCompilationStat>(i), count);
166       }
167     }
168   }
169 
Reset()170   void Reset() {
171     for (std::atomic<uint32_t>& stat : compile_stats_) {
172       stat = 0u;
173     }
174   }
175 
176  private:
177   std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)];
178 
179   DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats);
180 };
181 
182 inline void MaybeRecordStat(OptimizingCompilerStats* compiler_stats,
183                             MethodCompilationStat stat,
184                             uint32_t count = 1) {
185   if (compiler_stats != nullptr) {
186     compiler_stats->RecordStat(stat, count);
187   }
188 }
189 
190 }  // namespace art
191 
192 #endif  // ART_COMPILER_OPTIMIZING_OPTIMIZING_COMPILER_STATS_H_
193