/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map_stream.h"

#include <memory>
#include <vector>

#include "art_method-inl.h"
#include "base/globals.h"
#include "base/stl_util.h"
#include "class_linker.h"
#include "dex/dex_file.h"
#include "dex/dex_file_types.h"
#include "driver/compiler_options.h"
#include "oat/stack_map.h"
#include "optimizing/code_generator.h"
#include "optimizing/nodes.h"
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"

namespace art HIDDEN {

constexpr static bool kVerifyStackMaps = kIsDebugBuild;

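// Expected call sequence (illustrative sketch; it mirrors the Begin/End checks enforced below):
//
//   BeginMethod(...)
//     BeginStackMapEntry(...)        // once per safepoint; non-catch entries in native pc order
//       [record dex register locations for the outer frame]
//       BeginInlineInfoEntry(...)    // once per inlined frame, outermost first
//         [record dex register locations for the inlined frame]
//       EndInlineInfoEntry()
//     EndStackMapEntry()
//   EndMethod(code_size)
//   Encode()                         // produces the serialized CodeInfo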
uint32_t StackMapStream::GetStackMapNativePcOffset(size_t i) {
  return StackMap::UnpackNativePc(stack_maps_[i][StackMap::kPackedNativePc], instruction_set_);
}

void StackMapStream::SetStackMapNativePcOffset(size_t i, uint32_t native_pc_offset) {
  stack_maps_[i][StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
}

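// Records the per-method properties (frame size in kStackAlignment units, spill masks,
// number of dex registers, and flags) which later become part of the CodeInfo header.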
void StackMapStream::BeginMethod(size_t frame_size_in_bytes,
                                 size_t core_spill_mask,
                                 size_t fp_spill_mask,
                                 uint32_t num_dex_registers,
                                 bool baseline,
                                 bool debuggable,
                                 bool has_should_deoptimize_flag) {
  DCHECK(!in_method_) << "Mismatched Begin/End calls";
  in_method_ = true;
  DCHECK_EQ(packed_frame_size_, 0u) << "BeginMethod was already called";

  DCHECK_ALIGNED(frame_size_in_bytes, kStackAlignment);
  packed_frame_size_ = frame_size_in_bytes / kStackAlignment;
  core_spill_mask_ = core_spill_mask;
  fp_spill_mask_ = fp_spill_mask;
  num_dex_registers_ = num_dex_registers;
  baseline_ = baseline;
  debuggable_ = debuggable;
  has_should_deoptimize_flag_ = has_should_deoptimize_flag;

  if (kVerifyStackMaps) {
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      DCHECK_EQ(code_info.packed_frame_size_, frame_size_in_bytes / kStackAlignment);
      DCHECK_EQ(code_info.core_spill_mask_, core_spill_mask);
      DCHECK_EQ(code_info.fp_spill_mask_, fp_spill_mask);
      DCHECK_EQ(code_info.number_of_dex_registers_, num_dex_registers);
    });
  }
}

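// Finalizes the method: records the final code size, resolves the lazily collected stack
// masks (the compiler may have modified them after BeginStackMapEntry), and in debug
// builds checks that every stack map's native pc lies within the generated code.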
void StackMapStream::EndMethod(size_t code_size) {
  DCHECK(in_method_) << "Mismatched Begin/End calls";
  in_method_ = false;
  code_size_ = code_size;

  // Read the stack masks now. The compiler might have updated them.
  for (size_t i = 0; i < lazy_stack_masks_.size(); i++) {
    BitVector* stack_mask = lazy_stack_masks_[i];
    if (stack_mask != nullptr && stack_mask->GetNumberOfBits() != 0) {
      stack_maps_[i][StackMap::kStackMaskIndex] =
          stack_masks_.Dedup(stack_mask->GetRawStorage(), stack_mask->GetNumberOfBits());
    }
  }

  if (kIsDebugBuild) {
    uint32_t packed_code_size = StackMap::PackNativePc(code_size, instruction_set_);
    for (size_t i = 0; i < stack_maps_.size(); i++) {
      DCHECK_LE(stack_maps_[i][StackMap::kPackedNativePc], packed_code_size);
    }
  }

  if (kVerifyStackMaps) {
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      CHECK_EQ(code_info.code_size_, code_size);
    });
  }
}

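// Starts a new stack map entry. Non-catch entries must be added in order of increasing
// native pc; catch entries come last and may be unordered. The register mask is
// deduplicated immediately, while the stack mask is only read and deduplicated in
// EndMethod (the compiler may still modify it in the meantime).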
void StackMapStream::BeginStackMapEntry(
    uint32_t dex_pc,
    uint32_t native_pc_offset,
    uint32_t register_mask,
    BitVector* stack_mask,
    StackMap::Kind kind,
    bool needs_vreg_info,
    const std::vector<uint32_t>& dex_pc_list_for_catch_verification) {
  DCHECK(in_method_) << "Call BeginMethod first";
  DCHECK(!in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = true;

  DCHECK_IMPLIES(!dex_pc_list_for_catch_verification.empty(), kind == StackMap::Kind::Catch);
  DCHECK_IMPLIES(!dex_pc_list_for_catch_verification.empty(), kIsDebugBuild);

  current_stack_map_ = BitTableBuilder<StackMap>::Entry();
  current_stack_map_[StackMap::kKind] = static_cast<uint32_t>(kind);
  current_stack_map_[StackMap::kPackedNativePc] =
      StackMap::PackNativePc(native_pc_offset, instruction_set_);
  current_stack_map_[StackMap::kDexPc] = dex_pc;
  if (stack_maps_.size() > 0) {
    // Check that non-catch stack maps are sorted by pc.
    // Catch stack maps are at the end and may be unordered.
    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
                current_stack_map_[StackMap::kPackedNativePc]);
    }
  }
  if (register_mask != 0) {
    uint32_t shift = LeastSignificantBit(register_mask);
    BitTableBuilder<RegisterMask>::Entry entry;
    entry[RegisterMask::kValue] = register_mask >> shift;
    entry[RegisterMask::kShift] = shift;
    current_stack_map_[StackMap::kRegisterMaskIndex] = register_masks_.Dedup(&entry);
  }
  // The compiler assumes the bit vector will only be read later, in EndMethod(),
  // and it might modify the data before that. Therefore, just store the pointer.
  // See ClearSpillSlotsFromLoopPhisInStackMap in code_generator.h.
  lazy_stack_masks_.push_back(stack_mask);
  current_inline_infos_.clear();
  current_dex_registers_.clear();
  expected_num_dex_registers_ = needs_vreg_info ? num_dex_registers_ : 0u;

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // Create a lambda which will be executed at the very end to verify the data.
    // Parameters and local variables will be captured (stored) by the lambda "[=]".
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      // The `native_pc_offset` may have been overridden using `SetStackMapNativePcOffset(.)`.
      uint32_t final_native_pc_offset = GetStackMapNativePcOffset(stack_map_index);
      if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
        StackMap stack_map = code_info.GetStackMapForNativePcOffset(final_native_pc_offset,
                                                                    instruction_set_);
        CHECK_EQ(stack_map.Row(), stack_map_index);
      } else if (kind == StackMap::Kind::Catch) {
        StackMap stack_map = code_info.GetCatchStackMapForDexPc(
            ArrayRef<const uint32_t>(dex_pc_list_for_catch_verification));
        CHECK_EQ(stack_map.Row(), stack_map_index);
      }
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.GetNativePcOffset(instruction_set_), final_native_pc_offset);
      CHECK_EQ(stack_map.GetKind(), static_cast<uint32_t>(kind));
      CHECK_EQ(stack_map.GetDexPc(), dex_pc);
      CHECK_EQ(code_info.GetRegisterMaskOf(stack_map), register_mask);
      BitMemoryRegion seen_stack_mask = code_info.GetStackMaskOf(stack_map);
      CHECK_GE(seen_stack_mask.size_in_bits(), stack_mask ? stack_mask->GetNumberOfBits() : 0);
      for (size_t b = 0; b < seen_stack_mask.size_in_bits(); b++) {
        CHECK_EQ(seen_stack_mask.LoadBit(b), stack_mask != nullptr && stack_mask->IsBitSet(b));
      }
    });
  }
}

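// Finishes the current entry: links the collected inline infos, emits the delta-compressed
// dex register map, and appends the stack map to the table.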
void StackMapStream::EndStackMapEntry() {
  DCHECK(in_stack_map_) << "Mismatched Begin/End calls";
  in_stack_map_ = false;

  // Generate index into the InlineInfo table.
  size_t inlining_depth = current_inline_infos_.size();
  if (!current_inline_infos_.empty()) {
    current_inline_infos_.back()[InlineInfo::kIsLast] = InlineInfo::kLast;
    current_stack_map_[StackMap::kInlineInfoIndex] =
        inline_infos_.Dedup(current_inline_infos_.data(), current_inline_infos_.size());
  }

  // Generate delta-compressed dex register map.
  size_t num_dex_registers = current_dex_registers_.size();
  if (!current_dex_registers_.empty()) {
    DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
    CreateDexRegisterMap();
  }

  stack_maps_.Add(current_stack_map_);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      CHECK_EQ(stack_map.HasDexRegisterMap(), (num_dex_registers != 0));
      CHECK_EQ(stack_map.HasInlineInfo(), (inlining_depth != 0));
      CHECK_EQ(code_info.GetInlineInfosOf(stack_map).size(), inlining_depth);
    });
  }
}

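// Starts an inline frame within the current stack map entry. The callee is either encoded
// directly as an ArtMethod pointer, or indirectly through a MethodInfo entry holding the
// dex method index and, if the callee's dex file differs from the outer one, the index of
// that dex file in the boot class path or in the oat file's dex file list.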
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file,
                                          const CodeGenerator* codegen) {
  DCHECK(in_stack_map_) << "Call BeginStackMapEntry first";
  DCHECK(!in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = true;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());

  expected_num_dex_registers_ += num_dex_registers;

  BitTableBuilder<InlineInfo>::Entry entry;
  entry[InlineInfo::kIsLast] = InlineInfo::kMore;
  entry[InlineInfo::kDexPc] = dex_pc;
  entry[InlineInfo::kNumberOfDexRegisters] = static_cast<uint32_t>(expected_num_dex_registers_);
  if (EncodeArtMethodInInlineInfo(method)) {
    entry[InlineInfo::kArtMethodHi] = High32Bits(reinterpret_cast<uintptr_t>(method));
    entry[InlineInfo::kArtMethodLo] = Low32Bits(reinterpret_cast<uintptr_t>(method));
  } else {
    uint32_t is_in_bootclasspath = MethodInfo::kKindNonBCP;
    uint32_t dexfile_index = MethodInfo::kSameDexFile;
    if (dex_pc != static_cast<uint32_t>(-1)) {
      ScopedObjectAccess soa(Thread::Current());
      const DexFile* dex_file = method->GetDexFile();
      if (!IsSameDexFile(*outer_dex_file, *dex_file)) {
        if (method->GetDeclaringClass()->IsBootStrapClassLoaded()) {
          ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
          const std::vector<const DexFile*>& boot_class_path = class_linker->GetBootClassPath();
          auto it = std::find_if(
              boot_class_path.begin(), boot_class_path.end(), [dex_file](const DexFile* df) {
                return IsSameDexFile(*df, *dex_file);
              });
          is_in_bootclasspath = MethodInfo::kKindBCP;
          dexfile_index = std::distance(boot_class_path.begin(), it);
        } else {
          const std::vector<const DexFile*>& dex_files =
              codegen->GetCompilerOptions().GetDexFilesForOatFile();
          auto it = std::find_if(dex_files.begin(), dex_files.end(), [dex_file](const DexFile* df) {
            return IsSameDexFile(*df, *dex_file);
          });
          // No need to set is_in_bootclasspath since the default value works.
          dexfile_index = std::distance(dex_files.begin(), it);
        }
      }
    }
    uint32_t dex_method_index = method->GetDexMethodIndex();
    entry[InlineInfo::kMethodInfoIndex] =
        method_infos_.Dedup({dex_method_index, is_in_bootclasspath, dexfile_index});
  }
  current_inline_infos_.push_back(entry);

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    size_t depth = current_inline_infos_.size() - 1;
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      InlineInfo inline_info = code_info.GetInlineInfosOf(stack_map)[depth];
      CHECK_EQ(inline_info.GetDexPc(), dex_pc);
      bool encode_art_method = EncodeArtMethodInInlineInfo(method);
      CHECK_EQ(inline_info.EncodesArtMethod(), encode_art_method);
      if (encode_art_method) {
        CHECK_EQ(inline_info.GetArtMethod(), method);
      } else {
        MethodInfo method_info = code_info.GetMethodInfoOf(inline_info);
        CHECK_EQ(method_info.GetMethodIndex(), method->GetDexMethodIndex());
        CHECK(method_info.GetDexFileIndexKind() == MethodInfo::kKindNonBCP ||
              method_info.GetDexFileIndexKind() == MethodInfo::kKindBCP);
        ScopedObjectAccess soa(Thread::Current());
        if (inline_info.GetDexPc() != static_cast<uint32_t>(-1) &&
            !IsSameDexFile(*outer_dex_file, *method->GetDexFile())) {
          if (method->GetDeclaringClass()->IsBootStrapClassLoaded()) {
            CHECK_EQ(method_info.GetDexFileIndexKind(), MethodInfo::kKindBCP);
            ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
            const std::vector<const DexFile*>& boot_class_path = class_linker->GetBootClassPath();
            DCHECK_LT(method_info.GetDexFileIndex(), boot_class_path.size());
            CHECK(IsSameDexFile(*boot_class_path[method_info.GetDexFileIndex()],
                                *method->GetDexFile()));
          } else {
            CHECK_EQ(method_info.GetDexFileIndexKind(), MethodInfo::kKindNonBCP);
            const std::vector<const DexFile*>& dex_files =
                codegen->GetCompilerOptions().GetDexFilesForOatFile();
            DCHECK_LT(method_info.GetDexFileIndex(), dex_files.size());
            CHECK(IsSameDexFile(*dex_files[method_info.GetDexFileIndex()], *method->GetDexFile()));
          }
        }
      }
    });
  }
}

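// Closes the current inline frame. All dex registers expected so far must have been added.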
void StackMapStream::EndInlineInfoEntry() {
  DCHECK(in_inline_info_) << "Mismatched Begin/End calls";
  in_inline_info_ = false;
  DCHECK_EQ(expected_num_dex_registers_, current_dex_registers_.size());
}

// Create delta-compressed dex register map based on the current list of DexRegisterLocations.
// All dex registers for a stack map are concatenated - inlined registers are just appended.
void StackMapStream::CreateDexRegisterMap() {
  // These are fields rather than local variables so that we can reuse the reserved memory.
  temp_dex_register_mask_.ClearAllBits();
  temp_dex_register_map_.clear();

  // Ensure that the arrays that hold previous state are big enough to be safely indexed below.
  if (previous_dex_registers_.size() < current_dex_registers_.size()) {
    previous_dex_registers_.resize(current_dex_registers_.size(), DexRegisterLocation::None());
    dex_register_timestamp_.resize(current_dex_registers_.size(), 0u);
  }

  // Set bit in the mask for each register that has been changed since the previous stack map.
  // Modified registers are stored in the catalogue and the catalogue index added to the list.
  for (size_t i = 0; i < current_dex_registers_.size(); i++) {
    DexRegisterLocation reg = current_dex_registers_[i];
    // Distance is the difference between this index and the index of the last modification.
    uint32_t distance = stack_maps_.size() - dex_register_timestamp_[i];
    if (previous_dex_registers_[i] != reg || distance > kMaxDexRegisterMapSearchDistance) {
      BitTableBuilder<DexRegisterInfo>::Entry entry;
      entry[DexRegisterInfo::kKind] = static_cast<uint32_t>(reg.GetKind());
      entry[DexRegisterInfo::kPackedValue] =
          DexRegisterInfo::PackValue(reg.GetKind(), reg.GetValue());
      uint32_t index = reg.IsLive() ? dex_register_catalog_.Dedup(&entry) : kNoValue;
      temp_dex_register_mask_.SetBit(i);
      temp_dex_register_map_.push_back({index});
      previous_dex_registers_[i] = reg;
      dex_register_timestamp_[i] = stack_maps_.size();
    }
  }

  // Set the mask and map for the current StackMap (which includes inlined registers).
  if (temp_dex_register_mask_.GetNumberOfBits() != 0) {
    current_stack_map_[StackMap::kDexRegisterMaskIndex] =
        dex_register_masks_.Dedup(temp_dex_register_mask_.GetRawStorage(),
                                  temp_dex_register_mask_.GetNumberOfBits());
  }
  if (!current_dex_registers_.empty()) {
    current_stack_map_[StackMap::kDexRegisterMapIndex] =
        dex_register_maps_.Dedup(temp_dex_register_map_.data(),
                                 temp_dex_register_map_.size());
  }

  if (kVerifyStackMaps) {
    size_t stack_map_index = stack_maps_.size();
    // We need to make a copy of the current registers for later (when the check is run).
    auto expected_dex_registers = std::make_shared<dchecked_vector<DexRegisterLocation>>(
        current_dex_registers_.begin(), current_dex_registers_.end());
    dchecks_.emplace_back([=](const CodeInfo& code_info) {
      StackMap stack_map = code_info.GetStackMapAt(stack_map_index);
      uint32_t expected_reg = 0;
      for (DexRegisterLocation reg : code_info.GetDexRegisterMapOf(stack_map)) {
        CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
      }
      for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
        DexRegisterMap map = code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
        for (DexRegisterLocation reg : map) {
          CHECK_EQ((*expected_dex_registers)[expected_reg++], reg);
        }
      }
      CHECK_EQ(expected_reg, expected_dex_registers->size());
    });
  }
}

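// Serializes everything into a CodeInfo blob: interleaved varint headers followed by the
// non-empty bit tables (recorded in bit_table_flags). The result is decoded again to
// cross-check the essentials, and all deferred verification lambdas are run against it.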
ScopedArenaVector<uint8_t> StackMapStream::Encode() {
  DCHECK(in_stack_map_ == false) << "Mismatched Begin/End calls";
  DCHECK(in_inline_info_ == false) << "Mismatched Begin/End calls";

  uint32_t flags = 0;
  flags |= (inline_infos_.size() > 0) ? CodeInfo::kHasInlineInfo : 0;
  flags |= baseline_ ? CodeInfo::kIsBaseline : 0;
  flags |= debuggable_ ? CodeInfo::kIsDebuggable : 0;
  flags |= has_should_deoptimize_flag_ ? CodeInfo::kHasShouldDeoptimizeFlag : 0;

  uint32_t bit_table_flags = 0;
  ForEachBitTable([&bit_table_flags](size_t i, auto bit_table) {
    if (bit_table->size() != 0) {  // Record which bit-tables are stored.
      bit_table_flags |= 1 << i;
    }
  });

  ScopedArenaVector<uint8_t> buffer(allocator_->Adapter(kArenaAllocStackMapStream));
  BitMemoryWriter<ScopedArenaVector<uint8_t>> out(&buffer);
  out.WriteInterleavedVarints(std::array<uint32_t, CodeInfo::kNumHeaders>{
    flags,
    code_size_,
    packed_frame_size_,
    core_spill_mask_,
    fp_spill_mask_,
    num_dex_registers_,
    bit_table_flags,
  });
  ForEachBitTable([&out](size_t, auto bit_table) {
    if (bit_table->size() != 0) {  // Skip empty bit-tables.
      bit_table->Encode(out);
    }
  });

  // Verify that we can load the CodeInfo and check some essentials.
  size_t number_of_read_bits;
  CodeInfo code_info(buffer.data(), &number_of_read_bits);
  CHECK_EQ(number_of_read_bits, out.NumberOfWrittenBits());
  CHECK_EQ(code_info.GetNumberOfStackMaps(), stack_maps_.size());
  CHECK_EQ(CodeInfo::HasInlineInfo(buffer.data()), inline_infos_.size() > 0);
  CHECK_EQ(CodeInfo::IsBaseline(buffer.data()), baseline_);
  CHECK_EQ(CodeInfo::IsDebuggable(buffer.data()), debuggable_);
  CHECK_EQ(CodeInfo::HasShouldDeoptimizeFlag(buffer.data()), has_should_deoptimize_flag_);

  // Verify all written data (usually only in debug builds).
  if (kVerifyStackMaps) {
    for (const auto& dcheck : dchecks_) {
      dcheck(code_info);
    }
  }

  return buffer;
}

}  // namespace art