1 /*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "stack_map_stream.h"
18
19 #include "art_method-inl.h"
20 #include "base/stl_util.h"
21 #include "optimizing/optimizing_compiler.h"
22 #include "runtime.h"
23 #include "scoped_thread_state_change-inl.h"
24
25 namespace art {
26
// Opens a new stack map entry. All subsequent AddDexRegisterEntry(), AddInvoke()
// and inline-info calls apply to this entry until EndStackMapEntry() is called.
// `sp_mask` may be null when no stack slots hold references.
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* sp_mask,
                                        uint32_t num_dex_registers,
                                        uint8_t inlining_depth) {
  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
  DCHECK_NE(dex_pc, static_cast<uint32_t>(-1)) << "invalid dex_pc";
  current_entry_.dex_pc = dex_pc;
  // Native PC offsets are stored compressed; the instruction set determines the scaling.
  current_entry_.native_pc_code_offset = CodeOffset::FromOffset(native_pc_offset, instruction_set_);
  current_entry_.register_mask = register_mask;
  current_entry_.sp_mask = sp_mask;
  current_entry_.inlining_depth = inlining_depth;
  // Inline infos for this entry will be appended contiguously starting here.
  current_entry_.inline_infos_start_index = inline_infos_.size();
  current_entry_.stack_mask_index = 0;
  // kDexNoIndex means "no invoke recorded"; AddInvoke() overwrites this.
  current_entry_.dex_method_index = DexFile::kDexNoIndex;
  current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
  current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
  // The liveness mask is only allocated when there are registers to track.
  current_entry_.dex_register_entry.live_dex_registers_mask = (num_dex_registers != 0)
      ? ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream)
      : nullptr;
  if (sp_mask != nullptr) {
    stack_mask_max_ = std::max(stack_mask_max_, sp_mask->GetHighestBitSet());
  }
  if (inlining_depth > 0) {
    number_of_stack_maps_with_inline_info_++;
  }

  // Running maxima feed the variable-width encoding chosen in PrepareForFillIn().
  dex_pc_max_ = std::max(dex_pc_max_, dex_pc);
  register_mask_max_ = std::max(register_mask_max_, register_mask);
  current_dex_register_ = 0;
}
59
// Closes the entry opened by BeginStackMapEntry(): deduplicates its dex register
// map, appends the entry, and resets the scratch entry (dex_pc back to 0, which
// BeginStackMapEntry() checks).
void StackMapStream::EndStackMapEntry() {
  current_entry_.dex_register_map_index = AddDexRegisterMapEntry(current_entry_.dex_register_entry);
  stack_maps_.push_back(current_entry_);
  current_entry_ = StackMapEntry();
}
65
AddDexRegisterEntry(DexRegisterLocation::Kind kind,int32_t value)66 void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
67 if (kind != DexRegisterLocation::Kind::kNone) {
68 // Ensure we only use non-compressed location kind at this stage.
69 DCHECK(DexRegisterLocation::IsShortLocationKind(kind)) << kind;
70 DexRegisterLocation location(kind, value);
71
72 // Look for Dex register `location` in the location catalog (using the
73 // companion hash map of locations to indices). Use its index if it
74 // is already in the location catalog. If not, insert it (in the
75 // location catalog and the hash map) and use the newly created index.
76 auto it = location_catalog_entries_indices_.Find(location);
77 if (it != location_catalog_entries_indices_.end()) {
78 // Retrieve the index from the hash map.
79 dex_register_locations_.push_back(it->second);
80 } else {
81 // Create a new entry in the location catalog and the hash map.
82 size_t index = location_catalog_entries_.size();
83 location_catalog_entries_.push_back(location);
84 dex_register_locations_.push_back(index);
85 location_catalog_entries_indices_.Insert(std::make_pair(location, index));
86 }
87 DexRegisterMapEntry* const entry = in_inline_frame_
88 ? ¤t_inline_info_.dex_register_entry
89 : ¤t_entry_.dex_register_entry;
90 DCHECK_LT(current_dex_register_, entry->num_dex_registers);
91 entry->live_dex_registers_mask->SetBit(current_dex_register_);
92 entry->hash += (1 <<
93 (current_dex_register_ % (sizeof(DexRegisterMapEntry::hash) * kBitsPerByte)));
94 entry->hash += static_cast<uint32_t>(value);
95 entry->hash += static_cast<uint32_t>(kind);
96 }
97 current_dex_register_++;
98 }
99
// Marks the current stack map as being at an invoke. Setting dex_method_index
// away from kDexNoIndex is what makes this entry emit an InvokeInfo row later.
void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
  current_entry_.invoke_type = invoke_type;
  current_entry_.dex_method_index = dex_method_index;
}
104
// Opens an inline frame nested in the current stack map entry; subsequent
// AddDexRegisterEntry() calls target this frame until EndInlineInfoEntry().
// Nested inline frames are not supported (see the DCHECK below).
void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(!in_inline_frame_);
  in_inline_frame_ = true;
  // Either encode the ArtMethod pointer directly, or fall back to its dex
  // method index (in which case it must come from the same dex file).
  if (EncodeArtMethodInInlineInfo(method)) {
    current_inline_info_.method = method;
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    current_inline_info_.method_index = method->GetDexMethodIndexUnchecked();
  }
  current_inline_info_.dex_pc = dex_pc;
  current_inline_info_.dex_register_entry.num_dex_registers = num_dex_registers;
  current_inline_info_.dex_register_entry.locations_start_index = dex_register_locations_.size();
  // Same lazy-mask convention as BeginStackMapEntry().
  current_inline_info_.dex_register_entry.live_dex_registers_mask = (num_dex_registers != 0)
      ? ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream)
      : nullptr;
  // Register recording restarts at 0 for the inline frame.
  current_dex_register_ = 0;
}
128
// Closes the inline frame opened by BeginInlineInfoEntry(): deduplicates its
// dex register map, appends it to inline_infos_, and resets the scratch entry.
void StackMapStream::EndInlineInfoEntry() {
  current_inline_info_.dex_register_map_index =
      AddDexRegisterMapEntry(current_inline_info_.dex_register_entry);
  DCHECK(in_inline_frame_);
  // Every declared register must have been recorded via AddDexRegisterEntry().
  DCHECK_EQ(current_dex_register_, current_inline_info_.dex_register_entry.num_dex_registers)
      << "Inline information contains less registers than expected";
  in_inline_frame_ = false;
  inline_infos_.push_back(current_inline_info_);
  current_inline_info_ = InlineInfoEntry();
}
139
ComputeMaxNativePcCodeOffset() const140 CodeOffset StackMapStream::ComputeMaxNativePcCodeOffset() const {
141 CodeOffset max_native_pc_offset;
142 for (const StackMapEntry& entry : stack_maps_) {
143 max_native_pc_offset = std::max(max_native_pc_offset, entry.native_pc_code_offset);
144 }
145 return max_native_pc_offset;
146 }
147
// Computes the final variable-width encoding from the accumulated maxima and
// returns the total number of bytes FillInCodeInfo() will need. Must be called
// exactly once, after all entries are recorded and before FillInCodeInfo().
size_t StackMapStream::PrepareForFillIn() {
  CodeInfoEncoding encoding;
  encoding.dex_register_map.num_entries = 0;  // TODO: Remove this field.
  encoding.dex_register_map.num_bytes = ComputeDexRegisterMapsSize();
  encoding.location_catalog.num_entries = location_catalog_entries_.size();
  encoding.location_catalog.num_bytes = ComputeDexRegisterLocationCatalogSize();
  encoding.inline_info.num_entries = inline_infos_.size();
  // Must be done before calling ComputeInlineInfoEncoding since ComputeInlineInfoEncoding requires
  // dex_method_index_idx to be filled in.
  PrepareMethodIndices();
  ComputeInlineInfoEncoding(&encoding.inline_info.encoding,
                            encoding.dex_register_map.num_bytes);
  CodeOffset max_native_pc_offset = ComputeMaxNativePcCodeOffset();
  // Prepare the CodeInfo variable-sized encoding.
  encoding.stack_mask.encoding.num_bits = stack_mask_max_ + 1;  // Need room for max element too.
  encoding.stack_mask.num_entries = PrepareStackMasks(encoding.stack_mask.encoding.num_bits);
  encoding.register_mask.encoding.num_bits = MinimumBitsToStore(register_mask_max_);
  encoding.register_mask.num_entries = PrepareRegisterMasks();
  encoding.stack_map.num_entries = stack_maps_.size();
  // Each field of the stack map gets just enough bits for its maximum value.
  encoding.stack_map.encoding.SetFromSizes(
      // The stack map contains compressed native PC offsets.
      max_native_pc_offset.CompressedValue(),
      dex_pc_max_,
      encoding.dex_register_map.num_bytes,
      encoding.inline_info.num_entries,
      encoding.register_mask.num_entries,
      encoding.stack_mask.num_entries);
  ComputeInvokeInfoEncoding(&encoding);
  DCHECK_EQ(code_info_encoding_.size(), 0u);
  // Serialize the encoding header into code_info_encoding_.
  encoding.Compress(&code_info_encoding_);
  encoding.ComputeTableOffsets();
  // Compute table offsets so we can get the non header size.
  DCHECK_EQ(encoding.HeaderSize(), code_info_encoding_.size());
  needed_size_ = code_info_encoding_.size() + encoding.NonHeaderSize();
  return needed_size_;
}
184
ComputeDexRegisterLocationCatalogSize() const185 size_t StackMapStream::ComputeDexRegisterLocationCatalogSize() const {
186 size_t size = DexRegisterLocationCatalog::kFixedSize;
187 for (const DexRegisterLocation& dex_register_location : location_catalog_entries_) {
188 size += DexRegisterLocationCatalog::EntrySize(dex_register_location);
189 }
190 return size;
191 }
192
ComputeSize(size_t catalog_size) const193 size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) const {
194 // For num_dex_registers == 0u live_dex_registers_mask may be null.
195 if (num_dex_registers == 0u) {
196 return 0u; // No register map will be emitted.
197 }
198 DCHECK(live_dex_registers_mask != nullptr);
199
200 // Size of the map in bytes.
201 size_t size = DexRegisterMap::kFixedSize;
202 // Add the live bit mask for the Dex register liveness.
203 size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
204 // Compute the size of the set of live Dex register entries.
205 size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
206 size_t map_entries_size_in_bits =
207 DexRegisterMap::SingleEntrySizeInBits(catalog_size) * number_of_live_dex_registers;
208 size_t map_entries_size_in_bytes =
209 RoundUp(map_entries_size_in_bits, kBitsPerByte) / kBitsPerByte;
210 size += map_entries_size_in_bytes;
211 return size;
212 }
213
ComputeDexRegisterMapsSize() const214 size_t StackMapStream::ComputeDexRegisterMapsSize() const {
215 size_t size = 0;
216 for (const DexRegisterMapEntry& entry : dex_register_entries_) {
217 size += entry.ComputeSize(location_catalog_entries_.size());
218 }
219 return size;
220 }
221
ComputeInvokeInfoEncoding(CodeInfoEncoding * encoding)222 void StackMapStream::ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding) {
223 DCHECK(encoding != nullptr);
224 uint32_t native_pc_max = 0;
225 uint16_t method_index_max = 0;
226 size_t invoke_infos_count = 0;
227 size_t invoke_type_max = 0;
228 for (const StackMapEntry& entry : stack_maps_) {
229 if (entry.dex_method_index != DexFile::kDexNoIndex) {
230 native_pc_max = std::max(native_pc_max, entry.native_pc_code_offset.CompressedValue());
231 method_index_max = std::max(method_index_max, static_cast<uint16_t>(entry.dex_method_index));
232 invoke_type_max = std::max(invoke_type_max, static_cast<size_t>(entry.invoke_type));
233 ++invoke_infos_count;
234 }
235 }
236 encoding->invoke_info.num_entries = invoke_infos_count;
237 encoding->invoke_info.encoding.SetFromSizes(native_pc_max, invoke_type_max, method_index_max);
238 }
239
// Sizes the InlineInfo table fields from the maxima over all inline entries.
// An entry either encodes the ArtMethod pointer (split into high/low 32 bits
// across the method-index and extra-data fields) or a dex method index idx
// (with extra data 1 to tag the representation).
void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
                                               size_t dex_register_maps_bytes) {
  uint32_t method_index_max = 0;
  // kDexNoIndex doubles as "no dex pc seen yet"; see the comparison below.
  uint32_t dex_pc_max = DexFile::kDexNoIndex;
  uint32_t extra_data_max = 0;

  uint32_t inline_info_index = 0;
  for (const StackMapEntry& entry : stack_maps_) {
    for (size_t j = 0; j < entry.inlining_depth; ++j) {
      InlineInfoEntry inline_entry = inline_infos_[inline_info_index++];
      if (inline_entry.method == nullptr) {
        // Method encoded as an index into the deduplicated method_indices_.
        method_index_max = std::max(method_index_max, inline_entry.dex_method_index_idx);
        extra_data_max = std::max(extra_data_max, 1u);
      } else {
        // Method encoded as a raw ArtMethod pointer split across two fields.
        method_index_max = std::max(
            method_index_max, High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
        extra_data_max = std::max(
            extra_data_max, Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
      }
      // Track the max dex pc while ignoring the kDexNoIndex sentinel.
      if (inline_entry.dex_pc != DexFile::kDexNoIndex &&
          (dex_pc_max == DexFile::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
        dex_pc_max = inline_entry.dex_pc;
      }
    }
  }
  // Every inline info must be reachable from some stack map.
  DCHECK_EQ(inline_info_index, inline_infos_.size());

  encoding->SetFromSizes(method_index_max, dex_pc_max, extra_data_max, dex_register_maps_bytes);
}
269
// Returns the byte offset of `entry`'s dex register map inside
// `dex_register_locations_region`, copying the map there on first use.
// Deduplicated entries that were already copied keep their assigned offset.
// Returns StackMap::kNoDexRegisterMap when the entry needs no map.
size_t StackMapStream::MaybeCopyDexRegisterMap(DexRegisterMapEntry& entry,
                                               size_t* current_offset,
                                               MemoryRegion dex_register_locations_region) {
  DCHECK(current_offset != nullptr);
  if ((entry.num_dex_registers == 0) || (entry.live_dex_registers_mask->NumSetBits() == 0)) {
    // No dex register map needed.
    return StackMap::kNoDexRegisterMap;
  }
  if (entry.offset == DexRegisterMapEntry::kOffsetUnassigned) {
    // Not already copied, need to copy and and assign an offset.
    entry.offset = *current_offset;
    const size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
    DexRegisterMap dex_register_map(
        dex_register_locations_region.Subregion(entry.offset, entry_size));
    // Advance the bump-allocation cursor for the next map.
    *current_offset += entry_size;
    // Fill in the map since it was just added.
    FillInDexRegisterMap(dex_register_map,
                         entry.num_dex_registers,
                         *entry.live_dex_registers_mask,
                         entry.locations_start_index);
  }
  return entry.offset;
}
293
FillInMethodInfo(MemoryRegion region)294 void StackMapStream::FillInMethodInfo(MemoryRegion region) {
295 {
296 MethodInfo info(region.begin(), method_indices_.size());
297 for (size_t i = 0; i < method_indices_.size(); ++i) {
298 info.SetMethodIndex(i, method_indices_[i]);
299 }
300 }
301 if (kIsDebugBuild) {
302 // Check the data matches.
303 MethodInfo info(region.begin());
304 const size_t count = info.NumMethodIndices();
305 DCHECK_EQ(count, method_indices_.size());
306 for (size_t i = 0; i < count; ++i) {
307 DCHECK_EQ(info.GetMethodIndex(i), method_indices_[i]);
308 }
309 }
310 }
311
// Serializes everything recorded so far into `region` (whose size must be
// exactly what PrepareForFillIn() returned): the encoding header, the location
// catalog, all stack maps with their invoke/inline/dex-register data, and the
// deduplicated stack/register mask tables.
void StackMapStream::FillInCodeInfo(MemoryRegion region) {
  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
  DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before FillIn";

  DCHECK_EQ(region.size(), needed_size_);

  // Note that the memory region does not have to be zeroed when we JIT code
  // because we do not use the arena allocator there.

  // Write the CodeInfo header.
  region.CopyFrom(0, MemoryRegion(code_info_encoding_.data(), code_info_encoding_.size()));

  // Re-extract the encoding from the header we just wrote; all table offsets
  // below come from it.
  CodeInfo code_info(region);
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  DCHECK_EQ(encoding.stack_map.num_entries, stack_maps_.size());

  MemoryRegion dex_register_locations_region = region.Subregion(
      encoding.dex_register_map.byte_offset,
      encoding.dex_register_map.num_bytes);

  // Set the Dex register location catalog.
  MemoryRegion dex_register_location_catalog_region = region.Subregion(
      encoding.location_catalog.byte_offset,
      encoding.location_catalog.num_bytes);
  DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
  // Offset in `dex_register_location_catalog` where to store the next
  // register location.
  size_t location_catalog_offset = DexRegisterLocationCatalog::kFixedSize;
  for (DexRegisterLocation dex_register_location : location_catalog_entries_) {
    dex_register_location_catalog.SetRegisterInfo(location_catalog_offset, dex_register_location);
    location_catalog_offset += DexRegisterLocationCatalog::EntrySize(dex_register_location);
  }
  // Ensure we reached the end of the Dex registers location_catalog.
  DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());

  ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
  // Bump-allocation cursors into the dex register map region and inline table.
  uintptr_t next_dex_register_map_offset = 0;
  uintptr_t next_inline_info_index = 0;
  size_t invoke_info_idx = 0;
  for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {
    StackMap stack_map = code_info.GetStackMapAt(i, encoding);
    StackMapEntry entry = stack_maps_[i];

    stack_map.SetDexPc(encoding.stack_map.encoding, entry.dex_pc);
    stack_map.SetNativePcCodeOffset(encoding.stack_map.encoding, entry.native_pc_code_offset);
    stack_map.SetRegisterMaskIndex(encoding.stack_map.encoding, entry.register_mask_index);
    stack_map.SetStackMaskIndex(encoding.stack_map.encoding, entry.stack_mask_index);

    // Copy-on-first-use: deduplicated maps are written once and shared.
    size_t offset = MaybeCopyDexRegisterMap(dex_register_entries_[entry.dex_register_map_index],
                                            &next_dex_register_map_offset,
                                            dex_register_locations_region);
    stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, offset);

    // Emit an InvokeInfo row only for entries that recorded an invoke.
    if (entry.dex_method_index != DexFile::kDexNoIndex) {
      InvokeInfo invoke_info(code_info.GetInvokeInfo(encoding, invoke_info_idx));
      invoke_info.SetNativePcCodeOffset(encoding.invoke_info.encoding, entry.native_pc_code_offset);
      invoke_info.SetInvokeType(encoding.invoke_info.encoding, entry.invoke_type);
      invoke_info.SetMethodIndexIdx(encoding.invoke_info.encoding, entry.dex_method_index_idx);
      ++invoke_info_idx;
    }

    // Set the inlining info.
    if (entry.inlining_depth != 0) {
      InlineInfo inline_info = code_info.GetInlineInfo(next_inline_info_index, encoding);

      // Fill in the index.
      stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, next_inline_info_index);
      DCHECK_EQ(next_inline_info_index, entry.inline_infos_start_index);
      next_inline_info_index += entry.inlining_depth;

      inline_info.SetDepth(encoding.inline_info.encoding, entry.inlining_depth);
      DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());

      for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
        InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
        if (inline_entry.method != nullptr) {
          // ArtMethod pointer split across two 32-bit fields (see
          // ComputeInlineInfoEncoding which sized them accordingly).
          inline_info.SetMethodIndexIdxAtDepth(
              encoding.inline_info.encoding,
              depth,
              High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
          inline_info.SetExtraDataAtDepth(
              encoding.inline_info.encoding,
              depth,
              Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
        } else {
          // Method index representation; extra data 1 tags it as such.
          inline_info.SetMethodIndexIdxAtDepth(encoding.inline_info.encoding,
                                               depth,
                                               inline_entry.dex_method_index_idx);
          inline_info.SetExtraDataAtDepth(encoding.inline_info.encoding, depth, 1);
        }
        inline_info.SetDexPcAtDepth(encoding.inline_info.encoding, depth, inline_entry.dex_pc);
        size_t dex_register_map_offset = MaybeCopyDexRegisterMap(
            dex_register_entries_[inline_entry.dex_register_map_index],
            &next_dex_register_map_offset,
            dex_register_locations_region);
        inline_info.SetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding,
                                                   depth,
                                                   dex_register_map_offset);
      }
    } else if (encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
      // Field exists for other entries; mark this one as having no inline info.
      stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, StackMap::kNoInlineInfo);
    }
  }

  // Write stack masks table.
  const size_t stack_mask_bits = encoding.stack_mask.encoding.BitSize();
  if (stack_mask_bits > 0) {
    size_t stack_mask_bytes = RoundUp(stack_mask_bits, kBitsPerByte) / kBitsPerByte;
    for (size_t i = 0; i < encoding.stack_mask.num_entries; ++i) {
      // stack_masks_ holds the masks byte-packed (see PrepareStackMasks);
      // repack them bit-packed into the final region.
      MemoryRegion source(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes);
      BitMemoryRegion stack_mask = code_info.GetStackMask(i, encoding);
      for (size_t bit_index = 0; bit_index < stack_mask_bits; ++bit_index) {
        stack_mask.StoreBit(bit_index, source.LoadBit(bit_index));
      }
    }
  }

  // Write register masks table.
  for (size_t i = 0; i < encoding.register_mask.num_entries; ++i) {
    BitMemoryRegion register_mask = code_info.GetRegisterMask(i, encoding);
    register_mask.StoreBits(0, register_masks_[i], encoding.register_mask.encoding.BitSize());
  }

  // Verify all written data in debug build.
  if (kIsDebugBuild) {
    CheckCodeInfo(region);
  }
}
440
FillInDexRegisterMap(DexRegisterMap dex_register_map,uint32_t num_dex_registers,const BitVector & live_dex_registers_mask,uint32_t start_index_in_dex_register_locations) const441 void StackMapStream::FillInDexRegisterMap(DexRegisterMap dex_register_map,
442 uint32_t num_dex_registers,
443 const BitVector& live_dex_registers_mask,
444 uint32_t start_index_in_dex_register_locations) const {
445 dex_register_map.SetLiveBitMask(num_dex_registers, live_dex_registers_mask);
446 // Set the dex register location mapping data.
447 size_t number_of_live_dex_registers = live_dex_registers_mask.NumSetBits();
448 DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
449 DCHECK_LE(start_index_in_dex_register_locations,
450 dex_register_locations_.size() - number_of_live_dex_registers);
451 for (size_t index_in_dex_register_locations = 0;
452 index_in_dex_register_locations != number_of_live_dex_registers;
453 ++index_in_dex_register_locations) {
454 size_t location_catalog_entry_index = dex_register_locations_[
455 start_index_in_dex_register_locations + index_in_dex_register_locations];
456 dex_register_map.SetLocationCatalogEntryIndex(
457 index_in_dex_register_locations,
458 location_catalog_entry_index,
459 num_dex_registers,
460 location_catalog_entries_.size());
461 }
462 }
463
// Deduplicates `entry` against previously added dex register maps, using the
// rolling hash built in AddDexRegisterEntry() as a bucket key. Returns the
// index of an equal existing entry, or of the newly appended one.
size_t StackMapStream::AddDexRegisterMapEntry(const DexRegisterMapEntry& entry) {
  const size_t current_entry_index = dex_register_entries_.size();
  auto entries_it = dex_map_hash_to_stack_map_indices_.find(entry.hash);
  if (entries_it == dex_map_hash_to_stack_map_indices_.end()) {
    // We don't have a perfect hash functions so we need a list to collect all stack maps
    // which might have the same dex register map.
    ArenaVector<uint32_t> stack_map_indices(allocator_->Adapter(kArenaAllocStackMapStream));
    stack_map_indices.push_back(current_entry_index);
    dex_map_hash_to_stack_map_indices_.Put(entry.hash, std::move(stack_map_indices));
  } else {
    // We might have collisions, so we need to check whether or not we really have a match.
    for (uint32_t test_entry_index : entries_it->second) {
      if (DexRegisterMapEntryEquals(dex_register_entries_[test_entry_index], entry)) {
        // Exact duplicate found; reuse its index.
        return test_entry_index;
      }
    }
    // Hash collision but no equal entry: record the new index in the bucket.
    entries_it->second.push_back(current_entry_index);
  }
  dex_register_entries_.push_back(entry);
  return current_entry_index;
}
485
DexRegisterMapEntryEquals(const DexRegisterMapEntry & a,const DexRegisterMapEntry & b) const486 bool StackMapStream::DexRegisterMapEntryEquals(const DexRegisterMapEntry& a,
487 const DexRegisterMapEntry& b) const {
488 if ((a.live_dex_registers_mask == nullptr) != (b.live_dex_registers_mask == nullptr)) {
489 return false;
490 }
491 if (a.num_dex_registers != b.num_dex_registers) {
492 return false;
493 }
494 if (a.num_dex_registers != 0u) {
495 DCHECK(a.live_dex_registers_mask != nullptr);
496 DCHECK(b.live_dex_registers_mask != nullptr);
497 if (!a.live_dex_registers_mask->Equal(b.live_dex_registers_mask)) {
498 return false;
499 }
500 size_t number_of_live_dex_registers = a.live_dex_registers_mask->NumSetBits();
501 DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
502 DCHECK_LE(a.locations_start_index,
503 dex_register_locations_.size() - number_of_live_dex_registers);
504 DCHECK_LE(b.locations_start_index,
505 dex_register_locations_.size() - number_of_live_dex_registers);
506 auto a_begin = dex_register_locations_.begin() + a.locations_start_index;
507 auto b_begin = dex_register_locations_.begin() + b.locations_start_index;
508 if (!std::equal(a_begin, a_begin + number_of_live_dex_registers, b_begin)) {
509 return false;
510 }
511 }
512 return true;
513 }
514
515 // Helper for CheckCodeInfo - check that register map has the expected content.
// Helper for CheckCodeInfo - check that register map has the expected content.
// Compares each register's encoded location against the location that was
// originally recorded (dead registers must read back as not live).
void StackMapStream::CheckDexRegisterMap(const CodeInfo& code_info,
                                         const DexRegisterMap& dex_register_map,
                                         size_t num_dex_registers,
                                         BitVector* live_dex_registers_mask,
                                         size_t dex_register_locations_index) const {
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  for (size_t reg = 0; reg < num_dex_registers; reg++) {
    // Find the location we tried to encode.
    DexRegisterLocation expected = DexRegisterLocation::None();
    if (live_dex_registers_mask->IsBitSet(reg)) {
      // Live registers consume catalog indices in recording order.
      size_t catalog_index = dex_register_locations_[dex_register_locations_index++];
      expected = location_catalog_entries_[catalog_index];
    }
    // Compare to the seen location.
    if (expected.GetKind() == DexRegisterLocation::Kind::kNone) {
      DCHECK(!dex_register_map.IsValid() || !dex_register_map.IsDexRegisterLive(reg))
          << dex_register_map.IsValid() << " " << dex_register_map.IsDexRegisterLive(reg);
    } else {
      DCHECK(dex_register_map.IsDexRegisterLive(reg));
      DexRegisterLocation seen = dex_register_map.GetDexRegisterLocation(
          reg, num_dex_registers, code_info, encoding);
      DCHECK_EQ(expected.GetKind(), seen.GetKind());
      DCHECK_EQ(expected.GetValue(), seen.GetValue());
    }
  }
  // An entry with no registers must not have emitted a map at all.
  if (num_dex_registers == 0) {
    DCHECK(!dex_register_map.IsValid());
  }
}
545
// Deduplicates register masks: assigns each stack map a mask index and packs
// the unique masks at the front of register_masks_. Returns the unique count.
size_t StackMapStream::PrepareRegisterMasks() {
  register_masks_.resize(stack_maps_.size(), 0u);
  ArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    const size_t index = dedupe.size();
    // emplace() keeps the existing index on duplicates, so `first->second` is
    // either `index` (new mask) or the previously assigned index.
    stack_map.register_mask_index = dedupe.emplace(stack_map.register_mask, index).first->second;
    // Redundant for duplicates (rewrites the same value at the same slot).
    register_masks_[index] = stack_map.register_mask;
  }
  return dedupe.size();
}
556
// Deduplicates the dex method indices referenced by invokes and inline infos
// into method_indices_, and stores each entry's index into the table
// (dex_method_index_idx). Must run before ComputeInlineInfoEncoding().
void StackMapStream::PrepareMethodIndices() {
  CHECK(method_indices_.empty());
  // Upper bound; trimmed to the deduplicated size at the end.
  method_indices_.resize(stack_maps_.size() + inline_infos_.size());
  ArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    const size_t index = dedupe.size();
    const uint32_t method_index = stack_map.dex_method_index;
    // Only entries that recorded an invoke contribute a method index.
    if (method_index != DexFile::kDexNoIndex) {
      stack_map.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
      method_indices_[index] = method_index;
    }
  }
  for (InlineInfoEntry& inline_info : inline_infos_) {
    const size_t index = dedupe.size();
    const uint32_t method_index = inline_info.method_index;
    CHECK_NE(method_index, DexFile::kDexNoIndex);
    inline_info.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
    method_indices_[index] = method_index;
  }
  // Shrink to the number of unique indices actually stored.
  method_indices_.resize(dedupe.size());
}
578
579
// Deduplicates stack masks: materializes each stack map's sp_mask into the
// byte-packed stack_masks_ buffer, assigns mask indices, and returns the
// number of unique masks.
size_t StackMapStream::PrepareStackMasks(size_t entry_size_in_bits) {
  // Preallocate memory since we do not want it to move (the dedup map will point into it).
  const size_t byte_entry_size = RoundUp(entry_size_in_bits, kBitsPerByte) / kBitsPerByte;
  stack_masks_.resize(byte_entry_size * stack_maps_.size(), 0u);
  // For deduplicating we store the stack masks as byte packed for simplicity. We can bit pack later
  // when copying out from stack_masks_.
  ArenaUnorderedMap<MemoryRegion,
                    size_t,
                    FNVHash<MemoryRegion>,
                    MemoryRegion::ContentEquals> dedup(
                        stack_maps_.size(), allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    size_t index = dedup.size();
    // Write the candidate mask into the next free slot; a null sp_mask encodes
    // as all-zero bits.
    MemoryRegion stack_mask(stack_masks_.data() + index * byte_entry_size, byte_entry_size);
    for (size_t i = 0; i < entry_size_in_bits; i++) {
      stack_mask.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
    }
    // If an identical mask already exists, emplace() keeps its index and the
    // candidate slot is simply overwritten by the next iteration.
    stack_map.stack_mask_index = dedup.emplace(stack_mask, index).first->second;
  }
  return dedup.size();
}
601
602 // Check that all StackMapStream inputs are correctly encoded by trying to read them back.
// Check that all StackMapStream inputs are correctly encoded by trying to read them back.
// Debug-build-only verification: decodes every stack map, invoke info, stack
// mask, dex register map and inline info from `region` and DCHECKs each field
// against the recorded entries.
void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
  CodeInfo code_info(region);
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  DCHECK_EQ(code_info.GetNumberOfStackMaps(encoding), stack_maps_.size());
  // Invoke infos are emitted densely in stack-map order; track our position.
  size_t invoke_info_index = 0;
  for (size_t s = 0; s < stack_maps_.size(); ++s) {
    const StackMap stack_map = code_info.GetStackMapAt(s, encoding);
    const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
    StackMapEntry entry = stack_maps_[s];

    // Check main stack map fields.
    DCHECK_EQ(stack_map.GetNativePcOffset(stack_map_encoding, instruction_set_),
              entry.native_pc_code_offset.Uint32Value(instruction_set_));
    DCHECK_EQ(stack_map.GetDexPc(stack_map_encoding), entry.dex_pc);
    DCHECK_EQ(stack_map.GetRegisterMaskIndex(stack_map_encoding), entry.register_mask_index);
    DCHECK_EQ(code_info.GetRegisterMaskOf(encoding, stack_map), entry.register_mask);
    const size_t num_stack_mask_bits = code_info.GetNumberOfStackMaskBits(encoding);
    DCHECK_EQ(stack_map.GetStackMaskIndex(stack_map_encoding), entry.stack_mask_index);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
    if (entry.sp_mask != nullptr) {
      DCHECK_GE(stack_mask.size_in_bits(), entry.sp_mask->GetNumberOfBits());
      for (size_t b = 0; b < num_stack_mask_bits; b++) {
        DCHECK_EQ(stack_mask.LoadBit(b), entry.sp_mask->IsBitSet(b));
      }
    } else {
      // A null sp_mask was encoded as all zeros (see PrepareStackMasks).
      for (size_t b = 0; b < num_stack_mask_bits; b++) {
        DCHECK_EQ(stack_mask.LoadBit(b), 0u);
      }
    }
    // Only entries that recorded an invoke have an InvokeInfo row.
    if (entry.dex_method_index != DexFile::kDexNoIndex) {
      InvokeInfo invoke_info = code_info.GetInvokeInfo(encoding, invoke_info_index);
      DCHECK_EQ(invoke_info.GetNativePcOffset(encoding.invoke_info.encoding, instruction_set_),
                entry.native_pc_code_offset.Uint32Value(instruction_set_));
      DCHECK_EQ(invoke_info.GetInvokeType(encoding.invoke_info.encoding), entry.invoke_type);
      DCHECK_EQ(invoke_info.GetMethodIndexIdx(encoding.invoke_info.encoding),
                entry.dex_method_index_idx);
      invoke_info_index++;
    }
    CheckDexRegisterMap(code_info,
                        code_info.GetDexRegisterMapOf(
                            stack_map, encoding, entry.dex_register_entry.num_dex_registers),
                        entry.dex_register_entry.num_dex_registers,
                        entry.dex_register_entry.live_dex_registers_mask,
                        entry.dex_register_entry.locations_start_index);

    // Check inline info.
    DCHECK_EQ(stack_map.HasInlineInfo(stack_map_encoding), (entry.inlining_depth != 0));
    if (entry.inlining_depth != 0) {
      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
      DCHECK_EQ(inline_info.GetDepth(encoding.inline_info.encoding), entry.inlining_depth);
      for (size_t d = 0; d < entry.inlining_depth; ++d) {
        size_t inline_info_index = entry.inline_infos_start_index + d;
        DCHECK_LT(inline_info_index, inline_infos_.size());
        InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
        DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, d),
                  inline_entry.dex_pc);
        // Verify whichever method representation was encoded (pointer vs index).
        if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, d)) {
          DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info.encoding, d),
                    inline_entry.method);
        } else {
          const size_t method_index_idx =
              inline_info.GetMethodIndexIdxAtDepth(encoding.inline_info.encoding, d);
          DCHECK_EQ(method_index_idx, inline_entry.dex_method_index_idx);
          DCHECK_EQ(method_indices_[method_index_idx], inline_entry.method_index);
        }

        CheckDexRegisterMap(code_info,
                            code_info.GetDexRegisterMapAtDepth(
                                d,
                                inline_info,
                                encoding,
                                inline_entry.dex_register_entry.num_dex_registers),
                            inline_entry.dex_register_entry.num_dex_registers,
                            inline_entry.dex_register_entry.live_dex_registers_mask,
                            inline_entry.dex_register_entry.locations_start_index);
      }
    }
  }
}
682
ComputeMethodInfoSize() const683 size_t StackMapStream::ComputeMethodInfoSize() const {
684 DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before " << __FUNCTION__;
685 return MethodInfo::ComputeSize(method_indices_.size());
686 }
687
688 } // namespace art
689