/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack_map_stream.h"

#include "art_method-inl.h"
#include "base/stl_util.h"
#include "dex/dex_file_types.h"
#include "optimizing/optimizing_compiler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"

namespace art {
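
// A rough sketch of the expected call sequence, inferred from the DCHECKs in
// this file (the real call sites live in the optimizing compiler's code
// generator and are not shown here):
//
//   for each safepoint:
//     stream.BeginStackMapEntry(dex_pc, native_pc_offset, register_mask,
//                               sp_mask, num_dex_registers, inlining_depth);
//     stream.AddDexRegisterEntry(kind, value);  // Once per dex register.
//     ...                                       // Optional inline frames.
//     stream.EndStackMapEntry();
//
//   size_t size = stream.PrepareForFillIn();  // Computes the encoding sizes.
//   stream.FillInCodeInfo(MemoryRegion(code_info_memory, size));
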
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
                                        uint32_t native_pc_offset,
                                        uint32_t register_mask,
                                        BitVector* sp_mask,
                                        uint32_t num_dex_registers,
                                        uint8_t inlining_depth) {
  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
  current_entry_.dex_pc = dex_pc;
  current_entry_.native_pc_code_offset = CodeOffset::FromOffset(native_pc_offset, instruction_set_);
  current_entry_.register_mask = register_mask;
  current_entry_.sp_mask = sp_mask;
  current_entry_.inlining_depth = inlining_depth;
  current_entry_.inline_infos_start_index = inline_infos_.size();
  current_entry_.stack_mask_index = 0;
  current_entry_.dex_method_index = dex::kDexNoIndex;
  current_entry_.dex_register_entry.num_dex_registers = num_dex_registers;
  current_entry_.dex_register_entry.locations_start_index = dex_register_locations_.size();
  current_entry_.dex_register_entry.live_dex_registers_mask = nullptr;
  if (num_dex_registers != 0u) {
    current_entry_.dex_register_entry.live_dex_registers_mask =
        ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
    current_entry_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
  }
  if (sp_mask != nullptr) {
    stack_mask_max_ = std::max(stack_mask_max_, sp_mask->GetHighestBitSet());
  }
  if (inlining_depth > 0) {
    number_of_stack_maps_with_inline_info_++;
  }

  // Note: dex_pc can be kNoDexPc for native method intrinsics.
  if (dex_pc != dex::kDexNoIndex && (dex_pc_max_ == dex::kDexNoIndex || dex_pc_max_ < dex_pc)) {
    dex_pc_max_ = dex_pc;
  }
  register_mask_max_ = std::max(register_mask_max_, register_mask);
  current_dex_register_ = 0;
}

void StackMapStream::EndStackMapEntry() {
  current_entry_.dex_register_map_index = AddDexRegisterMapEntry(current_entry_.dex_register_entry);
  stack_maps_.push_back(current_entry_);
  current_entry_ = StackMapEntry();
}

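// Must be called once per dex register, in register order, between
// BeginStackMapEntry()/BeginInlineInfoEntry() and the matching End call.
// Passing kNone marks the register as dead: nothing is recorded and only the
// register counter advances.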
void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value) {
  if (kind != DexRegisterLocation::Kind::kNone) {
    // Ensure we only use non-compressed location kinds at this stage.
    DCHECK(DexRegisterLocation::IsShortLocationKind(kind)) << kind;
    DexRegisterLocation location(kind, value);

    // Look for Dex register `location` in the location catalog (using the
    // companion hash map of locations to indices). Use its index if it
    // is already in the location catalog. If not, insert it (in the
    // location catalog and the hash map) and use the newly created index.
    auto it = location_catalog_entries_indices_.Find(location);
    if (it != location_catalog_entries_indices_.end()) {
      // Retrieve the index from the hash map.
      dex_register_locations_.push_back(it->second);
    } else {
      // Create a new entry in the location catalog and the hash map.
      size_t index = location_catalog_entries_.size();
      location_catalog_entries_.push_back(location);
      dex_register_locations_.push_back(index);
      location_catalog_entries_indices_.Insert(std::make_pair(location, index));
    }
    DexRegisterMapEntry* const entry = in_inline_frame_
        ? &current_inline_info_.dex_register_entry
        : &current_entry_.dex_register_entry;
    DCHECK_LT(current_dex_register_, entry->num_dex_registers);
    entry->live_dex_registers_mask->SetBit(current_dex_register_);
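    // Fold the register index, value, and kind into a cheap, order-sensitive
    // fingerprint. The hash only needs to be good enough to bucket candidate
    // maps in AddDexRegisterMapEntry(); collisions are resolved there with a
    // full comparison (DexRegisterMapEntryEquals).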
    entry->hash += (1 <<
        (current_dex_register_ % (sizeof(DexRegisterMapEntry::hash) * kBitsPerByte)));
    entry->hash += static_cast<uint32_t>(value);
    entry->hash += static_cast<uint32_t>(kind);
  }
  current_dex_register_++;
}

void StackMapStream::AddInvoke(InvokeType invoke_type, uint32_t dex_method_index) {
  current_entry_.invoke_type = invoke_type;
  current_entry_.dex_method_index = dex_method_index;
}

void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
                                          uint32_t dex_pc,
                                          uint32_t num_dex_registers,
                                          const DexFile* outer_dex_file) {
  DCHECK(!in_inline_frame_);
  in_inline_frame_ = true;
  if (EncodeArtMethodInInlineInfo(method)) {
    current_inline_info_.method = method;
  } else {
    if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
      ScopedObjectAccess soa(Thread::Current());
      DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
    }
    current_inline_info_.method_index = method->GetDexMethodIndexUnchecked();
  }
  current_inline_info_.dex_pc = dex_pc;
  current_inline_info_.dex_register_entry.num_dex_registers = num_dex_registers;
  current_inline_info_.dex_register_entry.locations_start_index = dex_register_locations_.size();
  current_inline_info_.dex_register_entry.live_dex_registers_mask = nullptr;
  if (num_dex_registers != 0) {
    current_inline_info_.dex_register_entry.live_dex_registers_mask =
        ArenaBitVector::Create(allocator_, num_dex_registers, true, kArenaAllocStackMapStream);
    current_inline_info_.dex_register_entry.live_dex_registers_mask->ClearAllBits();
  }
  current_dex_register_ = 0;
}

void StackMapStream::EndInlineInfoEntry() {
  current_inline_info_.dex_register_map_index =
      AddDexRegisterMapEntry(current_inline_info_.dex_register_entry);
  DCHECK(in_inline_frame_);
  DCHECK_EQ(current_dex_register_, current_inline_info_.dex_register_entry.num_dex_registers)
      << "Inline information contains fewer registers than expected";
  in_inline_frame_ = false;
  inline_infos_.push_back(current_inline_info_);
  current_inline_info_ = InlineInfoEntry();
}

CodeOffset StackMapStream::ComputeMaxNativePcCodeOffset() const {
  CodeOffset max_native_pc_offset;
  for (const StackMapEntry& entry : stack_maps_) {
    max_native_pc_offset = std::max(max_native_pc_offset, entry.native_pc_code_offset);
  }
  return max_native_pc_offset;
}

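// First phase of the two-phase emission protocol: compute the variable-sized
// encoding (field bit widths, table sizes and offsets) from the recorded
// maxima, and return the total number of bytes the caller must allocate
// before invoking FillInCodeInfo() / FillInMethodInfo().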
size_t StackMapStream::PrepareForFillIn() {
  CodeInfoEncoding encoding;
  encoding.dex_register_map.num_entries = 0;  // TODO: Remove this field.
  encoding.dex_register_map.num_bytes = ComputeDexRegisterMapsSize();
  encoding.location_catalog.num_entries = location_catalog_entries_.size();
  encoding.location_catalog.num_bytes = ComputeDexRegisterLocationCatalogSize();
  encoding.inline_info.num_entries = inline_infos_.size();
  // Must be done before calling ComputeInlineInfoEncoding since ComputeInlineInfoEncoding requires
  // dex_method_index_idx to be filled in.
  PrepareMethodIndices();
  ComputeInlineInfoEncoding(&encoding.inline_info.encoding,
                            encoding.dex_register_map.num_bytes);
  CodeOffset max_native_pc_offset = ComputeMaxNativePcCodeOffset();
  // Prepare the CodeInfo variable-sized encoding.
  encoding.stack_mask.encoding.num_bits = stack_mask_max_ + 1;  // Need room for max element too.
  encoding.stack_mask.num_entries = PrepareStackMasks(encoding.stack_mask.encoding.num_bits);
  encoding.register_mask.encoding.num_bits = MinimumBitsToStore(register_mask_max_);
  encoding.register_mask.num_entries = PrepareRegisterMasks();
  encoding.stack_map.num_entries = stack_maps_.size();
  encoding.stack_map.encoding.SetFromSizes(
      // The stack map contains compressed native PC offsets.
      max_native_pc_offset.CompressedValue(),
      dex_pc_max_,
      encoding.dex_register_map.num_bytes,
      encoding.inline_info.num_entries,
      encoding.register_mask.num_entries,
      encoding.stack_mask.num_entries);
  ComputeInvokeInfoEncoding(&encoding);
  DCHECK_EQ(code_info_encoding_.size(), 0u);
  encoding.Compress(&code_info_encoding_);
  // Compute table offsets so we can get the non-header size.
  encoding.ComputeTableOffsets();
  DCHECK_EQ(encoding.HeaderSize(), code_info_encoding_.size());
  needed_size_ = code_info_encoding_.size() + encoding.NonHeaderSize();
  return needed_size_;
}

size_t StackMapStream::ComputeDexRegisterLocationCatalogSize() const {
  size_t size = DexRegisterLocationCatalog::kFixedSize;
  for (const DexRegisterLocation& dex_register_location : location_catalog_entries_) {
    size += DexRegisterLocationCatalog::EntrySize(dex_register_location);
  }
  return size;
}

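// Layout of one DexRegisterMap, as assumed by the size computation below: a
// fixed-size header, a live-register bit mask (one bit per dex register), and
// one packed catalog index per live register, each index being
// SingleEntrySizeInBits(catalog_size) bits wide.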
size_t StackMapStream::DexRegisterMapEntry::ComputeSize(size_t catalog_size) const {
  // For num_dex_registers == 0u, live_dex_registers_mask may be null.
  if (num_dex_registers == 0u) {
    return 0u;  // No register map will be emitted.
  }
  DCHECK(live_dex_registers_mask != nullptr);

  // Size of the map in bytes.
  size_t size = DexRegisterMap::kFixedSize;
  // Add the live bit mask for the Dex register liveness.
  size += DexRegisterMap::GetLiveBitMaskSize(num_dex_registers);
  // Compute the size of the set of live Dex register entries.
  size_t number_of_live_dex_registers = live_dex_registers_mask->NumSetBits();
  size_t map_entries_size_in_bits =
      DexRegisterMap::SingleEntrySizeInBits(catalog_size) * number_of_live_dex_registers;
  size_t map_entries_size_in_bytes =
      RoundUp(map_entries_size_in_bits, kBitsPerByte) / kBitsPerByte;
  size += map_entries_size_in_bytes;
  return size;
}

size_t StackMapStream::ComputeDexRegisterMapsSize() const {
  size_t size = 0;
  for (const DexRegisterMapEntry& entry : dex_register_entries_) {
    size += entry.ComputeSize(location_catalog_entries_.size());
  }
  return size;
}

void StackMapStream::ComputeInvokeInfoEncoding(CodeInfoEncoding* encoding) {
  DCHECK(encoding != nullptr);
  uint32_t native_pc_max = 0;
  uint16_t method_index_max = 0;
  size_t invoke_infos_count = 0;
  size_t invoke_type_max = 0;
  for (const StackMapEntry& entry : stack_maps_) {
    if (entry.dex_method_index != dex::kDexNoIndex) {
      native_pc_max = std::max(native_pc_max, entry.native_pc_code_offset.CompressedValue());
      method_index_max = std::max(method_index_max, static_cast<uint16_t>(entry.dex_method_index));
      invoke_type_max = std::max(invoke_type_max, static_cast<size_t>(entry.invoke_type));
      ++invoke_infos_count;
    }
  }
  encoding->invoke_info.num_entries = invoke_infos_count;
  encoding->invoke_info.encoding.SetFromSizes(native_pc_max, invoke_type_max, method_index_max);
}

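// An inline frame stores either a dex method index or a direct ArtMethod*.
// When the ArtMethod* is encoded (see FillInCodeInfo), the pointer is split
// across two fields: the high 32 bits go into the method index field and the
// low 32 bits into the extra data field; otherwise the extra data field holds
// the constant 1 as a tag. The maxima below size both fields accordingly.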
void StackMapStream::ComputeInlineInfoEncoding(InlineInfoEncoding* encoding,
                                               size_t dex_register_maps_bytes) {
  uint32_t method_index_max = 0;
  uint32_t dex_pc_max = dex::kDexNoIndex;
  uint32_t extra_data_max = 0;

  uint32_t inline_info_index = 0;
  for (const StackMapEntry& entry : stack_maps_) {
    for (size_t j = 0; j < entry.inlining_depth; ++j) {
      InlineInfoEntry inline_entry = inline_infos_[inline_info_index++];
      if (inline_entry.method == nullptr) {
        method_index_max = std::max(method_index_max, inline_entry.dex_method_index_idx);
        extra_data_max = std::max(extra_data_max, 1u);
      } else {
        method_index_max = std::max(
            method_index_max, High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
        extra_data_max = std::max(
            extra_data_max, Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
      }
      if (inline_entry.dex_pc != dex::kDexNoIndex &&
          (dex_pc_max == dex::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
        dex_pc_max = inline_entry.dex_pc;
      }
    }
  }
  DCHECK_EQ(inline_info_index, inline_infos_.size());

  encoding->SetFromSizes(method_index_max, dex_pc_max, extra_data_max, dex_register_maps_bytes);
}

size_t StackMapStream::MaybeCopyDexRegisterMap(DexRegisterMapEntry& entry,
                                               size_t* current_offset,
                                               MemoryRegion dex_register_locations_region) {
  DCHECK(current_offset != nullptr);
  if ((entry.num_dex_registers == 0) || (entry.live_dex_registers_mask->NumSetBits() == 0)) {
    // No dex register map needed.
    return StackMap::kNoDexRegisterMap;
  }
  if (entry.offset == DexRegisterMapEntry::kOffsetUnassigned) {
    // Not copied yet; copy the map and assign it an offset.
    entry.offset = *current_offset;
    const size_t entry_size = entry.ComputeSize(location_catalog_entries_.size());
    DexRegisterMap dex_register_map(
        dex_register_locations_region.Subregion(entry.offset, entry_size));
    *current_offset += entry_size;
    // Fill in the map since it was just added.
    FillInDexRegisterMap(dex_register_map,
                         entry.num_dex_registers,
                         *entry.live_dex_registers_mask,
                         entry.locations_start_index);
  }
  return entry.offset;
}

void StackMapStream::FillInMethodInfo(MemoryRegion region) {
  {
    MethodInfo info(region.begin(), method_indices_.size());
    for (size_t i = 0; i < method_indices_.size(); ++i) {
      info.SetMethodIndex(i, method_indices_[i]);
    }
  }
  if (kIsDebugBuild) {
    // Check the data matches.
    MethodInfo info(region.begin());
    const size_t count = info.NumMethodIndices();
    DCHECK_EQ(count, method_indices_.size());
    for (size_t i = 0; i < count; ++i) {
      DCHECK_EQ(info.GetMethodIndex(i), method_indices_[i]);
    }
  }
}

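// Second phase of the emission protocol: serialize everything computed by
// PrepareForFillIn() into `region`: the compressed CodeInfo header, the dex
// register location catalog, the per-stack-map fields (with dex register maps
// copied on first use and shared afterwards via their cached offsets), the
// inline infos, and finally the deduplicated stack mask and register mask
// tables.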
void StackMapStream::FillInCodeInfo(MemoryRegion region) {
  DCHECK_EQ(0u, current_entry_.dex_pc) << "EndStackMapEntry not called after BeginStackMapEntry";
  DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before FillIn";

  DCHECK_EQ(region.size(), needed_size_);

  // Note that the memory region does not have to be zeroed when we JIT code
  // because we do not use the arena allocator there.

  // Write the CodeInfo header.
  region.CopyFrom(0, MemoryRegion(code_info_encoding_.data(), code_info_encoding_.size()));

  CodeInfo code_info(region);
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  DCHECK_EQ(encoding.stack_map.num_entries, stack_maps_.size());

  MemoryRegion dex_register_locations_region = region.Subregion(
      encoding.dex_register_map.byte_offset,
      encoding.dex_register_map.num_bytes);

  // Set the Dex register location catalog.
  MemoryRegion dex_register_location_catalog_region = region.Subregion(
      encoding.location_catalog.byte_offset,
      encoding.location_catalog.num_bytes);
  DexRegisterLocationCatalog dex_register_location_catalog(dex_register_location_catalog_region);
  // Offset in `dex_register_location_catalog` where to store the next
  // register location.
  size_t location_catalog_offset = DexRegisterLocationCatalog::kFixedSize;
  for (DexRegisterLocation dex_register_location : location_catalog_entries_) {
    dex_register_location_catalog.SetRegisterInfo(location_catalog_offset, dex_register_location);
    location_catalog_offset += DexRegisterLocationCatalog::EntrySize(dex_register_location);
  }
  // Ensure we reached the end of the Dex registers location_catalog.
  DCHECK_EQ(location_catalog_offset, dex_register_location_catalog_region.size());

  ArenaBitVector empty_bitmask(allocator_, 0, /* expandable */ false, kArenaAllocStackMapStream);
  uintptr_t next_dex_register_map_offset = 0;
  uintptr_t next_inline_info_index = 0;
  size_t invoke_info_idx = 0;
  for (size_t i = 0, e = stack_maps_.size(); i < e; ++i) {
    StackMap stack_map = code_info.GetStackMapAt(i, encoding);
    StackMapEntry entry = stack_maps_[i];

    stack_map.SetDexPc(encoding.stack_map.encoding, entry.dex_pc);
    stack_map.SetNativePcCodeOffset(encoding.stack_map.encoding, entry.native_pc_code_offset);
    stack_map.SetRegisterMaskIndex(encoding.stack_map.encoding, entry.register_mask_index);
    stack_map.SetStackMaskIndex(encoding.stack_map.encoding, entry.stack_mask_index);

    size_t offset = MaybeCopyDexRegisterMap(dex_register_entries_[entry.dex_register_map_index],
                                            &next_dex_register_map_offset,
                                            dex_register_locations_region);
    stack_map.SetDexRegisterMapOffset(encoding.stack_map.encoding, offset);

    if (entry.dex_method_index != dex::kDexNoIndex) {
      InvokeInfo invoke_info(code_info.GetInvokeInfo(encoding, invoke_info_idx));
      invoke_info.SetNativePcCodeOffset(encoding.invoke_info.encoding, entry.native_pc_code_offset);
      invoke_info.SetInvokeType(encoding.invoke_info.encoding, entry.invoke_type);
      invoke_info.SetMethodIndexIdx(encoding.invoke_info.encoding, entry.dex_method_index_idx);
      ++invoke_info_idx;
    }

    // Set the inlining info.
    if (entry.inlining_depth != 0) {
      InlineInfo inline_info = code_info.GetInlineInfo(next_inline_info_index, encoding);

      // Fill in the index.
      stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, next_inline_info_index);
      DCHECK_EQ(next_inline_info_index, entry.inline_infos_start_index);
      next_inline_info_index += entry.inlining_depth;

      inline_info.SetDepth(encoding.inline_info.encoding, entry.inlining_depth);
      DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());

      for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
        InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
        if (inline_entry.method != nullptr) {
          inline_info.SetMethodIndexIdxAtDepth(
              encoding.inline_info.encoding,
              depth,
              High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
          inline_info.SetExtraDataAtDepth(
              encoding.inline_info.encoding,
              depth,
              Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
        } else {
          inline_info.SetMethodIndexIdxAtDepth(encoding.inline_info.encoding,
                                               depth,
                                               inline_entry.dex_method_index_idx);
          inline_info.SetExtraDataAtDepth(encoding.inline_info.encoding, depth, 1);
        }
        inline_info.SetDexPcAtDepth(encoding.inline_info.encoding, depth, inline_entry.dex_pc);
        size_t dex_register_map_offset = MaybeCopyDexRegisterMap(
            dex_register_entries_[inline_entry.dex_register_map_index],
            &next_dex_register_map_offset,
            dex_register_locations_region);
        inline_info.SetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding,
                                                   depth,
                                                   dex_register_map_offset);
      }
    } else if (encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
      stack_map.SetInlineInfoIndex(encoding.stack_map.encoding, StackMap::kNoInlineInfo);
    }
  }

  // Write stack masks table.
  const size_t stack_mask_bits = encoding.stack_mask.encoding.BitSize();
  if (stack_mask_bits > 0) {
    size_t stack_mask_bytes = RoundUp(stack_mask_bits, kBitsPerByte) / kBitsPerByte;
    for (size_t i = 0; i < encoding.stack_mask.num_entries; ++i) {
      MemoryRegion source(&stack_masks_[i * stack_mask_bytes], stack_mask_bytes);
      BitMemoryRegion stack_mask = code_info.GetStackMask(i, encoding);
      for (size_t bit_index = 0; bit_index < stack_mask_bits; ++bit_index) {
        stack_mask.StoreBit(bit_index, source.LoadBit(bit_index));
      }
    }
  }

  // Write register masks table.
  for (size_t i = 0; i < encoding.register_mask.num_entries; ++i) {
    BitMemoryRegion register_mask = code_info.GetRegisterMask(i, encoding);
    register_mask.StoreBits(0, register_masks_[i], encoding.register_mask.encoding.BitSize());
  }

  // Verify all written data in debug build.
  if (kIsDebugBuild) {
    CheckCodeInfo(region);
  }
}

void StackMapStream::FillInDexRegisterMap(DexRegisterMap dex_register_map,
                                          uint32_t num_dex_registers,
                                          const BitVector& live_dex_registers_mask,
                                          uint32_t start_index_in_dex_register_locations) const {
  dex_register_map.SetLiveBitMask(num_dex_registers, live_dex_registers_mask);
  // Set the dex register location mapping data.
  size_t number_of_live_dex_registers = live_dex_registers_mask.NumSetBits();
  DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
  DCHECK_LE(start_index_in_dex_register_locations,
            dex_register_locations_.size() - number_of_live_dex_registers);
  for (size_t index_in_dex_register_locations = 0;
       index_in_dex_register_locations != number_of_live_dex_registers;
       ++index_in_dex_register_locations) {
    size_t location_catalog_entry_index = dex_register_locations_[
        start_index_in_dex_register_locations + index_in_dex_register_locations];
    dex_register_map.SetLocationCatalogEntryIndex(
        index_in_dex_register_locations,
        location_catalog_entry_index,
        num_dex_registers,
        location_catalog_entries_.size());
  }
}

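// Deduplicate `entry` against previously added dex register maps: entries are
// bucketed by the hash accumulated in AddDexRegisterEntry(), and candidates
// within a bucket are compared field by field. Returns either the index of an
// existing equal entry or the index of the newly appended one.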
size_t StackMapStream::AddDexRegisterMapEntry(const DexRegisterMapEntry& entry) {
  const size_t current_entry_index = dex_register_entries_.size();
  auto entries_it = dex_map_hash_to_stack_map_indices_.find(entry.hash);
  if (entries_it == dex_map_hash_to_stack_map_indices_.end()) {
    // We don't have a perfect hash function, so we need a list to collect all
    // entries which might have the same dex register map.
    ScopedArenaVector<uint32_t> stack_map_indices(allocator_->Adapter(kArenaAllocStackMapStream));
    stack_map_indices.push_back(current_entry_index);
    dex_map_hash_to_stack_map_indices_.Put(entry.hash, std::move(stack_map_indices));
  } else {
    // We might have collisions, so we need to check whether or not we really have a match.
    for (uint32_t test_entry_index : entries_it->second) {
      if (DexRegisterMapEntryEquals(dex_register_entries_[test_entry_index], entry)) {
        return test_entry_index;
      }
    }
    entries_it->second.push_back(current_entry_index);
  }
  dex_register_entries_.push_back(entry);
  return current_entry_index;
}

bool StackMapStream::DexRegisterMapEntryEquals(const DexRegisterMapEntry& a,
                                               const DexRegisterMapEntry& b) const {
  if ((a.live_dex_registers_mask == nullptr) != (b.live_dex_registers_mask == nullptr)) {
    return false;
  }
  if (a.num_dex_registers != b.num_dex_registers) {
    return false;
  }
  if (a.num_dex_registers != 0u) {
    DCHECK(a.live_dex_registers_mask != nullptr);
    DCHECK(b.live_dex_registers_mask != nullptr);
    if (!a.live_dex_registers_mask->Equal(b.live_dex_registers_mask)) {
      return false;
    }
    size_t number_of_live_dex_registers = a.live_dex_registers_mask->NumSetBits();
    DCHECK_LE(number_of_live_dex_registers, dex_register_locations_.size());
    DCHECK_LE(a.locations_start_index,
              dex_register_locations_.size() - number_of_live_dex_registers);
    DCHECK_LE(b.locations_start_index,
              dex_register_locations_.size() - number_of_live_dex_registers);
    auto a_begin = dex_register_locations_.begin() + a.locations_start_index;
    auto b_begin = dex_register_locations_.begin() + b.locations_start_index;
    if (!std::equal(a_begin, a_begin + number_of_live_dex_registers, b_begin)) {
      return false;
    }
  }
  return true;
}

// Helper for CheckCodeInfo - check that the register map has the expected content.
void StackMapStream::CheckDexRegisterMap(const CodeInfo& code_info,
                                         const DexRegisterMap& dex_register_map,
                                         size_t num_dex_registers,
                                         BitVector* live_dex_registers_mask,
                                         size_t dex_register_locations_index) const {
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  for (size_t reg = 0; reg < num_dex_registers; reg++) {
    // Find the location we tried to encode.
    DexRegisterLocation expected = DexRegisterLocation::None();
    if (live_dex_registers_mask->IsBitSet(reg)) {
      size_t catalog_index = dex_register_locations_[dex_register_locations_index++];
      expected = location_catalog_entries_[catalog_index];
    }
    // Compare to the seen location.
    if (expected.GetKind() == DexRegisterLocation::Kind::kNone) {
      DCHECK(!dex_register_map.IsValid() || !dex_register_map.IsDexRegisterLive(reg))
          << dex_register_map.IsValid() << " " << dex_register_map.IsDexRegisterLive(reg);
    } else {
      DCHECK(dex_register_map.IsDexRegisterLive(reg));
      DexRegisterLocation seen = dex_register_map.GetDexRegisterLocation(
          reg, num_dex_registers, code_info, encoding);
      DCHECK_EQ(expected.GetKind(), seen.GetKind());
      DCHECK_EQ(expected.GetValue(), seen.GetValue());
    }
  }
  if (num_dex_registers == 0) {
    DCHECK(!dex_register_map.IsValid());
  }
}

size_t StackMapStream::PrepareRegisterMasks() {
  register_masks_.resize(stack_maps_.size(), 0u);
  ScopedArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    const size_t index = dedupe.size();
    stack_map.register_mask_index = dedupe.emplace(stack_map.register_mask, index).first->second;
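    // If the mask was a duplicate, `index` was not consumed by the emplace:
    // the write below lands in a slot that is either overwritten by the next
    // unique mask or lies beyond the returned entry count, so it is harmless.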
    register_masks_[index] = stack_map.register_mask;
  }
  return dedupe.size();
}

void StackMapStream::PrepareMethodIndices() {
  CHECK(method_indices_.empty());
  method_indices_.resize(stack_maps_.size() + inline_infos_.size());
  ScopedArenaUnorderedMap<uint32_t, size_t> dedupe(allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    const size_t index = dedupe.size();
    const uint32_t method_index = stack_map.dex_method_index;
    if (method_index != dex::kDexNoIndex) {
      stack_map.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
      method_indices_[index] = method_index;
    }
  }
  for (InlineInfoEntry& inline_info : inline_infos_) {
    const size_t index = dedupe.size();
    const uint32_t method_index = inline_info.method_index;
    CHECK_NE(method_index, dex::kDexNoIndex);
    inline_info.dex_method_index_idx = dedupe.emplace(method_index, index).first->second;
    method_indices_[index] = method_index;
  }
  method_indices_.resize(dedupe.size());
}

size_t StackMapStream::PrepareStackMasks(size_t entry_size_in_bits) {
  // Preallocate memory since we do not want it to move (the dedup map will point into it).
  const size_t byte_entry_size = RoundUp(entry_size_in_bits, kBitsPerByte) / kBitsPerByte;
  stack_masks_.resize(byte_entry_size * stack_maps_.size(), 0u);
  // For deduplication we store the stack masks as byte-packed for simplicity. We can bit-pack
  // later when copying out from stack_masks_.
  ScopedArenaUnorderedMap<MemoryRegion,
                          size_t,
                          FNVHash<MemoryRegion>,
                          MemoryRegion::ContentEquals> dedup(
                              stack_maps_.size(), allocator_->Adapter(kArenaAllocStackMapStream));
  for (StackMapEntry& stack_map : stack_maps_) {
    size_t index = dedup.size();
    MemoryRegion stack_mask(stack_masks_.data() + index * byte_entry_size, byte_entry_size);
    for (size_t i = 0; i < entry_size_in_bits; i++) {
      stack_mask.StoreBit(i, stack_map.sp_mask != nullptr && stack_map.sp_mask->IsBitSet(i));
    }
    stack_map.stack_mask_index = dedup.emplace(stack_mask, index).first->second;
  }
  return dedup.size();
}

// Check that all StackMapStream inputs are correctly encoded by trying to read them back.
void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
  CodeInfo code_info(region);
  CodeInfoEncoding encoding = code_info.ExtractEncoding();
  DCHECK_EQ(code_info.GetNumberOfStackMaps(encoding), stack_maps_.size());
  size_t invoke_info_index = 0;
  for (size_t s = 0; s < stack_maps_.size(); ++s) {
    const StackMap stack_map = code_info.GetStackMapAt(s, encoding);
    const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
    StackMapEntry entry = stack_maps_[s];

    // Check main stack map fields.
    DCHECK_EQ(stack_map.GetNativePcOffset(stack_map_encoding, instruction_set_),
              entry.native_pc_code_offset.Uint32Value(instruction_set_));
    DCHECK_EQ(stack_map.GetDexPc(stack_map_encoding), entry.dex_pc);
    DCHECK_EQ(stack_map.GetRegisterMaskIndex(stack_map_encoding), entry.register_mask_index);
    DCHECK_EQ(code_info.GetRegisterMaskOf(encoding, stack_map), entry.register_mask);
    const size_t num_stack_mask_bits = code_info.GetNumberOfStackMaskBits(encoding);
    DCHECK_EQ(stack_map.GetStackMaskIndex(stack_map_encoding), entry.stack_mask_index);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(encoding, stack_map);
    if (entry.sp_mask != nullptr) {
      DCHECK_GE(stack_mask.size_in_bits(), entry.sp_mask->GetNumberOfBits());
      for (size_t b = 0; b < num_stack_mask_bits; b++) {
        DCHECK_EQ(stack_mask.LoadBit(b), entry.sp_mask->IsBitSet(b));
      }
    } else {
      for (size_t b = 0; b < num_stack_mask_bits; b++) {
        DCHECK_EQ(stack_mask.LoadBit(b), 0u);
      }
    }
    if (entry.dex_method_index != dex::kDexNoIndex) {
      InvokeInfo invoke_info = code_info.GetInvokeInfo(encoding, invoke_info_index);
      DCHECK_EQ(invoke_info.GetNativePcOffset(encoding.invoke_info.encoding, instruction_set_),
                entry.native_pc_code_offset.Uint32Value(instruction_set_));
      DCHECK_EQ(invoke_info.GetInvokeType(encoding.invoke_info.encoding), entry.invoke_type);
      DCHECK_EQ(invoke_info.GetMethodIndexIdx(encoding.invoke_info.encoding),
                entry.dex_method_index_idx);
      invoke_info_index++;
    }
    CheckDexRegisterMap(code_info,
                        code_info.GetDexRegisterMapOf(
                            stack_map, encoding, entry.dex_register_entry.num_dex_registers),
                        entry.dex_register_entry.num_dex_registers,
                        entry.dex_register_entry.live_dex_registers_mask,
                        entry.dex_register_entry.locations_start_index);

    // Check inline info.
    DCHECK_EQ(stack_map.HasInlineInfo(stack_map_encoding), (entry.inlining_depth != 0));
    if (entry.inlining_depth != 0) {
      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
      DCHECK_EQ(inline_info.GetDepth(encoding.inline_info.encoding), entry.inlining_depth);
      for (size_t d = 0; d < entry.inlining_depth; ++d) {
        size_t inline_info_index = entry.inline_infos_start_index + d;
        DCHECK_LT(inline_info_index, inline_infos_.size());
        InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
        DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info.encoding, d),
                  inline_entry.dex_pc);
        if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info.encoding, d)) {
          DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info.encoding, d),
                    inline_entry.method);
        } else {
          const size_t method_index_idx =
              inline_info.GetMethodIndexIdxAtDepth(encoding.inline_info.encoding, d);
          DCHECK_EQ(method_index_idx, inline_entry.dex_method_index_idx);
          DCHECK_EQ(method_indices_[method_index_idx], inline_entry.method_index);
        }

        CheckDexRegisterMap(code_info,
                            code_info.GetDexRegisterMapAtDepth(
                                d,
                                inline_info,
                                encoding,
                                inline_entry.dex_register_entry.num_dex_registers),
                            inline_entry.dex_register_entry.num_dex_registers,
                            inline_entry.dex_register_entry.live_dex_registers_mask,
                            inline_entry.dex_register_entry.locations_start_index);
      }
    }
  }
}

size_t StackMapStream::ComputeMethodInfoSize() const {
  DCHECK_NE(0u, needed_size_) << "PrepareForFillIn not called before " << __FUNCTION__;
  return MethodInfo::ComputeSize(method_indices_.size());
}

}  // namespace art