/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type_cache-inl.h"

#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "base/stl_util.h"
#include "class_linker-inl.h"
#include "dex_file-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "reg_type-inl.h"

namespace art {
namespace verifier {

bool RegTypeCache::primitive_initialized_ = false;
uint16_t RegTypeCache::primitive_count_ = 0;
const PreciseConstType* RegTypeCache::small_precise_constants_[kMaxSmallConstant -
                                                               kMinSmallConstant + 1];

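// Helper for the lookups below: decides whether a cached entry satisfies the requested precision.
// An imprecise request is also satisfied by a precise entry whose class cannot be assigned from
// other types, since such an entry is the only possible match.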
ALWAYS_INLINE static inline bool MatchingPrecisionForClass(const RegType* entry, bool precise)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (entry->IsPreciseReference() == precise) {
    // We were or weren't looking for a precise reference and found what we were looking for.
    return true;
  } else {
    if (!precise && entry->GetClass()->CannotBeAssignedFromOtherTypes()) {
      // We weren't looking for a precise reference (the lookup is by descriptor), but the class
      // cannot be assigned from other types, so the precise entry is the only sensible match.
      // Accept it.
      return true;
    }
    return false;
  }
}

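// Pushes the shared primitive and small-constant singletons into entries_ in the same order in
// which CreatePrimitiveAndSmallConstantTypes() assigned their ids, so that each entry's id matches
// its index (checked by the DCHECKs below).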
void RegTypeCache::FillPrimitiveAndSmallConstantTypes() {
  entries_.push_back(UndefinedType::GetInstance());
  entries_.push_back(ConflictType::GetInstance());
  entries_.push_back(BooleanType::GetInstance());
  entries_.push_back(ByteType::GetInstance());
  entries_.push_back(ShortType::GetInstance());
  entries_.push_back(CharType::GetInstance());
  entries_.push_back(IntegerType::GetInstance());
  entries_.push_back(LongLoType::GetInstance());
  entries_.push_back(LongHiType::GetInstance());
  entries_.push_back(FloatType::GetInstance());
  entries_.push_back(DoubleLoType::GetInstance());
  entries_.push_back(DoubleHiType::GetInstance());
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    int32_t i = value - kMinSmallConstant;
    DCHECK_EQ(entries_.size(), small_precise_constants_[i]->GetId());
    entries_.push_back(small_precise_constants_[i]);
  }
  DCHECK_EQ(entries_.size(), primitive_count_);
}

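// Maps a dex type descriptor to a cached RegType: single-character descriptors resolve to the
// primitive singletons, 'L' and '[' descriptors go through the reference cache (From()), and
// anything else, including 'V', yields the conflict type.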
const RegType& RegTypeCache::FromDescriptor(mirror::ClassLoader* loader,
                                            const char* descriptor,
                                            bool precise) {
  DCHECK(RegTypeCache::primitive_initialized_);
  if (descriptor[1] == '\0') {
    switch (descriptor[0]) {
      case 'Z':
        return Boolean();
      case 'B':
        return Byte();
      case 'S':
        return Short();
      case 'C':
        return Char();
      case 'I':
        return Integer();
      case 'J':
        return LongLo();
      case 'F':
        return Float();
      case 'D':
        return DoubleLo();
      case 'V':  // For void types, conflict types.
      default:
        return Conflict();
    }
  } else if (descriptor[0] == 'L' || descriptor[0] == '[') {
    return From(loader, descriptor, precise);
  } else {
    return Conflict();
  }
}

const RegType& RegTypeCache::RegTypeFromPrimitiveType(Primitive::Type prim_type) const {
  DCHECK(RegTypeCache::primitive_initialized_);
  switch (prim_type) {
    case Primitive::kPrimBoolean:
      return *BooleanType::GetInstance();
    case Primitive::kPrimByte:
      return *ByteType::GetInstance();
    case Primitive::kPrimShort:
      return *ShortType::GetInstance();
    case Primitive::kPrimChar:
      return *CharType::GetInstance();
    case Primitive::kPrimInt:
      return *IntegerType::GetInstance();
    case Primitive::kPrimLong:
      return *LongLoType::GetInstance();
    case Primitive::kPrimFloat:
      return *FloatType::GetInstance();
    case Primitive::kPrimDouble:
      return *DoubleLoType::GetInstance();
    case Primitive::kPrimVoid:
    default:
      return *ConflictType::GetInstance();
  }
}

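// Returns true if the cache entry at |idx| represents |descriptor| with a compatible precision.
// Unresolved entries match on the descriptor alone, as they carry no precision information.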
bool RegTypeCache::MatchDescriptor(size_t idx, const StringPiece& descriptor, bool precise) {
  const RegType* entry = entries_[idx];
  if (descriptor != entry->descriptor_) {
    return false;
  }
  if (entry->HasClass()) {
    return MatchingPrecisionForClass(entry, precise);
  }
  // There is no notion of precise unresolved references; the precise information is simply
  // dropped on the floor.
  DCHECK(entry->IsUnresolvedReference());
  return true;
}

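// Resolves |descriptor| to a mirror::Class. If the cache may load classes this can trigger class
// loading (and leave a pending exception on failure); otherwise only an already-resolved class is
// returned and nullptr means the class is not usable yet.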
mirror::Class* RegTypeCache::ResolveClass(const char* descriptor, mirror::ClassLoader* loader) {
  // The class was not found in the cache, so try to resolve it through the class linker.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(loader));
  mirror::Class* klass = nullptr;
  if (can_load_classes_) {
    klass = class_linker->FindClass(self, descriptor, class_loader);
  } else {
    klass = class_linker->LookupClass(self, descriptor, loader);
    if (klass != nullptr && !klass->IsResolved()) {
      // We found the class but, without it being loaded, it's not safe for use.
      klass = nullptr;
    }
  }
  return klass;
}

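// Copies |string_piece| into the cache's arena so the cached RegTypes can keep referring to it.
// The copy is not null-terminated; the returned StringPiece carries the length explicitly.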
StringPiece RegTypeCache::AddString(const StringPiece& string_piece) {
  char* ptr = arena_.AllocArray<char>(string_piece.length());
  memcpy(ptr, string_piece.data(), string_piece.length());
  return StringPiece(ptr, string_piece.length());
}

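// Looks up or creates the reference type for |descriptor|: first scan the non-primitive cache
// entries, then try to resolve the class. A resolved class becomes a precise or imprecise
// ReferenceType, an unresolved but well-formed descriptor becomes an UnresolvedReferenceType,
// and a malformed descriptor yields the conflict type.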
const RegType& RegTypeCache::From(mirror::ClassLoader* loader,
                                  const char* descriptor,
                                  bool precise) {
  StringPiece sp_descriptor(descriptor);
  // Try looking up the class in the cache first. We use a StringPiece to avoid continual strlen
  // operations on the descriptor.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    if (MatchDescriptor(i, sp_descriptor, precise)) {
      return *(entries_[i]);
    }
  }
  // The class was not found in the cache, so a new type must be created. Try resolving the class.
  mirror::Class* klass = ResolveClass(descriptor, loader);
  if (klass != nullptr) {
    // The class resolved. To pass verification, a precise type must be instantiable; interfaces
    // and abstract classes may only be requested with precise == false.
    DCHECK(!precise || klass->IsInstantiable());
    // Create a precise type if:
    // 1- the class is final and NOT an interface (a precise interface is meaningless), or
    // 2- the precise flag was passed as true.
    RegType* entry;
    // Create an imprecise type if we can't tell for a fact that it is precise.
    if (klass->CannotBeAssignedFromOtherTypes() || precise) {
      DCHECK(!(klass->IsAbstract()) || klass->IsArrayClass());
      DCHECK(!klass->IsInterface());
      entry = new (&arena_) PreciseReferenceType(klass, AddString(sp_descriptor), entries_.size());
    } else {
      entry = new (&arena_) ReferenceType(klass, AddString(sp_descriptor), entries_.size());
    }
    return AddEntry(entry);
  } else {  // Class not resolved.
    // We tried loading the class and failed; this may have raised an exception, so clear it
    // before going on.
    if (can_load_classes_) {
      DCHECK(Thread::Current()->IsExceptionPending());
      Thread::Current()->ClearException();
    } else {
      DCHECK(!Thread::Current()->IsExceptionPending());
    }
    if (IsValidDescriptor(descriptor)) {
      return AddEntry(
          new (&arena_) UnresolvedReferenceType(AddString(sp_descriptor), entries_.size()));
    } else {
      // The descriptor is broken; return the conflict type, as there is nothing sensible that
      // could be done at runtime.
      return Conflict();
    }
  }
}

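// Searches the class-keyed entries for a type wrapping |klass| with a compatible precision.
// Primitive classes map directly to their singleton types. Returns nullptr if nothing matches.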
const RegType* RegTypeCache::FindClass(mirror::Class* klass, bool precise) const {
  DCHECK(klass != nullptr);
  if (klass->IsPrimitive()) {
    // Note: precise isn't used for primitive classes. A char is assignable to an int. All
    // primitive classes are final.
    return &RegTypeFromPrimitiveType(klass->GetPrimitiveType());
  }
  for (auto& pair : klass_entries_) {
    mirror::Class* const reg_klass = pair.first.Read();
    if (reg_klass == klass) {
      const RegType* reg_type = pair.second;
      if (MatchingPrecisionForClass(reg_type, precise)) {
        return reg_type;
      }
    }
  }
  return nullptr;
}

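// Creates a new precise or imprecise reference type for |klass| and registers it. Callers are
// expected to have checked FindClass() first, as the DCHECK below asserts.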
const RegType* RegTypeCache::InsertClass(const StringPiece& descriptor,
                                         mirror::Class* klass,
                                         bool precise) {
  // No reference to the class was found, create new reference.
  DCHECK(FindClass(klass, precise) == nullptr);
  RegType* const reg_type = precise
      ? static_cast<RegType*>(
          new (&arena_) PreciseReferenceType(klass, descriptor, entries_.size()))
      : new (&arena_) ReferenceType(klass, descriptor, entries_.size());
  return &AddEntry(reg_type);
}

const RegType& RegTypeCache::FromClass(const char* descriptor, mirror::Class* klass, bool precise) {
  DCHECK(klass != nullptr);
  const RegType* reg_type = FindClass(klass, precise);
  if (reg_type == nullptr) {
    reg_type = InsertClass(AddString(StringPiece(descriptor)), klass, precise);
  }
  return *reg_type;
}

RegTypeCache::RegTypeCache(bool can_load_classes, ScopedArenaAllocator& arena)
    : entries_(arena.Adapter(kArenaAllocVerifier)),
      klass_entries_(arena.Adapter(kArenaAllocVerifier)),
      can_load_classes_(can_load_classes),
      arena_(arena) {
  if (kIsDebugBuild) {
    Thread::Current()->AssertThreadSuspensionIsAllowable(gAborting == 0);
  }
  // The klass_entries_ array does not have primitives or small constants.
  static constexpr size_t kNumReserveEntries = 32;
  klass_entries_.reserve(kNumReserveEntries);
  // We want to have room for additional entries after inserting primitives and small
  // constants.
  entries_.reserve(kNumReserveEntries + kNumPrimitivesAndSmallConstants);
  FillPrimitiveAndSmallConstantTypes();
}

RegTypeCache::~RegTypeCache() {
  DCHECK_LE(primitive_count_, entries_.size());
}

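// Releases the static primitive and small-constant singletons created by
// CreatePrimitiveAndSmallConstantTypes() and resets the static bookkeeping.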
void RegTypeCache::ShutDown() {
  if (RegTypeCache::primitive_initialized_) {
    UndefinedType::Destroy();
    ConflictType::Destroy();
    BooleanType::Destroy();
    ByteType::Destroy();
    ShortType::Destroy();
    CharType::Destroy();
    IntegerType::Destroy();
    LongLoType::Destroy();
    LongHiType::Destroy();
    FloatType::Destroy();
    DoubleLoType::Destroy();
    DoubleHiType::Destroy();
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      const PreciseConstType* type = small_precise_constants_[value - kMinSmallConstant];
      delete type;
      small_precise_constants_[value - kMinSmallConstant] = nullptr;
    }
    RegTypeCache::primitive_initialized_ = false;
    RegTypeCache::primitive_count_ = 0;
  }
}

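// Creates the shared instance for one primitive (or undefined/conflict) type and assigns it the
// next primitive id. An empty descriptor means there is no backing class to resolve.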
template <class Type>
const Type* RegTypeCache::CreatePrimitiveTypeInstance(const std::string& descriptor) {
  mirror::Class* klass = nullptr;
  // Try loading the class through the class linker.
  if (!descriptor.empty()) {
    klass = art::Runtime::Current()->GetClassLinker()->FindSystemClass(Thread::Current(),
                                                                       descriptor.c_str());
    DCHECK(klass != nullptr);
  }
  const Type* entry = Type::CreateInstance(klass, descriptor, RegTypeCache::primitive_count_);
  RegTypeCache::primitive_count_++;
  return entry;
}

void RegTypeCache::CreatePrimitiveAndSmallConstantTypes() {
  CreatePrimitiveTypeInstance<UndefinedType>("");
  CreatePrimitiveTypeInstance<ConflictType>("");
  CreatePrimitiveTypeInstance<BooleanType>("Z");
  CreatePrimitiveTypeInstance<ByteType>("B");
  CreatePrimitiveTypeInstance<ShortType>("S");
  CreatePrimitiveTypeInstance<CharType>("C");
  CreatePrimitiveTypeInstance<IntegerType>("I");
  CreatePrimitiveTypeInstance<LongLoType>("J");
  CreatePrimitiveTypeInstance<LongHiType>("J");
  CreatePrimitiveTypeInstance<FloatType>("F");
  CreatePrimitiveTypeInstance<DoubleLoType>("D");
  CreatePrimitiveTypeInstance<DoubleHiType>("D");
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    PreciseConstType* type = new PreciseConstType(value, primitive_count_);
    small_precise_constants_[value - kMinSmallConstant] = type;
    primitive_count_++;
  }
}

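// Merges two types when at least one side is unresolved. The result is represented as the merge
// of the resolved parts plus a bit vector of the unresolved components' ids; conflicting resolved
// parts collapse to Conflict, and mixed array/non-array merges collapse to java.lang.Object.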
const RegType& RegTypeCache::FromUnresolvedMerge(const RegType& left,
                                                 const RegType& right,
                                                 MethodVerifier* verifier) {
  ArenaBitVector types(&arena_,
                       kDefaultArenaBitVectorBytes * kBitsPerByte,  // Allocate at least 8 bytes.
                       true);                                       // Is expandable.
  const RegType* left_resolved;
  bool left_unresolved_is_array;
  if (left.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& left_merge = *down_cast<const UnresolvedMergedType*>(&left);

    types.Copy(&left_merge.GetUnresolvedTypes());
    left_resolved = &left_merge.GetResolvedPart();
    left_unresolved_is_array = left.IsArrayTypes();
  } else if (left.IsUnresolvedTypes()) {
    types.ClearAllBits();
    types.SetBit(left.GetId());
    left_resolved = &Zero();
    left_unresolved_is_array = left.IsArrayTypes();
  } else {
    types.ClearAllBits();
    left_resolved = &left;
    left_unresolved_is_array = false;
  }

  const RegType* right_resolved;
  bool right_unresolved_is_array;
  if (right.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& right_merge = *down_cast<const UnresolvedMergedType*>(&right);

    types.Union(&right_merge.GetUnresolvedTypes());
    right_resolved = &right_merge.GetResolvedPart();
    right_unresolved_is_array = right.IsArrayTypes();
  } else if (right.IsUnresolvedTypes()) {
    types.SetBit(right.GetId());
    right_resolved = &Zero();
    right_unresolved_is_array = right.IsArrayTypes();
  } else {
    right_resolved = &right;
    right_unresolved_is_array = false;
  }

  // Merge the resolved parts. Left and right might be equal, so use SafeMerge.
  const RegType& resolved_parts_merged = left_resolved->SafeMerge(*right_resolved, this, verifier);
  // If we get a conflict here, the merge result is a conflict, not an unresolved merge type.
  if (resolved_parts_merged.IsConflict()) {
    return Conflict();
  }

  bool resolved_merged_is_array = resolved_parts_merged.IsArrayTypes();
  if (left_unresolved_is_array || right_unresolved_is_array || resolved_merged_is_array) {
    // Arrays involved, see if we need to merge to Object.

    // Is the resolved part a primitive array?
    if (resolved_merged_is_array && !resolved_parts_merged.IsObjectArrayTypes()) {
      return JavaLangObject(false /* precise */);
    }

    // Is any part not an array (but exists)?
    if ((!left_unresolved_is_array && left_resolved != &left) ||
        (!right_unresolved_is_array && right_resolved != &right) ||
        !resolved_merged_is_array) {
      return JavaLangObject(false /* precise */);
    }
  }

  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedMergedReference()) {
      const UnresolvedMergedType* cmp_type = down_cast<const UnresolvedMergedType*>(cur_entry);
      const RegType& resolved_part = cmp_type->GetResolvedPart();
      const BitVector& unresolved_part = cmp_type->GetUnresolvedTypes();
      // Use SameBitsSet. "types" is expandable to allow merging in the components, but the
      // BitVector in the final RegType will be made non-expandable.
      if (&resolved_part == &resolved_parts_merged && types.SameBitsSet(&unresolved_part)) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&arena_) UnresolvedMergedType(resolved_parts_merged,
                                                     types,
                                                     this,
                                                     entries_.size()));
}

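// Returns the placeholder type standing for "superclass of |child|" when the child is unresolved,
// reusing an existing entry that refers to the same child id if one is cached.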
const RegType& RegTypeCache::FromUnresolvedSuperClass(const RegType& child) {
  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedSuperClass()) {
      const UnresolvedSuperClass* tmp_entry =
          down_cast<const UnresolvedSuperClass*>(cur_entry);
      uint16_t unresolved_super_child_id =
          tmp_entry->GetUnresolvedSuperClassChildId();
      if (unresolved_super_child_id == child.GetId()) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&arena_) UnresolvedSuperClass(child.GetId(), this, entries_.size()));
}

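// Returns the uninitialized flavor of |type| for an object allocated at |allocation_pc|. Entries
// are keyed on the descriptor (unresolved case) or the class (resolved case) together with the
// allocation pc, so different allocation sites get distinct uninitialized types.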
const UninitializedType& RegTypeCache::Uninitialized(const RegType& type, uint32_t allocation_pc) {
  UninitializedType* entry = nullptr;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedReference() &&
          down_cast<const UnresolvedUninitializedRefType*>(cur_entry)->GetAllocationPc()
              == allocation_pc &&
          (cur_entry->GetDescriptor() == descriptor)) {
        return *down_cast<const UnresolvedUninitializedRefType*>(cur_entry);
      }
    }
    entry = new (&arena_) UnresolvedUninitializedRefType(descriptor,
                                                         allocation_pc,
                                                         entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedReference() &&
          down_cast<const UninitializedReferenceType*>(cur_entry)
              ->GetAllocationPc() == allocation_pc &&
          cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedReferenceType*>(cur_entry);
      }
    }
    entry = new (&arena_) UninitializedReferenceType(klass,
                                                     descriptor,
                                                     allocation_pc,
                                                     entries_.size());
  }
  return AddEntry(entry);
}

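// Returns the initialized type corresponding to |uninit_type|, i.e. what a register holds once
// the constructor has run: unresolved types stay unresolved, an uninitialized "this" of a
// non-final class becomes an imprecise reference, and other non-primitive cases become a precise
// reference.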
const RegType& RegTypeCache::FromUninitialized(const RegType& uninit_type) {
  RegType* entry;

  if (uninit_type.IsUnresolvedTypes()) {
    const StringPiece& descriptor(uninit_type.GetDescriptor());
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *cur_entry;
      }
    }
    entry = new (&arena_) UnresolvedReferenceType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = uninit_type.GetClass();
    if (uninit_type.IsUninitializedThisReference() && !klass->IsFinal()) {
      // For an uninitialized "this" reference, look for reference types that are not precise.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&arena_) ReferenceType(klass, "", entries_.size());
    } else if (!klass->IsPrimitive()) {
      // We're uninitialized because of allocation, so look for or create a precise type, as
      // allocations may only create objects of that type.
      // Note: we do not check whether the given klass is actually instantiable (besides being
      //       primitive), that is, we allow interfaces and abstract classes here. The reasoning is
      //       twofold:
      //       1) The "new-instance" instruction to generate the uninitialized type will already
      //          queue an instantiation error. This is a soft error that must be thrown at
      //          runtime, and could potentially change if the class is resolved differently at
      //          runtime.
      //       2) Checking whether the klass is instantiable and using conflict may produce a hard
      //          error when the value is used, which leads to a VerifyError, which is not the
      //          correct semantics.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsPreciseReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&arena_) PreciseReferenceType(klass,
                                                 uninit_type.GetDescriptor(),
                                                 entries_.size());
    } else {
      return Conflict();
    }
  }
  return AddEntry(entry);
}

const UninitializedType& RegTypeCache::UninitializedThisArgument(const RegType& type) {
  UninitializedType* entry;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedThisReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&arena_) UnresolvedUninitializedThisRefType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedThisReference() && cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&arena_) UninitializedThisReferenceType(klass, descriptor, entries_.size());
  }
  return AddEntry(entry);
}

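// The three constant factories below cache category-1 constants outside the small-constant range
// and the low/high halves of category-2 (long/double) constants, distinguishing precise from
// imprecise values.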
const ConstantType& RegTypeCache::FromCat1NonSmallConstant(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->klass_.IsNull() && cur_entry->IsConstant() &&
        cur_entry->IsPreciseConstant() == precise &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValue() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstLo(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantLo() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueLo() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstLoType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstLoType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstHi(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantHi() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueHi() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&arena_) PreciseConstHiType(value, entries_.size());
  } else {
    entry = new (&arena_) ImpreciseConstHiType(value, entries_.size());
  }
  return AddEntry(entry);
}

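// Returns the component type of |array|. Unresolved arrays are handled on the descriptor level by
// stripping the leading '['; resolved arrays use the component class, falling back to an
// unresolved type when that class is erroneous.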
const RegType& RegTypeCache::GetComponentType(const RegType& array, mirror::ClassLoader* loader) {
  if (!array.IsArrayTypes()) {
    return Conflict();
  } else if (array.IsUnresolvedTypes()) {
    DCHECK(!array.IsUnresolvedMergedReference());  // Caller must make sure not to ask for this.
    const std::string descriptor(array.GetDescriptor().as_string());
    return FromDescriptor(loader, descriptor.c_str() + 1, false);
  } else {
    mirror::Class* klass = array.GetClass()->GetComponentType();
    std::string temp;
    const char* descriptor = klass->GetDescriptor(&temp);
    if (klass->IsErroneous()) {
      // Arrays may have erroneous component types, use unresolved in that case.
      // We assume that the primitive classes are not erroneous, so we know it is a
      // reference type.
      return FromDescriptor(loader, descriptor, false);
    } else {
      return FromClass(descriptor, klass, klass->CannotBeAssignedFromOtherTypes());
    }
  }
}

void RegTypeCache::Dump(std::ostream& os) {
  for (size_t i = 0; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry != nullptr) {
      os << i << ": " << cur_entry->Dump() << "\n";
    }
  }
}

void RegTypeCache::VisitStaticRoots(RootVisitor* visitor) {
  // Visit the primitive types. This is required because, if there are no active verifiers, they
  // won't be in the entries array and would therefore not be visited as roots.
  if (primitive_initialized_) {
    RootInfo ri(kRootUnknown);
    UndefinedType::GetInstance()->VisitRoots(visitor, ri);
    ConflictType::GetInstance()->VisitRoots(visitor, ri);
    BooleanType::GetInstance()->VisitRoots(visitor, ri);
    ByteType::GetInstance()->VisitRoots(visitor, ri);
    ShortType::GetInstance()->VisitRoots(visitor, ri);
    CharType::GetInstance()->VisitRoots(visitor, ri);
    IntegerType::GetInstance()->VisitRoots(visitor, ri);
    LongLoType::GetInstance()->VisitRoots(visitor, ri);
    LongHiType::GetInstance()->VisitRoots(visitor, ri);
    FloatType::GetInstance()->VisitRoots(visitor, ri);
    DoubleLoType::GetInstance()->VisitRoots(visitor, ri);
    DoubleHiType::GetInstance()->VisitRoots(visitor, ri);
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      small_precise_constants_[value - kMinSmallConstant]->VisitRoots(visitor, ri);
    }
  }
}

void RegTypeCache::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  // Exclude the static roots that are visited by VisitStaticRoots().
  for (size_t i = primitive_count_; i < entries_.size(); ++i) {
    entries_[i]->VisitRoots(visitor, root_info);
  }
  for (auto& pair : klass_entries_) {
    GcRoot<mirror::Class>& root = pair.first;
    root.VisitRoot(visitor, root_info);
  }
}

}  // namespace verifier
}  // namespace art