/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type_cache-inl.h"

#include <type_traits>

#include "base/aborting.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "base/stl_util.h"
#include "class_linker-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "reg_type-inl.h"

namespace art {
namespace verifier {

bool RegTypeCache::primitive_initialized_ = false;
uint16_t RegTypeCache::primitive_count_ = 0;
const PreciseConstType* RegTypeCache::small_precise_constants_[kMaxSmallConstant -
                                                               kMinSmallConstant + 1];

ALWAYS_INLINE static inline bool MatchingPrecisionForClass(const RegType* entry, bool precise)
    REQUIRES_SHARED(Locks::mutator_lock_) {
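  // Decide whether a cached entry for a resolved class can serve a lookup with this precision.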
  if (entry->IsPreciseReference() == precise) {
    // We were or weren't looking for a precise reference and we found what we needed.
    return true;
  } else {
    if (!precise && entry->GetClass()->CannotBeAssignedFromOtherTypes()) {
      // We weren't looking for a precise reference, as we're looking up based on a descriptor, but
      // we found a matching entry based on the descriptor. Return the precise entry in that case.
      return true;
    }
    return false;
  }
}

void RegTypeCache::FillPrimitiveAndSmallConstantTypes() {
  // Note: this must have the same order as CreatePrimitiveAndSmallConstantTypes.
  entries_.push_back(UndefinedType::GetInstance());
  entries_.push_back(ConflictType::GetInstance());
  entries_.push_back(NullType::GetInstance());
  entries_.push_back(BooleanType::GetInstance());
  entries_.push_back(ByteType::GetInstance());
  entries_.push_back(ShortType::GetInstance());
  entries_.push_back(CharType::GetInstance());
  entries_.push_back(IntegerType::GetInstance());
  entries_.push_back(LongLoType::GetInstance());
  entries_.push_back(LongHiType::GetInstance());
  entries_.push_back(FloatType::GetInstance());
  entries_.push_back(DoubleLoType::GetInstance());
  entries_.push_back(DoubleHiType::GetInstance());
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    int32_t i = value - kMinSmallConstant;
    DCHECK_EQ(entries_.size(), small_precise_constants_[i]->GetId());
    entries_.push_back(small_precise_constants_[i]);
  }
  DCHECK_EQ(entries_.size(), primitive_count_);
}

const RegType& RegTypeCache::FromDescriptor(mirror::ClassLoader* loader,
                                            const char* descriptor,
                                            bool precise) {
  DCHECK(RegTypeCache::primitive_initialized_);
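  // Single-character descriptors denote primitive (and void) types and map directly to the
  // preallocated instances.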
  if (descriptor[1] == '\0') {
    switch (descriptor[0]) {
      case 'Z':
        return Boolean();
      case 'B':
        return Byte();
      case 'S':
        return Short();
      case 'C':
        return Char();
      case 'I':
        return Integer();
      case 'J':
        return LongLo();
      case 'F':
        return Float();
      case 'D':
        return DoubleLo();
      case 'V':  // For void types, conflict types.
      default:
        return Conflict();
    }
  } else if (descriptor[0] == 'L' || descriptor[0] == '[') {
    return From(loader, descriptor, precise);
  } else {
    return Conflict();
  }
}

const RegType& RegTypeCache::RegTypeFromPrimitiveType(Primitive::Type prim_type) const {
  DCHECK(RegTypeCache::primitive_initialized_);
  switch (prim_type) {
    case Primitive::kPrimBoolean:
      return *BooleanType::GetInstance();
    case Primitive::kPrimByte:
      return *ByteType::GetInstance();
    case Primitive::kPrimShort:
      return *ShortType::GetInstance();
    case Primitive::kPrimChar:
      return *CharType::GetInstance();
    case Primitive::kPrimInt:
      return *IntegerType::GetInstance();
    case Primitive::kPrimLong:
      return *LongLoType::GetInstance();
    case Primitive::kPrimFloat:
      return *FloatType::GetInstance();
    case Primitive::kPrimDouble:
      return *DoubleLoType::GetInstance();
    case Primitive::kPrimVoid:
    default:
      return *ConflictType::GetInstance();
  }
}

bool RegTypeCache::MatchDescriptor(size_t idx, const StringPiece& descriptor, bool precise) {
  const RegType* entry = entries_[idx];
  if (descriptor != entry->descriptor_) {
    return false;
  }
  if (entry->HasClass()) {
    return MatchingPrecisionForClass(entry, precise);
  }
  // There is no notion of precise unresolved references; the precise information is just dropped
  // on the floor.
  DCHECK(entry->IsUnresolvedReference());
  return true;
}

mirror::Class* RegTypeCache::ResolveClass(const char* descriptor, mirror::ClassLoader* loader) {
  // The class was not found in the cache, so try to resolve it and create a new type.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(loader));
  mirror::Class* klass = nullptr;
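  // If class loading is allowed, let the class linker find and resolve the class; otherwise only
  // accept a class that is already resolved.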
  if (can_load_classes_) {
    klass = class_linker->FindClass(self, descriptor, class_loader);
  } else {
    klass = class_linker->LookupClass(self, descriptor, loader);
    if (klass != nullptr && !klass->IsResolved()) {
      // We found the class, but without it being loaded it's not safe for use.
      klass = nullptr;
    }
  }
  return klass;
}

StringPiece RegTypeCache::AddString(const StringPiece& string_piece) {
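  // Copy the string data into the arena so the returned StringPiece stays valid for the lifetime
  // of the cache's allocator.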
  char* ptr = allocator_.AllocArray<char>(string_piece.length());
  memcpy(ptr, string_piece.data(), string_piece.length());
  return StringPiece(ptr, string_piece.length());
}

const RegType& RegTypeCache::From(mirror::ClassLoader* loader,
                                  const char* descriptor,
                                  bool precise) {
  StringPiece sp_descriptor(descriptor);
  // Try looking up the class in the cache first. We use a StringPiece to avoid continual strlen
  // operations on the descriptor.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    if (MatchDescriptor(i, sp_descriptor, precise)) {
      return *(entries_[i]);
    }
  }
  // Class not found in the cache, so we will create a new type for it.
  // Try resolving the class.
  mirror::Class* klass = ResolveClass(descriptor, loader);
  if (klass != nullptr) {
    // Class resolved; create a new entry for it.
    // To pass verification, a precise type must be instantiable: interfaces and
    // non-instantiable classes may only be requested imprecisely.
    DCHECK(!precise || klass->IsInstantiable());
    // Create a precise type if:
    // 1- the class is final and not an interface (a precise interface is meaningless), or
    // 2- the precise flag was passed as true.
    RegType* entry;
    // Create an imprecise type if we can't tell for a fact that it is precise.
    if (klass->CannotBeAssignedFromOtherTypes() || precise) {
      DCHECK(!(klass->IsAbstract()) || klass->IsArrayClass());
      DCHECK(!klass->IsInterface());
      entry =
          new (&allocator_) PreciseReferenceType(klass, AddString(sp_descriptor), entries_.size());
    } else {
      entry = new (&allocator_) ReferenceType(klass, AddString(sp_descriptor), entries_.size());
    }
    return AddEntry(entry);
  } else {  // Class not resolved.
    // We tried loading the class and failed, which may have raised an exception,
    // so clear it before we go on.
    if (can_load_classes_) {
      DCHECK(Thread::Current()->IsExceptionPending());
      Thread::Current()->ClearException();
    } else {
      DCHECK(!Thread::Current()->IsExceptionPending());
    }
    if (IsValidDescriptor(descriptor)) {
      return AddEntry(
          new (&allocator_) UnresolvedReferenceType(AddString(sp_descriptor), entries_.size()));
    } else {
      // The descriptor is broken; return the conflict type as there's nothing sensible that
      // could be done at runtime.
      return Conflict();
    }
  }
}

const RegType& RegTypeCache::MakeUnresolvedReference() {
  // The descriptor is intentionally invalid so nothing else will match this type.
  return AddEntry(new (&allocator_) UnresolvedReferenceType(AddString("a"), entries_.size()));
}

const RegType* RegTypeCache::FindClass(mirror::Class* klass, bool precise) const {
  DCHECK(klass != nullptr);
  if (klass->IsPrimitive()) {
    // Note: precise isn't used for primitive classes. A char is assignable to an int. All
    // primitive classes are final.
    return &RegTypeFromPrimitiveType(klass->GetPrimitiveType());
  }
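  // Walk the resolved-class entries looking for this class with a compatible precision.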
  for (auto& pair : klass_entries_) {
    mirror::Class* const reg_klass = pair.first.Read();
    if (reg_klass == klass) {
      const RegType* reg_type = pair.second;
      if (MatchingPrecisionForClass(reg_type, precise)) {
        return reg_type;
      }
    }
  }
  return nullptr;
}

const RegType* RegTypeCache::InsertClass(const StringPiece& descriptor,
                                         mirror::Class* klass,
                                         bool precise) {
  // No reference to the class was found, create new reference.
  DCHECK(FindClass(klass, precise) == nullptr);
  RegType* const reg_type = precise
      ? static_cast<RegType*>(
          new (&allocator_) PreciseReferenceType(klass, descriptor, entries_.size()))
      : new (&allocator_) ReferenceType(klass, descriptor, entries_.size());
  return &AddEntry(reg_type);
}

const RegType& RegTypeCache::FromClass(const char* descriptor, mirror::Class* klass, bool precise) {
  DCHECK(klass != nullptr);
  const RegType* reg_type = FindClass(klass, precise);
  if (reg_type == nullptr) {
    reg_type = InsertClass(AddString(StringPiece(descriptor)), klass, precise);
  }
  return *reg_type;
}

RegTypeCache::RegTypeCache(bool can_load_classes, ScopedArenaAllocator& allocator, bool can_suspend)
    : entries_(allocator.Adapter(kArenaAllocVerifier)),
      klass_entries_(allocator.Adapter(kArenaAllocVerifier)),
      can_load_classes_(can_load_classes),
      allocator_(allocator) {
  DCHECK(can_suspend || !can_load_classes) << "Cannot load classes if suspension is disabled!";
  if (kIsDebugBuild && can_suspend) {
    Thread::Current()->AssertThreadSuspensionIsAllowable(gAborting == 0);
  }
  // The klass_entries_ array does not have primitives or small constants.
  static constexpr size_t kNumReserveEntries = 32;
  klass_entries_.reserve(kNumReserveEntries);
  // We want to have room for additional entries after inserting primitives and small
  // constants.
  entries_.reserve(kNumReserveEntries + kNumPrimitivesAndSmallConstants);
  FillPrimitiveAndSmallConstantTypes();
}

RegTypeCache::~RegTypeCache() {
  DCHECK_LE(primitive_count_, entries_.size());
}

void RegTypeCache::ShutDown() {
  if (RegTypeCache::primitive_initialized_) {
    UndefinedType::Destroy();
    ConflictType::Destroy();
    BooleanType::Destroy();
    ByteType::Destroy();
    ShortType::Destroy();
    CharType::Destroy();
    IntegerType::Destroy();
    LongLoType::Destroy();
    LongHiType::Destroy();
    FloatType::Destroy();
    DoubleLoType::Destroy();
    DoubleHiType::Destroy();
    NullType::Destroy();
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      const PreciseConstType* type = small_precise_constants_[value - kMinSmallConstant];
      delete type;
      small_precise_constants_[value - kMinSmallConstant] = nullptr;
    }
    RegTypeCache::primitive_initialized_ = false;
    RegTypeCache::primitive_count_ = 0;
  }
}

// Helper for create_primitive_type_instance lambda.
namespace {
template <typename T>
struct TypeHelper {
  using type = T;
  static_assert(std::is_convertible<T*, RegType*>::value, "T must be a RegType");

  const char* descriptor;

  explicit TypeHelper(const char* d) : descriptor(d) {}
};
}  // namespace

void RegTypeCache::CreatePrimitiveAndSmallConstantTypes() {
  // Note: this must have the same order as FillPrimitiveAndSmallConstantTypes.

  // It is acceptable to pass on the const char* in type to CreateInstance, as all calls below are
  // with compile-time constants that will have global lifetime. Use of the lambda ensures this
  // code cannot leak to other users.
  auto create_primitive_type_instance = [&](auto type) REQUIRES_SHARED(Locks::mutator_lock_) {
    using Type = typename decltype(type)::type;
    mirror::Class* klass = nullptr;
    // Try loading the class from linker.
    DCHECK(type.descriptor != nullptr);
    if (strlen(type.descriptor) > 0) {
      klass = art::Runtime::Current()->GetClassLinker()->FindSystemClass(Thread::Current(),
                                                                         type.descriptor);
      DCHECK(klass != nullptr);
    }
    const Type* entry = Type::CreateInstance(klass,
                                             type.descriptor,
                                             RegTypeCache::primitive_count_);
    RegTypeCache::primitive_count_++;
    return entry;
  };
  create_primitive_type_instance(TypeHelper<UndefinedType>(""));
  create_primitive_type_instance(TypeHelper<ConflictType>(""));
  create_primitive_type_instance(TypeHelper<NullType>(""));
  create_primitive_type_instance(TypeHelper<BooleanType>("Z"));
  create_primitive_type_instance(TypeHelper<ByteType>("B"));
  create_primitive_type_instance(TypeHelper<ShortType>("S"));
  create_primitive_type_instance(TypeHelper<CharType>("C"));
  create_primitive_type_instance(TypeHelper<IntegerType>("I"));
  create_primitive_type_instance(TypeHelper<LongLoType>("J"));
  create_primitive_type_instance(TypeHelper<LongHiType>("J"));
  create_primitive_type_instance(TypeHelper<FloatType>("F"));
  create_primitive_type_instance(TypeHelper<DoubleLoType>("D"));
  create_primitive_type_instance(TypeHelper<DoubleHiType>("D"));

  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    PreciseConstType* type = new PreciseConstType(value, primitive_count_);
    small_precise_constants_[value - kMinSmallConstant] = type;
    primitive_count_++;
  }
}

const RegType& RegTypeCache::FromUnresolvedMerge(const RegType& left,
                                                 const RegType& right,
                                                 MethodVerifier* verifier) {
  ArenaBitVector types(&allocator_,
                       kDefaultArenaBitVectorBytes * kBitsPerByte,  // Allocate at least 8 bytes.
                       true);                                       // Is expandable.
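  // 'types' collects the ids of all unresolved types that contribute to this merge.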
  const RegType* left_resolved;
  bool left_unresolved_is_array;
  if (left.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& left_merge = *down_cast<const UnresolvedMergedType*>(&left);

    types.Copy(&left_merge.GetUnresolvedTypes());
    left_resolved = &left_merge.GetResolvedPart();
    left_unresolved_is_array = left.IsArrayTypes();
  } else if (left.IsUnresolvedTypes()) {
    types.ClearAllBits();
    types.SetBit(left.GetId());
    left_resolved = &Zero();
    left_unresolved_is_array = left.IsArrayTypes();
  } else {
    types.ClearAllBits();
    left_resolved = &left;
    left_unresolved_is_array = false;
  }

  const RegType* right_resolved;
  bool right_unresolved_is_array;
  if (right.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& right_merge = *down_cast<const UnresolvedMergedType*>(&right);

    types.Union(&right_merge.GetUnresolvedTypes());
    right_resolved = &right_merge.GetResolvedPart();
    right_unresolved_is_array = right.IsArrayTypes();
  } else if (right.IsUnresolvedTypes()) {
    types.SetBit(right.GetId());
    right_resolved = &Zero();
    right_unresolved_is_array = right.IsArrayTypes();
  } else {
    right_resolved = &right;
    right_unresolved_is_array = false;
  }

  // Merge the resolved parts. Left and right might be equal, so use SafeMerge.
  const RegType& resolved_parts_merged = left_resolved->SafeMerge(*right_resolved, this, verifier);
  // If we get a conflict here, the merge result is a conflict, not an unresolved merge type.
  if (resolved_parts_merged.IsConflict()) {
    return Conflict();
  }
  if (resolved_parts_merged.IsJavaLangObject()) {
    return resolved_parts_merged;
  }

  bool resolved_merged_is_array = resolved_parts_merged.IsArrayTypes();
  if (left_unresolved_is_array || right_unresolved_is_array || resolved_merged_is_array) {
    // Arrays involved, see if we need to merge to Object.

    // Is the resolved part a primitive array?
    if (resolved_merged_is_array && !resolved_parts_merged.IsObjectArrayTypes()) {
      return JavaLangObject(false /* precise */);
    }

    // Is any part not an array (but exists)?
    if ((!left_unresolved_is_array && left_resolved != &left) ||
        (!right_unresolved_is_array && right_resolved != &right) ||
        !resolved_merged_is_array) {
      return JavaLangObject(false /* precise */);
    }
  }

  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedMergedReference()) {
      const UnresolvedMergedType* cmp_type = down_cast<const UnresolvedMergedType*>(cur_entry);
      const RegType& resolved_part = cmp_type->GetResolvedPart();
      const BitVector& unresolved_part = cmp_type->GetUnresolvedTypes();
      // Use SameBitsSet. "types" is expandable to allow merging in the components, but the
      // BitVector in the final RegType will be made non-expandable.
      if (&resolved_part == &resolved_parts_merged && types.SameBitsSet(&unresolved_part)) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&allocator_) UnresolvedMergedType(resolved_parts_merged,
                                                         types,
                                                         this,
                                                         entries_.size()));
}

const RegType& RegTypeCache::FromUnresolvedSuperClass(const RegType& child) {
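  // The superclass of an unresolved type is itself unresolved; entries are keyed by the id of
  // the child type.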
  // Check if entry already exists.
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedSuperClass()) {
      const UnresolvedSuperClass* tmp_entry =
          down_cast<const UnresolvedSuperClass*>(cur_entry);
      uint16_t unresolved_super_child_id =
          tmp_entry->GetUnresolvedSuperClassChildId();
      if (unresolved_super_child_id == child.GetId()) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&allocator_) UnresolvedSuperClass(child.GetId(), this, entries_.size()));
}

const UninitializedType& RegTypeCache::Uninitialized(const RegType& type, uint32_t allocation_pc) {
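  // Uninitialized types produced by a new-instance are deduplicated on the allocation pc in
  // addition to the descriptor (or class).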
  UninitializedType* entry = nullptr;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedReference() &&
          down_cast<const UnresolvedUninitializedRefType*>(cur_entry)->GetAllocationPc()
              == allocation_pc &&
          (cur_entry->GetDescriptor() == descriptor)) {
        return *down_cast<const UnresolvedUninitializedRefType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UnresolvedUninitializedRefType(descriptor,
                                                             allocation_pc,
                                                             entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedReference() &&
          down_cast<const UninitializedReferenceType*>(cur_entry)
              ->GetAllocationPc() == allocation_pc &&
          cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedReferenceType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UninitializedReferenceType(klass,
                                                         descriptor,
                                                         allocation_pc,
                                                         entries_.size());
  }
  return AddEntry(entry);
}

const RegType& RegTypeCache::FromUninitialized(const RegType& uninit_type) {
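  // Map an uninitialized type to the initialized type it becomes once its constructor has run.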
  RegType* entry;

  if (uninit_type.IsUnresolvedTypes()) {
    const StringPiece& descriptor(uninit_type.GetDescriptor());
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *cur_entry;
      }
    }
    entry = new (&allocator_) UnresolvedReferenceType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = uninit_type.GetClass();
    if (uninit_type.IsUninitializedThisReference() && !klass->IsFinal()) {
      // For the uninitialized "this" reference, look for reference types that are not precise.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&allocator_) ReferenceType(klass, "", entries_.size());
    } else if (!klass->IsPrimitive()) {
      // We're uninitialized because of allocation, so look for or create a precise type, as
      // allocations may only create objects of that type.
      // Note: we do not check whether the given klass is actually instantiable (besides being
      //       primitive), that is, we allow interfaces and abstract classes here. The reasoning is
      //       twofold:
      //       1) The "new-instance" instruction to generate the uninitialized type will already
      //          queue an instantiation error. This is a soft error that must be thrown at runtime,
      //          and could potentially change if the class is resolved differently at runtime.
      //       2) Checking whether the klass is instantiable and using conflict may produce a hard
      //          error when the value is used, which leads to a VerifyError, which is not the
      //          correct semantics.
      for (size_t i = primitive_count_; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsPreciseReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&allocator_) PreciseReferenceType(klass,
                                                     uninit_type.GetDescriptor(),
                                                     entries_.size());
    } else {
      return Conflict();
    }
  }
  return AddEntry(entry);
}

const UninitializedType& RegTypeCache::UninitializedThisArgument(const RegType& type) {
  UninitializedType* entry;
  const StringPiece& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedThisReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UnresolvedUninitializedThisRefType(descriptor, entries_.size());
  } else {
    mirror::Class* klass = type.GetClass();
    for (size_t i = primitive_count_; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedThisReference() && cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UninitializedThisReferenceType(klass, descriptor, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat1NonSmallConstant(int32_t value, bool precise) {
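  // Small constants are preallocated up front; constants outside that range are cached here on
  // demand.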
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->klass_.IsNull() && cur_entry->IsConstant() &&
        cur_entry->IsPreciseConstant() == precise &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValue() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstType(value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstLo(int32_t value, bool precise) {
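  // Category-2 (wide) constants are tracked as separate low and high halves.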
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantLo() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueLo() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstLoType(value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstLoType(value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstHi(int32_t value, bool precise) {
  for (size_t i = primitive_count_; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantHi() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueHi() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstHiType(value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstHiType(value, entries_.size());
  }
  return AddEntry(entry);
}

const RegType& RegTypeCache::GetComponentType(const RegType& array, mirror::ClassLoader* loader) {
  if (!array.IsArrayTypes()) {
    return Conflict();
  } else if (array.IsUnresolvedTypes()) {
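    // The component type of an unresolved array is derived by stripping the leading '[' from the
    // descriptor.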
    DCHECK(!array.IsUnresolvedMergedReference());  // Caller must make sure not to ask for this.
    const std::string descriptor(array.GetDescriptor().as_string());
    return FromDescriptor(loader, descriptor.c_str() + 1, false);
  } else {
    mirror::Class* klass = array.GetClass()->GetComponentType();
    std::string temp;
    const char* descriptor = klass->GetDescriptor(&temp);
    if (klass->IsErroneous()) {
      // Arrays may have erroneous component types, use unresolved in that case.
      // We assume that the primitive classes are not erroneous, so we know it is a
      // reference type.
      return FromDescriptor(loader, descriptor, false);
    } else {
      return FromClass(descriptor, klass, klass->CannotBeAssignedFromOtherTypes());
    }
  }
}

void RegTypeCache::Dump(std::ostream& os) {
  for (size_t i = 0; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry != nullptr) {
      os << i << ": " << cur_entry->Dump() << "\n";
    }
  }
}

void RegTypeCache::VisitStaticRoots(RootVisitor* visitor) {
  // Visit the primitive types; this is required because if there are no active verifiers they
  // won't be in the entries array, and therefore not visited as roots.
  if (primitive_initialized_) {
    RootInfo ri(kRootUnknown);
    UndefinedType::GetInstance()->VisitRoots(visitor, ri);
    ConflictType::GetInstance()->VisitRoots(visitor, ri);
    BooleanType::GetInstance()->VisitRoots(visitor, ri);
    ByteType::GetInstance()->VisitRoots(visitor, ri);
    ShortType::GetInstance()->VisitRoots(visitor, ri);
    CharType::GetInstance()->VisitRoots(visitor, ri);
    IntegerType::GetInstance()->VisitRoots(visitor, ri);
    LongLoType::GetInstance()->VisitRoots(visitor, ri);
    LongHiType::GetInstance()->VisitRoots(visitor, ri);
    FloatType::GetInstance()->VisitRoots(visitor, ri);
    DoubleLoType::GetInstance()->VisitRoots(visitor, ri);
    DoubleHiType::GetInstance()->VisitRoots(visitor, ri);
    for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
      small_precise_constants_[value - kMinSmallConstant]->VisitRoots(visitor, ri);
    }
  }
}

void RegTypeCache::VisitRoots(RootVisitor* visitor, const RootInfo& root_info) {
  // Exclude the static roots that are visited by VisitStaticRoots().
  for (size_t i = primitive_count_; i < entries_.size(); ++i) {
    entries_[i]->VisitRoots(visitor, root_info);
  }
  for (auto& pair : klass_entries_) {
    GcRoot<mirror::Class>& root = pair.first;
    root.VisitRoot(visitor, root_info);
  }
}

}  // namespace verifier
}  // namespace art