/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type_cache-inl.h"

#include <type_traits>

#include "base/aborting.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "base/stl_util.h"
#include "class_linker-inl.h"
#include "class_root-inl.h"
#include "dex/descriptors_names.h"
#include "dex/dex_file-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "reg_type-inl.h"

namespace art HIDDEN {
namespace verifier {

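// Returns true if the cached entry satisfies the requested precision: either the precision
// matches exactly, or an imprecise lookup found a precise entry whose class cannot be assigned
// from other types (so the precise entry is the only possible match).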
ALWAYS_INLINE static inline bool MatchingPrecisionForClass(const RegType* entry, bool precise)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (entry->IsPreciseReference() == precise) {
    // We were or weren't looking for a precise reference and we found what we need.
    return true;
  } else {
    if (!precise && entry->GetClass()->CannotBeAssignedFromOtherTypes()) {
      // We weren't looking for a precise reference (the lookup is by descriptor), but the class
      // of this entry cannot be assigned from other types, so the precise entry is the only
      // possible match for the descriptor. Return it.
      return true;
    }
    return false;
  }
}

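// Pre-populates the fixed part of the cache: one PreciseConstType per small constant value, one
// entry per primitive type, and the Undefined, Conflict and Null singletons.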
void RegTypeCache::FillPrimitiveAndSmallConstantTypes() {
  entries_.resize(kNumPrimitivesAndSmallConstants);
  for (int32_t value = kMinSmallConstant; value <= kMaxSmallConstant; ++value) {
    int32_t i = value - kMinSmallConstant;
    entries_[i] = new (&allocator_) PreciseConstType(null_handle_, value, i);
  }

#define CREATE_PRIMITIVE_TYPE(type, class_root, descriptor, id) \
  entries_[id] = new (&allocator_) type( \
      handles_.NewHandle(GetClassRoot(class_root, class_linker_)), \
      descriptor, \
      id); \

  CREATE_PRIMITIVE_TYPE(BooleanType, ClassRoot::kPrimitiveBoolean, "Z", kBooleanCacheId);
  CREATE_PRIMITIVE_TYPE(ByteType, ClassRoot::kPrimitiveByte, "B", kByteCacheId);
  CREATE_PRIMITIVE_TYPE(ShortType, ClassRoot::kPrimitiveShort, "S", kShortCacheId);
  CREATE_PRIMITIVE_TYPE(CharType, ClassRoot::kPrimitiveChar, "C", kCharCacheId);
  CREATE_PRIMITIVE_TYPE(IntegerType, ClassRoot::kPrimitiveInt, "I", kIntCacheId);
  CREATE_PRIMITIVE_TYPE(LongLoType, ClassRoot::kPrimitiveLong, "J", kLongLoCacheId);
  CREATE_PRIMITIVE_TYPE(LongHiType, ClassRoot::kPrimitiveLong, "J", kLongHiCacheId);
  CREATE_PRIMITIVE_TYPE(FloatType, ClassRoot::kPrimitiveFloat, "F", kFloatCacheId);
  CREATE_PRIMITIVE_TYPE(DoubleLoType, ClassRoot::kPrimitiveDouble, "D", kDoubleLoCacheId);
  CREATE_PRIMITIVE_TYPE(DoubleHiType, ClassRoot::kPrimitiveDouble, "D", kDoubleHiCacheId);

#undef CREATE_PRIMITIVE_TYPE

  entries_[kUndefinedCacheId] =
      new (&allocator_) UndefinedType(null_handle_, "", kUndefinedCacheId);
  entries_[kConflictCacheId] =
      new (&allocator_) ConflictType(null_handle_, "", kConflictCacheId);
  entries_[kNullCacheId] =
      new (&allocator_) NullType(null_handle_, "", kNullCacheId);
}

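// Maps a type descriptor to a cached RegType: single-character primitive descriptors resolve to
// the fixed primitive entries, 'L' and '[' descriptors go through From(), and anything else
// (including 'V') yields the conflict type.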
const RegType& RegTypeCache::FromDescriptor(Handle<mirror::ClassLoader> loader,
                                            const char* descriptor) {
  if (descriptor[1] == '\0') {
    switch (descriptor[0]) {
      case 'Z':
        return Boolean();
      case 'B':
        return Byte();
      case 'S':
        return Short();
      case 'C':
        return Char();
      case 'I':
        return Integer();
      case 'J':
        return LongLo();
      case 'F':
        return Float();
      case 'D':
        return DoubleLo();
      case 'V':  // For void types, conflict types.
      default:
        return Conflict();
    }
  } else if (descriptor[0] == 'L' || descriptor[0] == '[') {
    return From(loader, descriptor);
  } else {
    return Conflict();
  }
}

const RegType& RegTypeCache::RegTypeFromPrimitiveType(Primitive::Type prim_type) const {
  switch (prim_type) {
    case Primitive::kPrimBoolean:
      return *entries_[kBooleanCacheId];
    case Primitive::kPrimByte:
      return *entries_[kByteCacheId];
    case Primitive::kPrimShort:
      return *entries_[kShortCacheId];
    case Primitive::kPrimChar:
      return *entries_[kCharCacheId];
    case Primitive::kPrimInt:
      return *entries_[kIntCacheId];
    case Primitive::kPrimLong:
      return *entries_[kLongLoCacheId];
    case Primitive::kPrimFloat:
      return *entries_[kFloatCacheId];
    case Primitive::kPrimDouble:
      return *entries_[kDoubleLoCacheId];
    case Primitive::kPrimVoid:
    default:
      return *entries_[kConflictCacheId];
  }
}

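// Checks whether the cache entry at |idx| matches the given descriptor and requested precision.
// Unresolved references match regardless of precision, since precision is not tracked for them.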
bool RegTypeCache::MatchDescriptor(size_t idx, const std::string_view& descriptor, bool precise) {
  const RegType* entry = entries_[idx];
  if (descriptor != entry->descriptor_) {
    return false;
  }
  if (entry->HasClass()) {
    return MatchingPrecisionForClass(entry, precise);
  }
  // There is no notion of precise unresolved references; the precise information is simply
  // dropped on the floor.
  DCHECK(entry->IsUnresolvedReference());
  return true;
}

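// Resolves a descriptor to a class. When class loading is allowed this may load and resolve the
// class; otherwise only classes that are already resolved are returned.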
ObjPtr<mirror::Class> RegTypeCache::ResolveClass(const char* descriptor,
                                                 Handle<mirror::ClassLoader> loader) {
  // The class was not found in the cache, so we must create a new type.
  // Try resolving the class.
  Thread* self = Thread::Current();
  ObjPtr<mirror::Class> klass = nullptr;
  if (can_load_classes_) {
    klass = class_linker_->FindClass(self, descriptor, loader);
  } else {
    klass = class_linker_->LookupClass(self, descriptor, loader.Get());
    if (klass != nullptr && !klass->IsResolved()) {
      // We found the class, but without it being loaded it's not safe for use.
      klass = nullptr;
    }
  }
  return klass;
}

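// Copies a string into the arena so the returned view stays valid for the lifetime of the cache.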
std::string_view RegTypeCache::AddString(const std::string_view& str) {
  char* ptr = allocator_.AllocArray<char>(str.length());
  memcpy(ptr, str.data(), str.length());
  return std::string_view(ptr, str.length());
}

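// Looks up (or creates) the cache entry for the reference type named by |descriptor|. A resolved
// class yields a precise or imprecise reference entry depending on whether the class can be
// assigned from other types; an unresolvable but well-formed descriptor yields an unresolved
// reference, and a malformed descriptor yields the conflict type.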
const RegType& RegTypeCache::From(Handle<mirror::ClassLoader> loader, const char* descriptor) {
  std::string_view sv_descriptor(descriptor);
  // Try looking up the class in the cache first. We use a std::string_view to avoid
  // repeated strlen operations on the descriptor.
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    if (MatchDescriptor(i, sv_descriptor, /* precise= */ false)) {
      return *(entries_[i]);
    }
  }
  // The class was not found in the cache, so we create a new type for it.
  // Try resolving the class.
  ObjPtr<mirror::Class> klass = ResolveClass(descriptor, loader);
  if (klass != nullptr) {
    // Create a precise type if the class cannot be assigned from other types
    // (final classes, arrays of final classes and primitive arrays, see
    // `Class::CannotBeAssignedFromOtherTypes()`; primitive types should not
    // reach this code).
    DCHECK(!klass->IsPrimitive());
    RegType* entry;
    // Create an imprecise type if we can't tell for a fact that it is precise.
    if (klass->CannotBeAssignedFromOtherTypes()) {
      DCHECK_IMPLIES(klass->IsAbstract(), klass->IsArrayClass());
      DCHECK(!klass->IsInterface());
      entry = new (&allocator_) PreciseReferenceType(handles_.NewHandle(klass),
                                                     AddString(sv_descriptor),
                                                     entries_.size());
    } else {
      entry = new (&allocator_) ReferenceType(handles_.NewHandle(klass),
                                              AddString(sv_descriptor),
                                              entries_.size());
    }
    return AddEntry(entry);
  } else {  // Class not resolved.
    // We tried loading the class and failed; this may have raised an exception,
    // so we want to clear it before we go on.
    if (can_load_classes_) {
      DCHECK(Thread::Current()->IsExceptionPending());
      Thread::Current()->ClearException();
    } else {
      DCHECK(!Thread::Current()->IsExceptionPending());
    }
    if (IsValidDescriptor(descriptor)) {
      return AddEntry(new (&allocator_) UnresolvedReferenceType(null_handle_,
                                                                AddString(sv_descriptor),
                                                                entries_.size()));
    } else {
      // The descriptor is broken: return the unknown type, as there's nothing sensible
      // that could be done at runtime.
      return Conflict();
    }
  }
}

const RegType& RegTypeCache::MakeUnresolvedReference() {
  // The descriptor is intentionally invalid so nothing else will match this type.
  return AddEntry(new (&allocator_) UnresolvedReferenceType(
      null_handle_, AddString("a"), entries_.size()));
}

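// Searches klass_entries_ for an existing entry for |klass| with an acceptable precision;
// primitive classes are answered directly from the fixed primitive entries. Returns nullptr if
// no matching entry exists yet.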
const RegType* RegTypeCache::FindClass(ObjPtr<mirror::Class> klass, bool precise) const {
  DCHECK(klass != nullptr);
  if (klass->IsPrimitive()) {
    // Note: precise isn't used for primitive classes. A char is assignable to an int. All
    // primitive classes are final.
    return &RegTypeFromPrimitiveType(klass->GetPrimitiveType());
  }
  for (auto& pair : klass_entries_) {
    const Handle<mirror::Class> reg_klass = pair.first;
    if (reg_klass.Get() == klass) {
      const RegType* reg_type = pair.second;
      if (MatchingPrecisionForClass(reg_type, precise)) {
        return reg_type;
      }
    }
  }
  return nullptr;
}

const RegType* RegTypeCache::InsertClass(const std::string_view& descriptor,
                                         ObjPtr<mirror::Class> klass,
                                         bool precise) {
  // No reference to the class was found, create new reference.
  DCHECK(FindClass(klass, precise) == nullptr);
  RegType* const reg_type = precise
      ? static_cast<RegType*>(
          new (&allocator_) PreciseReferenceType(handles_.NewHandle(klass),
                                                 descriptor,
                                                 entries_.size()))
      : new (&allocator_) ReferenceType(handles_.NewHandle(klass), descriptor, entries_.size());
  return &AddEntry(reg_type);
}

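// Returns the cached RegType for |klass|, creating and inserting a new entry if none exists.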
const RegType& RegTypeCache::FromClass(const char* descriptor,
                                       ObjPtr<mirror::Class> klass,
                                       bool precise) {
  DCHECK(klass != nullptr);
  const RegType* reg_type = FindClass(klass, precise);
  if (reg_type == nullptr) {
    reg_type = InsertClass(AddString(std::string_view(descriptor)), klass, precise);
  }
  return *reg_type;
}

RegTypeCache::RegTypeCache(Thread* self,
                           ClassLinker* class_linker,
                           bool can_load_classes,
                           ScopedArenaAllocator& allocator,
                           bool can_suspend)
    : entries_(allocator.Adapter(kArenaAllocVerifier)),
      klass_entries_(allocator.Adapter(kArenaAllocVerifier)),
      allocator_(allocator),
      handles_(self),
      class_linker_(class_linker),
      can_load_classes_(can_load_classes) {
  DCHECK(can_suspend || !can_load_classes) << "Cannot load classes if suspension is disabled!";
  if (kIsDebugBuild && can_suspend) {
    Thread::Current()->AssertThreadSuspensionIsAllowable(gAborting == 0);
  }
  // The klass_entries_ array does not have primitives or small constants.
  static constexpr size_t kNumReserveEntries = 32;
  klass_entries_.reserve(kNumReserveEntries);
  // We want to have room for additional entries after inserting primitives and small
  // constants.
  entries_.reserve(kNumReserveEntries + kNumPrimitivesAndSmallConstants);
  FillPrimitiveAndSmallConstantTypes();
}

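// Merges two types where at least one side involves unresolved components. The resolved parts
// are merged normally; the unresolved parts are tracked as a bit vector of type ids. Merges that
// mix arrays and non-arrays, or whose resolved part is a non-object array, fall back to an
// imprecise java.lang.Object.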
const RegType& RegTypeCache::FromUnresolvedMerge(const RegType& left,
                                                 const RegType& right,
                                                 MethodVerifier* verifier) {
  ArenaBitVector types(&allocator_,
                       kDefaultArenaBitVectorBytes * kBitsPerByte,  // Allocate at least 8 bytes.
                       true);                                       // Is expandable.
  const RegType* left_resolved;
  bool left_unresolved_is_array;
  if (left.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& left_merge = *down_cast<const UnresolvedMergedType*>(&left);

    types.Copy(&left_merge.GetUnresolvedTypes());
    left_resolved = &left_merge.GetResolvedPart();
    left_unresolved_is_array = left.IsArrayTypes();
  } else if (left.IsUnresolvedTypes()) {
    types.SetBit(left.GetId());
    left_resolved = &Zero();
    left_unresolved_is_array = left.IsArrayTypes();
  } else {
    left_resolved = &left;
    left_unresolved_is_array = false;
  }

  const RegType* right_resolved;
  bool right_unresolved_is_array;
  if (right.IsUnresolvedMergedReference()) {
    const UnresolvedMergedType& right_merge = *down_cast<const UnresolvedMergedType*>(&right);

    types.Union(&right_merge.GetUnresolvedTypes());
    right_resolved = &right_merge.GetResolvedPart();
    right_unresolved_is_array = right.IsArrayTypes();
  } else if (right.IsUnresolvedTypes()) {
    types.SetBit(right.GetId());
    right_resolved = &Zero();
    right_unresolved_is_array = right.IsArrayTypes();
  } else {
    right_resolved = &right;
    right_unresolved_is_array = false;
  }

  // Merge the resolved parts. Left and right might be equal, so use SafeMerge.
  const RegType& resolved_parts_merged = left_resolved->SafeMerge(*right_resolved, this, verifier);
  // If we get a conflict here, the merge result is a conflict, not an unresolved merge type.
  if (resolved_parts_merged.IsConflict()) {
    return Conflict();
  }
  if (resolved_parts_merged.IsJavaLangObject()) {
    return resolved_parts_merged;
  }

  bool resolved_merged_is_array = resolved_parts_merged.IsArrayTypes();
  if (left_unresolved_is_array || right_unresolved_is_array || resolved_merged_is_array) {
    // Arrays involved, see if we need to merge to Object.

    // Is the resolved part a primitive array?
    if (resolved_merged_is_array && !resolved_parts_merged.IsObjectArrayTypes()) {
      return JavaLangObject(/* precise= */ false);
    }

    // Is any part not an array (but exists)?
    if ((!left_unresolved_is_array && left_resolved != &left) ||
        (!right_unresolved_is_array && right_resolved != &right) ||
        !resolved_merged_is_array) {
      return JavaLangObject(/* precise= */ false);
    }
  }

  // Check if entry already exists.
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedMergedReference()) {
      const UnresolvedMergedType* cmp_type = down_cast<const UnresolvedMergedType*>(cur_entry);
      const RegType& resolved_part = cmp_type->GetResolvedPart();
      const BitVector& unresolved_part = cmp_type->GetUnresolvedTypes();
      // Use SameBitsSet. "types" is expandable to allow merging in the components, but the
      // BitVector in the final RegType will be made non-expandable.
      if (&resolved_part == &resolved_parts_merged && types.SameBitsSet(&unresolved_part)) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&allocator_) UnresolvedMergedType(resolved_parts_merged,
                                                         types,
                                                         this,
                                                         entries_.size()));
}

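// Returns the type representing the (unknown) superclass of an unresolved child type, reusing an
// existing UnresolvedSuperClass entry for the same child id if one has already been created.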
const RegType& RegTypeCache::FromUnresolvedSuperClass(const RegType& child) {
  // Check if entry already exists.
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsUnresolvedSuperClass()) {
      const UnresolvedSuperClass* tmp_entry =
          down_cast<const UnresolvedSuperClass*>(cur_entry);
      uint16_t unresolved_super_child_id =
          tmp_entry->GetUnresolvedSuperClassChildId();
      if (unresolved_super_child_id == child.GetId()) {
        return *cur_entry;
      }
    }
  }
  return AddEntry(new (&allocator_) UnresolvedSuperClass(
      null_handle_, child.GetId(), this, entries_.size()));
}

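// Returns the uninitialized type corresponding to an allocation of |type| at |allocation_pc|,
// reusing an existing entry with a matching descriptor/class and allocation pc when present.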
const UninitializedType& RegTypeCache::Uninitialized(const RegType& type, uint32_t allocation_pc) {
  UninitializedType* entry = nullptr;
  const std::string_view& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedReference() &&
          down_cast<const UnresolvedUninitializedRefType*>(cur_entry)->GetAllocationPc()
              == allocation_pc &&
          (cur_entry->GetDescriptor() == descriptor)) {
        return *down_cast<const UnresolvedUninitializedRefType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UnresolvedUninitializedRefType(null_handle_,
                                                             descriptor,
                                                             allocation_pc,
                                                             entries_.size());
  } else {
    ObjPtr<mirror::Class> klass = type.GetClass();
    for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedReference() &&
          down_cast<const UninitializedReferenceType*>(cur_entry)
              ->GetAllocationPc() == allocation_pc &&
          cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedReferenceType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UninitializedReferenceType(handles_.NewHandle(klass),
                                                         descriptor,
                                                         allocation_pc,
                                                         entries_.size());
  }
  return AddEntry(entry);
}

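// Returns the initialized counterpart of an uninitialized type, i.e. the type a register holds
// once the constructor call has completed.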
const RegType& RegTypeCache::FromUninitialized(const RegType& uninit_type) {
  RegType* entry;

  if (uninit_type.IsUnresolvedTypes()) {
    const std::string_view& descriptor(uninit_type.GetDescriptor());
    for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *cur_entry;
      }
    }
    entry = new (&allocator_) UnresolvedReferenceType(null_handle_, descriptor, entries_.size());
  } else {
    ObjPtr<mirror::Class> klass = uninit_type.GetClass();
    if (uninit_type.IsUninitializedThisReference() && !klass->IsFinal()) {
      // For uninitialized "this reference" look for reference types that are not precise.
      for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&allocator_) ReferenceType(handles_.NewHandle(klass), "", entries_.size());
    } else if (!klass->IsPrimitive()) {
      // We're uninitialized because of allocation, so look for or create a precise type, as
      // allocations may only create objects of that exact type.
      // Note: we do not check whether the given klass is actually instantiable (besides being
      //       primitive), that is, we allow interfaces and abstract classes here. The reasoning is
      //       twofold:
      //       1) The "new-instance" instruction to generate the uninitialized type will already
      //          queue an instantiation error. This is a soft error that must be thrown at runtime,
      //          and could potentially change if the class is resolved differently at runtime.
      //       2) Checking whether the klass is instantiable and using conflict may produce a hard
      //          error when the value is used, which leads to a VerifyError, which is not the
      //          correct semantics.
      for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
        const RegType* cur_entry = entries_[i];
        if (cur_entry->IsPreciseReference() && cur_entry->GetClass() == klass) {
          return *cur_entry;
        }
      }
      entry = new (&allocator_) PreciseReferenceType(handles_.NewHandle(klass),
                                                     uninit_type.GetDescriptor(),
                                                     entries_.size());
    } else {
      return Conflict();
    }
  }
  return AddEntry(entry);
}

const UninitializedType& RegTypeCache::UninitializedThisArgument(const RegType& type) {
  UninitializedType* entry;
  const std::string_view& descriptor(type.GetDescriptor());
  if (type.IsUnresolvedTypes()) {
    for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUnresolvedAndUninitializedThisReference() &&
          cur_entry->GetDescriptor() == descriptor) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UnresolvedUninitializedThisRefType(
        null_handle_, descriptor, entries_.size());
  } else {
    ObjPtr<mirror::Class> klass = type.GetClass();
    for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
      const RegType* cur_entry = entries_[i];
      if (cur_entry->IsUninitializedThisReference() && cur_entry->GetClass() == klass) {
        return *down_cast<const UninitializedType*>(cur_entry);
      }
    }
    entry = new (&allocator_) UninitializedThisReferenceType(handles_.NewHandle(klass),
                                                             descriptor,
                                                             entries_.size());
  }
  return AddEntry(entry);
}

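// The constant factories below reuse an existing entry with the same value and precision when
// possible. The "Lo"/"Hi" variants represent the low and high halves of wide (category 2)
// constants.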
const ConstantType& RegTypeCache::FromCat1NonSmallConstant(int32_t value, bool precise) {
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (!cur_entry->HasClass() && cur_entry->IsConstant() &&
        cur_entry->IsPreciseConstant() == precise &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValue() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstType(null_handle_, value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstType(null_handle_, value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstLo(int32_t value, bool precise) {
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantLo() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueLo() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstLoType(null_handle_, value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstLoType(null_handle_, value, entries_.size());
  }
  return AddEntry(entry);
}

const ConstantType& RegTypeCache::FromCat2ConstHi(int32_t value, bool precise) {
  for (size_t i = kNumPrimitivesAndSmallConstants; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry->IsConstantHi() && (cur_entry->IsPrecise() == precise) &&
        (down_cast<const ConstantType*>(cur_entry))->ConstantValueHi() == value) {
      return *down_cast<const ConstantType*>(cur_entry);
    }
  }
  ConstantType* entry;
  if (precise) {
    entry = new (&allocator_) PreciseConstHiType(null_handle_, value, entries_.size());
  } else {
    entry = new (&allocator_) ImpreciseConstHiType(null_handle_, value, entries_.size());
  }
  return AddEntry(entry);
}

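// Returns the component type of an array type: unresolved arrays strip the leading '[' from the
// descriptor, resolved arrays use the component class (falling back to an unresolved type if the
// component class is erroneous). Non-array inputs yield the conflict type.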
const RegType& RegTypeCache::GetComponentType(const RegType& array,
                                              Handle<mirror::ClassLoader> loader) {
  if (!array.IsArrayTypes()) {
    return Conflict();
  } else if (array.IsUnresolvedTypes()) {
    DCHECK(!array.IsUnresolvedMergedReference());  // Caller must make sure not to ask for this.
    const std::string descriptor(array.GetDescriptor());
    return FromDescriptor(loader, descriptor.c_str() + 1);
  } else {
    ObjPtr<mirror::Class> klass = array.GetClass()->GetComponentType();
    std::string temp;
    const char* descriptor = klass->GetDescriptor(&temp);
    if (klass->IsErroneous()) {
      // Arrays may have erroneous component types; use an unresolved type in that case.
      // We assume that the primitive classes are not erroneous, so we know it is a
      // reference type.
      return FromDescriptor(loader, descriptor);
    } else {
      return FromClass(descriptor, klass, klass->CannotBeAssignedFromOtherTypes());
    }
  }
}

void RegTypeCache::Dump(std::ostream& os) {
  for (size_t i = 0; i < entries_.size(); i++) {
    const RegType* cur_entry = entries_[i];
    if (cur_entry != nullptr) {
      os << i << ": " << cur_entry->Dump() << "\n";
    }
  }
}

}  // namespace verifier
}  // namespace art