/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "reg_type.h"

#include <queue>
#include <set>
#include <string>
#include <string_view>
#include <unordered_set>
#include <vector>

#include "base/bit_vector.h"
#include "base/casts.h"
#include "base/scoped_arena_allocator.h"
#include "common_runtime_test.h"
#include "compiler_callbacks.h"
#include "reg_type-inl.h"
#include "reg_type_cache-inl.h"
#include "scoped_thread_state_change-inl.h"
#include "thread-current-inl.h"

namespace art HIDDEN {
namespace verifier {
class RegTypeTest : public CommonRuntimeTest {
 protected:
  RegTypeTest() {
    use_boot_image_ = true;  // Make the Runtime creation cheaper.
  }

  static const RegType& PreciseJavaLangObjectFromDescriptor(RegTypeCache* cache,
                                                            Handle<mirror::ClassLoader> loader)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // To create a precise `java.lang.Object` reference from a descriptor, go through
    // `Uninitialized()` and `FromUninitialized()` as we would for `new Object()`.
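    // (In dex bytecode, `new-instance` yields an uninitialized reference; it is the
    // `invoke-direct` call to `<init>` that turns it into an initialized, precise reference.)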
    const RegType& imprecise_obj = cache->FromDescriptor(loader, "Ljava/lang/Object;");
    CHECK(!imprecise_obj.IsPreciseReference());
    const RegType& precise_obj =
        cache->FromUninitialized(cache->Uninitialized(imprecise_obj, /* allocation_pc= */ 0u));
    CHECK(precise_obj.IsPreciseReference());
    return precise_obj;
  }
};

TEST_F(RegTypeTest, ConstLoHi) {
  // Tests creating and comparing constant types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& ref_type_const_0 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_1 = cache.FromCat1Const(10, true);
  const RegType& ref_type_const_2 = cache.FromCat1Const(30, true);
  const RegType& ref_type_const_3 = cache.FromCat1Const(30, false);
  EXPECT_TRUE(ref_type_const_0.Equals(ref_type_const_1));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_2));
  EXPECT_FALSE(ref_type_const_0.Equals(ref_type_const_3));

  const RegType& ref_type_const_wide_0 = cache.FromCat2ConstHi(50, true);
  const RegType& ref_type_const_wide_1 = cache.FromCat2ConstHi(50, true);
  EXPECT_TRUE(ref_type_const_wide_0.Equals(ref_type_const_wide_1));

  const RegType& ref_type_const_wide_2 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_3 = cache.FromCat2ConstLo(50, true);
  const RegType& ref_type_const_wide_4 = cache.FromCat2ConstLo(55, true);
  EXPECT_TRUE(ref_type_const_wide_2.Equals(ref_type_const_wide_3));
  EXPECT_FALSE(ref_type_const_wide_2.Equals(ref_type_const_wide_4));
}

TEST_F(RegTypeTest, Pairs) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  int64_t val = static_cast<int32_t>(1234);
  const RegType& precise_lo = cache.FromCat2ConstLo(static_cast<int32_t>(val), true);
  const RegType& precise_hi = cache.FromCat2ConstHi(static_cast<int32_t>(val >> 32), true);
  const RegType& precise_const = cache.FromCat1Const(static_cast<int32_t>(val >> 32), true);
  const RegType& long_lo = cache.LongLo();
  const RegType& long_hi = cache.LongHi();
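  // Wide (category-2) values occupy two adjacent registers; the verifier models the low and
  // high halves as separate register types that must form a matching pair.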
  // Check the expectations for types.
  EXPECT_TRUE(precise_lo.IsLowHalf());
  EXPECT_FALSE(precise_hi.IsLowHalf());
  EXPECT_FALSE(precise_lo.IsHighHalf());
  EXPECT_TRUE(precise_hi.IsHighHalf());
  EXPECT_TRUE(long_hi.IsLongHighTypes());
  EXPECT_TRUE(precise_hi.IsLongHighTypes());
  // Check Pairing.
  EXPECT_FALSE(precise_lo.CheckWidePair(precise_const));
  EXPECT_TRUE(precise_lo.CheckWidePair(precise_hi));
  // Test Merging.
  EXPECT_TRUE((long_lo.Merge(precise_lo, &cache, /* verifier= */ nullptr)).IsLongTypes());
  EXPECT_TRUE((long_hi.Merge(precise_hi, &cache, /* verifier= */ nullptr)).IsLongHighTypes());
}

TEST_F(RegTypeTest, Primitives) {
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);

  const RegType& bool_reg_type = cache.Boolean();
  EXPECT_FALSE(bool_reg_type.IsUndefined());
  EXPECT_FALSE(bool_reg_type.IsConflict());
  EXPECT_FALSE(bool_reg_type.IsZero());
  EXPECT_FALSE(bool_reg_type.IsOne());
  EXPECT_FALSE(bool_reg_type.IsLongConstant());
  EXPECT_TRUE(bool_reg_type.IsBoolean());
  EXPECT_FALSE(bool_reg_type.IsByte());
  EXPECT_FALSE(bool_reg_type.IsChar());
  EXPECT_FALSE(bool_reg_type.IsShort());
  EXPECT_FALSE(bool_reg_type.IsInteger());
  EXPECT_FALSE(bool_reg_type.IsLong());
  EXPECT_FALSE(bool_reg_type.IsFloat());
  EXPECT_FALSE(bool_reg_type.IsDouble());
  EXPECT_FALSE(bool_reg_type.IsReference());
  EXPECT_FALSE(bool_reg_type.IsLowHalf());
  EXPECT_FALSE(bool_reg_type.IsHighHalf());
  EXPECT_FALSE(bool_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(bool_reg_type.IsReferenceTypes());
  EXPECT_TRUE(bool_reg_type.IsCategory1Types());
  EXPECT_FALSE(bool_reg_type.IsCategory2Types());
  EXPECT_TRUE(bool_reg_type.IsBooleanTypes());
  EXPECT_TRUE(bool_reg_type.IsByteTypes());
  EXPECT_TRUE(bool_reg_type.IsShortTypes());
  EXPECT_TRUE(bool_reg_type.IsCharTypes());
  EXPECT_TRUE(bool_reg_type.IsIntegralTypes());
  EXPECT_FALSE(bool_reg_type.IsFloatTypes());
  EXPECT_FALSE(bool_reg_type.IsLongTypes());
  EXPECT_FALSE(bool_reg_type.IsDoubleTypes());
  EXPECT_TRUE(bool_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(bool_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(bool_reg_type.HasClass());

  const RegType& byte_reg_type = cache.Byte();
  EXPECT_FALSE(byte_reg_type.IsUndefined());
  EXPECT_FALSE(byte_reg_type.IsConflict());
  EXPECT_FALSE(byte_reg_type.IsZero());
  EXPECT_FALSE(byte_reg_type.IsOne());
  EXPECT_FALSE(byte_reg_type.IsLongConstant());
  EXPECT_FALSE(byte_reg_type.IsBoolean());
  EXPECT_TRUE(byte_reg_type.IsByte());
  EXPECT_FALSE(byte_reg_type.IsChar());
  EXPECT_FALSE(byte_reg_type.IsShort());
  EXPECT_FALSE(byte_reg_type.IsInteger());
  EXPECT_FALSE(byte_reg_type.IsLong());
  EXPECT_FALSE(byte_reg_type.IsFloat());
  EXPECT_FALSE(byte_reg_type.IsDouble());
  EXPECT_FALSE(byte_reg_type.IsReference());
  EXPECT_FALSE(byte_reg_type.IsLowHalf());
  EXPECT_FALSE(byte_reg_type.IsHighHalf());
  EXPECT_FALSE(byte_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(byte_reg_type.IsReferenceTypes());
  EXPECT_TRUE(byte_reg_type.IsCategory1Types());
  EXPECT_FALSE(byte_reg_type.IsCategory2Types());
  EXPECT_FALSE(byte_reg_type.IsBooleanTypes());
  EXPECT_TRUE(byte_reg_type.IsByteTypes());
  EXPECT_TRUE(byte_reg_type.IsShortTypes());
  EXPECT_FALSE(byte_reg_type.IsCharTypes());
  EXPECT_TRUE(byte_reg_type.IsIntegralTypes());
  EXPECT_FALSE(byte_reg_type.IsFloatTypes());
  EXPECT_FALSE(byte_reg_type.IsLongTypes());
  EXPECT_FALSE(byte_reg_type.IsDoubleTypes());
  EXPECT_TRUE(byte_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(byte_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(byte_reg_type.HasClass());

  const RegType& char_reg_type = cache.Char();
  EXPECT_FALSE(char_reg_type.IsUndefined());
  EXPECT_FALSE(char_reg_type.IsConflict());
  EXPECT_FALSE(char_reg_type.IsZero());
  EXPECT_FALSE(char_reg_type.IsOne());
  EXPECT_FALSE(char_reg_type.IsLongConstant());
  EXPECT_FALSE(char_reg_type.IsBoolean());
  EXPECT_FALSE(char_reg_type.IsByte());
  EXPECT_TRUE(char_reg_type.IsChar());
  EXPECT_FALSE(char_reg_type.IsShort());
  EXPECT_FALSE(char_reg_type.IsInteger());
  EXPECT_FALSE(char_reg_type.IsLong());
  EXPECT_FALSE(char_reg_type.IsFloat());
  EXPECT_FALSE(char_reg_type.IsDouble());
  EXPECT_FALSE(char_reg_type.IsReference());
  EXPECT_FALSE(char_reg_type.IsLowHalf());
  EXPECT_FALSE(char_reg_type.IsHighHalf());
  EXPECT_FALSE(char_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(char_reg_type.IsReferenceTypes());
  EXPECT_TRUE(char_reg_type.IsCategory1Types());
  EXPECT_FALSE(char_reg_type.IsCategory2Types());
  EXPECT_FALSE(char_reg_type.IsBooleanTypes());
  EXPECT_FALSE(char_reg_type.IsByteTypes());
  EXPECT_FALSE(char_reg_type.IsShortTypes());
  EXPECT_TRUE(char_reg_type.IsCharTypes());
  EXPECT_TRUE(char_reg_type.IsIntegralTypes());
  EXPECT_FALSE(char_reg_type.IsFloatTypes());
  EXPECT_FALSE(char_reg_type.IsLongTypes());
  EXPECT_FALSE(char_reg_type.IsDoubleTypes());
  EXPECT_TRUE(char_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(char_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(char_reg_type.HasClass());

  const RegType& short_reg_type = cache.Short();
  EXPECT_FALSE(short_reg_type.IsUndefined());
  EXPECT_FALSE(short_reg_type.IsConflict());
  EXPECT_FALSE(short_reg_type.IsZero());
  EXPECT_FALSE(short_reg_type.IsOne());
  EXPECT_FALSE(short_reg_type.IsLongConstant());
  EXPECT_FALSE(short_reg_type.IsBoolean());
  EXPECT_FALSE(short_reg_type.IsByte());
  EXPECT_FALSE(short_reg_type.IsChar());
  EXPECT_TRUE(short_reg_type.IsShort());
  EXPECT_FALSE(short_reg_type.IsInteger());
  EXPECT_FALSE(short_reg_type.IsLong());
  EXPECT_FALSE(short_reg_type.IsFloat());
  EXPECT_FALSE(short_reg_type.IsDouble());
  EXPECT_FALSE(short_reg_type.IsReference());
  EXPECT_FALSE(short_reg_type.IsLowHalf());
  EXPECT_FALSE(short_reg_type.IsHighHalf());
  EXPECT_FALSE(short_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(short_reg_type.IsReferenceTypes());
  EXPECT_TRUE(short_reg_type.IsCategory1Types());
  EXPECT_FALSE(short_reg_type.IsCategory2Types());
  EXPECT_FALSE(short_reg_type.IsBooleanTypes());
  EXPECT_FALSE(short_reg_type.IsByteTypes());
  EXPECT_TRUE(short_reg_type.IsShortTypes());
  EXPECT_FALSE(short_reg_type.IsCharTypes());
  EXPECT_TRUE(short_reg_type.IsIntegralTypes());
  EXPECT_FALSE(short_reg_type.IsFloatTypes());
  EXPECT_FALSE(short_reg_type.IsLongTypes());
  EXPECT_FALSE(short_reg_type.IsDoubleTypes());
  EXPECT_TRUE(short_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(short_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(short_reg_type.HasClass());

  const RegType& int_reg_type = cache.Integer();
  EXPECT_FALSE(int_reg_type.IsUndefined());
  EXPECT_FALSE(int_reg_type.IsConflict());
  EXPECT_FALSE(int_reg_type.IsZero());
  EXPECT_FALSE(int_reg_type.IsOne());
  EXPECT_FALSE(int_reg_type.IsLongConstant());
  EXPECT_FALSE(int_reg_type.IsBoolean());
  EXPECT_FALSE(int_reg_type.IsByte());
  EXPECT_FALSE(int_reg_type.IsChar());
  EXPECT_FALSE(int_reg_type.IsShort());
  EXPECT_TRUE(int_reg_type.IsInteger());
  EXPECT_FALSE(int_reg_type.IsLong());
  EXPECT_FALSE(int_reg_type.IsFloat());
  EXPECT_FALSE(int_reg_type.IsDouble());
  EXPECT_FALSE(int_reg_type.IsReference());
  EXPECT_FALSE(int_reg_type.IsLowHalf());
  EXPECT_FALSE(int_reg_type.IsHighHalf());
  EXPECT_FALSE(int_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(int_reg_type.IsReferenceTypes());
  EXPECT_TRUE(int_reg_type.IsCategory1Types());
  EXPECT_FALSE(int_reg_type.IsCategory2Types());
  EXPECT_FALSE(int_reg_type.IsBooleanTypes());
  EXPECT_FALSE(int_reg_type.IsByteTypes());
  EXPECT_FALSE(int_reg_type.IsShortTypes());
  EXPECT_FALSE(int_reg_type.IsCharTypes());
  EXPECT_TRUE(int_reg_type.IsIntegralTypes());
  EXPECT_FALSE(int_reg_type.IsFloatTypes());
  EXPECT_FALSE(int_reg_type.IsLongTypes());
  EXPECT_FALSE(int_reg_type.IsDoubleTypes());
  EXPECT_TRUE(int_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(int_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(int_reg_type.HasClass());

  const RegType& long_reg_type = cache.LongLo();
  EXPECT_FALSE(long_reg_type.IsUndefined());
  EXPECT_FALSE(long_reg_type.IsConflict());
  EXPECT_FALSE(long_reg_type.IsZero());
  EXPECT_FALSE(long_reg_type.IsOne());
  EXPECT_FALSE(long_reg_type.IsLongConstant());
  EXPECT_FALSE(long_reg_type.IsBoolean());
  EXPECT_FALSE(long_reg_type.IsByte());
  EXPECT_FALSE(long_reg_type.IsChar());
  EXPECT_FALSE(long_reg_type.IsShort());
  EXPECT_FALSE(long_reg_type.IsInteger());
  EXPECT_TRUE(long_reg_type.IsLong());
  EXPECT_FALSE(long_reg_type.IsFloat());
  EXPECT_FALSE(long_reg_type.IsDouble());
  EXPECT_FALSE(long_reg_type.IsReference());
  EXPECT_TRUE(long_reg_type.IsLowHalf());
  EXPECT_FALSE(long_reg_type.IsHighHalf());
  EXPECT_TRUE(long_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsReferenceTypes());
  EXPECT_FALSE(long_reg_type.IsCategory1Types());
  EXPECT_TRUE(long_reg_type.IsCategory2Types());
  EXPECT_FALSE(long_reg_type.IsBooleanTypes());
  EXPECT_FALSE(long_reg_type.IsByteTypes());
  EXPECT_FALSE(long_reg_type.IsShortTypes());
  EXPECT_FALSE(long_reg_type.IsCharTypes());
  EXPECT_FALSE(long_reg_type.IsIntegralTypes());
  EXPECT_FALSE(long_reg_type.IsFloatTypes());
  EXPECT_TRUE(long_reg_type.IsLongTypes());
  EXPECT_FALSE(long_reg_type.IsDoubleTypes());
  EXPECT_FALSE(long_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(long_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(long_reg_type.HasClass());

  const RegType& float_reg_type = cache.Float();
  EXPECT_FALSE(float_reg_type.IsUndefined());
  EXPECT_FALSE(float_reg_type.IsConflict());
  EXPECT_FALSE(float_reg_type.IsZero());
  EXPECT_FALSE(float_reg_type.IsOne());
  EXPECT_FALSE(float_reg_type.IsLongConstant());
  EXPECT_FALSE(float_reg_type.IsBoolean());
  EXPECT_FALSE(float_reg_type.IsByte());
  EXPECT_FALSE(float_reg_type.IsChar());
  EXPECT_FALSE(float_reg_type.IsShort());
  EXPECT_FALSE(float_reg_type.IsInteger());
  EXPECT_FALSE(float_reg_type.IsLong());
  EXPECT_TRUE(float_reg_type.IsFloat());
  EXPECT_FALSE(float_reg_type.IsDouble());
  EXPECT_FALSE(float_reg_type.IsReference());
  EXPECT_FALSE(float_reg_type.IsLowHalf());
  EXPECT_FALSE(float_reg_type.IsHighHalf());
  EXPECT_FALSE(float_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsReferenceTypes());
  EXPECT_TRUE(float_reg_type.IsCategory1Types());
  EXPECT_FALSE(float_reg_type.IsCategory2Types());
  EXPECT_FALSE(float_reg_type.IsBooleanTypes());
  EXPECT_FALSE(float_reg_type.IsByteTypes());
  EXPECT_FALSE(float_reg_type.IsShortTypes());
  EXPECT_FALSE(float_reg_type.IsCharTypes());
  EXPECT_FALSE(float_reg_type.IsIntegralTypes());
  EXPECT_TRUE(float_reg_type.IsFloatTypes());
  EXPECT_FALSE(float_reg_type.IsLongTypes());
  EXPECT_FALSE(float_reg_type.IsDoubleTypes());
  EXPECT_FALSE(float_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(float_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(float_reg_type.HasClass());

  const RegType& double_reg_type = cache.DoubleLo();
  EXPECT_FALSE(double_reg_type.IsUndefined());
  EXPECT_FALSE(double_reg_type.IsConflict());
  EXPECT_FALSE(double_reg_type.IsZero());
  EXPECT_FALSE(double_reg_type.IsOne());
  EXPECT_FALSE(double_reg_type.IsLongConstant());
  EXPECT_FALSE(double_reg_type.IsBoolean());
  EXPECT_FALSE(double_reg_type.IsByte());
  EXPECT_FALSE(double_reg_type.IsChar());
  EXPECT_FALSE(double_reg_type.IsShort());
  EXPECT_FALSE(double_reg_type.IsInteger());
  EXPECT_FALSE(double_reg_type.IsLong());
  EXPECT_FALSE(double_reg_type.IsFloat());
  EXPECT_TRUE(double_reg_type.IsDouble());
  EXPECT_FALSE(double_reg_type.IsReference());
  EXPECT_TRUE(double_reg_type.IsLowHalf());
  EXPECT_FALSE(double_reg_type.IsHighHalf());
  EXPECT_TRUE(double_reg_type.IsLongOrDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsReferenceTypes());
  EXPECT_FALSE(double_reg_type.IsCategory1Types());
  EXPECT_TRUE(double_reg_type.IsCategory2Types());
  EXPECT_FALSE(double_reg_type.IsBooleanTypes());
  EXPECT_FALSE(double_reg_type.IsByteTypes());
  EXPECT_FALSE(double_reg_type.IsShortTypes());
  EXPECT_FALSE(double_reg_type.IsCharTypes());
  EXPECT_FALSE(double_reg_type.IsIntegralTypes());
  EXPECT_FALSE(double_reg_type.IsFloatTypes());
  EXPECT_FALSE(double_reg_type.IsLongTypes());
  EXPECT_TRUE(double_reg_type.IsDoubleTypes());
  EXPECT_FALSE(double_reg_type.IsArrayIndexTypes());
  EXPECT_FALSE(double_reg_type.IsNonZeroReferenceTypes());
  EXPECT_TRUE(double_reg_type.HasClass());
}

class RegTypeReferenceTest : public RegTypeTest {};

TEST_F(RegTypeReferenceTest, JavaLangObjectImprecise) {
  // Tests matching precisions. A reference type that was created precise doesn't
  // match the one that is imprecise.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& imprecise_obj = cache.JavaLangObject(false);
  const RegType& precise_obj = cache.JavaLangObject(true);
  const RegType& precise_obj_2 = PreciseJavaLangObjectFromDescriptor(&cache, loader);

  EXPECT_TRUE(precise_obj.Equals(precise_obj_2));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj));
  EXPECT_FALSE(precise_obj.Equals(imprecise_obj));
  EXPECT_FALSE(imprecise_obj.Equals(precise_obj_2));
}

TEST_F(RegTypeReferenceTest, UnresolvedType) {
  // Tests creating unresolved types: a cache miss the first time we ask the cache,
  // and a hit the second time.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  EXPECT_TRUE(ref_type_0.IsNonZeroReferenceTypes());

  const RegType& ref_type_1 = cache.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  EXPECT_TRUE(ref_type_0.Equals(ref_type_1));

  const RegType& unresolved_super_class = cache.FromUnresolvedSuperClass(ref_type_0);
  EXPECT_TRUE(unresolved_super_class.IsUnresolvedSuperClass());
  EXPECT_TRUE(unresolved_super_class.IsNonZeroReferenceTypes());
}

TEST_F(RegTypeReferenceTest, UnresolvedUninitializedType) {
  // Tests creating uninitialized types from unresolved types.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& ref_type_0 = cache.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type = cache.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  EXPECT_TRUE(ref_type_0.Equals(ref_type));
  // Create an uninitialized type from this unresolved type.
  const RegType& unresolved_uninitialized = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_uninitialized.IsUnresolvedAndUninitializedReference());
  EXPECT_TRUE(unresolved_uninitialized.IsUninitializedTypes());
  EXPECT_TRUE(unresolved_uninitialized.IsNonZeroReferenceTypes());
  // Create an uninitialized type from this unresolved type with a different PC.
  const RegType& unresolved_uninitialized_1 = cache.Uninitialized(ref_type, 1102ull);
  EXPECT_TRUE(unresolved_uninitialized.IsUnresolvedAndUninitializedReference());
  EXPECT_FALSE(unresolved_uninitialized.Equals(unresolved_uninitialized_1));
  // Create an uninitialized type from this unresolved type with the same PC.
  const RegType& unresolved_uninitialized_2 = cache.Uninitialized(ref_type, 1101ull);
  EXPECT_TRUE(unresolved_uninitialized.Equals(unresolved_uninitialized_2));
}

TEST_F(RegTypeReferenceTest, Dump) {
  // Tests types for proper Dump messages.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& unresolved_ref = cache.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  const RegType& unresolved_ref_another =
      cache.FromDescriptor(loader, "Ljava/lang/DoesNotExistEither;");
  const RegType& resolved_ref = cache.JavaLangString();
  const RegType& resolved_uninitialized = cache.Uninitialized(resolved_ref, 10);
  const RegType& unresolved_uninitialized = cache.Uninitialized(unresolved_ref, 12);
  const RegType& unresolved_merged = cache.FromUnresolvedMerge(
      unresolved_ref, unresolved_ref_another, /* verifier= */ nullptr);

  std::string expected = "Unresolved Reference: java.lang.DoesNotExist";
  EXPECT_EQ(expected, unresolved_ref.Dump());
  expected = "Precise Reference: java.lang.String";
  EXPECT_EQ(expected, resolved_ref.Dump());
  expected = "Uninitialized Reference: java.lang.String Allocation PC: 10";
  EXPECT_EQ(expected, resolved_uninitialized.Dump());
  expected = "Unresolved And Uninitialized Reference: java.lang.DoesNotExist Allocation PC: 12";
  EXPECT_EQ(expected, unresolved_uninitialized.Dump());
  expected =
      "UnresolvedMergedReferences(Zero/null | Unresolved Reference: java.lang.DoesNotExist, "
      "Unresolved Reference: java.lang.DoesNotExistEither)";
  EXPECT_EQ(expected, unresolved_merged.Dump());
}

TEST_F(RegTypeReferenceTest, JavalangString) {
  // Add a class to the cache, then look the same class up again and make sure we get a hit
  // the second time. Then check for the same effect when using the JavaLangString method
  // instead of FromDescriptor. The String class is final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& ref_type = cache.JavaLangString();
  const RegType& ref_type_2 = cache.JavaLangString();
  const RegType& ref_type_3 = cache.FromDescriptor(loader, "Ljava/lang/String;");

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_2.Equals(ref_type_3));
  EXPECT_TRUE(ref_type.IsPreciseReference());

  // Create an uninitialized type out of this:
  const RegType& ref_type_uninitialized = cache.Uninitialized(ref_type, 0110ull);
  EXPECT_TRUE(ref_type_uninitialized.IsUninitializedReference());
  EXPECT_FALSE(ref_type_uninitialized.IsUnresolvedAndUninitializedReference());
}

TEST_F(RegTypeReferenceTest, JavalangObject) {
  // Add a class to the cache, then look the same class up again and make sure we get a hit
  // the second time. Then check for the same effect when using the JavaLangObject method
  // instead of FromDescriptor. The Object class is not final.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& ref_type = cache.JavaLangObject(true);
  const RegType& ref_type_2 = cache.JavaLangObject(true);
  const RegType& ref_type_3 = PreciseJavaLangObjectFromDescriptor(&cache, loader);

  EXPECT_TRUE(ref_type.Equals(ref_type_2));
  EXPECT_TRUE(ref_type_3.Equals(ref_type_2));
  EXPECT_EQ(ref_type.GetId(), ref_type_3.GetId());
}

TEST_F(RegTypeReferenceTest, Merging) {
  // Tests merging logic.
  // String and Object: the least upper bound is Object.
  ScopedObjectAccess soa(Thread::Current());
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache_new(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& string = cache_new.JavaLangString();
  const RegType& object = cache_new.JavaLangObject(true);
  EXPECT_TRUE(string.Merge(object, &cache_new, /* verifier= */ nullptr).IsJavaLangObject());
  // Merge two unresolved types.
  const RegType& ref_type_0 = cache_new.FromDescriptor(loader, "Ljava/lang/DoesNotExist;");
  EXPECT_TRUE(ref_type_0.IsUnresolvedReference());
  const RegType& ref_type_1 = cache_new.FromDescriptor(loader, "Ljava/lang/DoesNotExistToo;");
  EXPECT_FALSE(ref_type_0.Equals(ref_type_1));

  const RegType& merged = ref_type_1.Merge(ref_type_0, &cache_new, /* verifier= */ nullptr);
  EXPECT_TRUE(merged.IsUnresolvedMergedReference());
  RegType& merged_nonconst = const_cast<RegType&>(merged);

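  // An unresolved merged type records its unresolved components in a bit vector indexed by the
  // components' ids in the register type cache.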
  const BitVector& unresolved_parts =
      down_cast<UnresolvedMergedType*>(&merged_nonconst)->GetUnresolvedTypes();
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_0.GetId()));
  EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_1.GetId()));
}

TEST_F(RegTypeTest, MergingFloat) {
  // Testing merging logic with float and float constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& float_type = cache_new.Float();
  const RegType& precise_cst = cache_new.FromCat1Const(kTestConstantValue, true);
  const RegType& imprecise_cst = cache_new.FromCat1Const(kTestConstantValue, false);
  {
    // float MERGE precise cst => float.
    const RegType& merged = float_type.Merge(precise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // precise cst MERGE float => float.
    const RegType& merged = precise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // float MERGE imprecise cst => float.
    const RegType& merged = float_type.Merge(imprecise_cst, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
  {
    // imprecise cst MERGE float => float.
    const RegType& merged = imprecise_cst.Merge(float_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsFloat());
  }
}

TEST_F(RegTypeTest, MergingLong) {
  // Testing merging logic with long and long constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& long_lo_type = cache_new.LongLo();
  const RegType& long_hi_type = cache_new.LongHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = long_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        long_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = long_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        long_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsLongHi());
  }
}

TEST_F(RegTypeTest, MergingDouble) {
  // Testing merging logic with double and double constants.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);

  constexpr int32_t kTestConstantValue = 10;
  const RegType& double_lo_type = cache_new.DoubleLo();
  const RegType& double_hi_type = cache_new.DoubleHi();
  const RegType& precise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, true);
  const RegType& imprecise_cst_lo = cache_new.FromCat2ConstLo(kTestConstantValue, false);
  const RegType& precise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, true);
  const RegType& imprecise_cst_hi = cache_new.FromCat2ConstHi(kTestConstantValue, false);
  {
    // lo MERGE precise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        precise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // precise cst lo MERGE lo => lo.
    const RegType& merged = precise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // lo MERGE imprecise cst lo => lo.
    const RegType& merged = double_lo_type.Merge(
        imprecise_cst_lo, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // imprecise cst lo MERGE lo => lo.
    const RegType& merged = imprecise_cst_lo.Merge(
        double_lo_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleLo());
  }
  {
    // hi MERGE precise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        precise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // precise cst hi MERGE hi => hi.
    const RegType& merged = precise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // hi MERGE imprecise cst hi => hi.
    const RegType& merged = double_hi_type.Merge(
        imprecise_cst_hi, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
  {
    // imprecise cst hi MERGE hi => hi.
    const RegType& merged = imprecise_cst_hi.Merge(
        double_hi_type, &cache_new, /* verifier= */ nullptr);
    EXPECT_TRUE(merged.IsDoubleHi());
  }
}

// Without a running MethodVerifier, the class-bearing register types may become stale as the GC
// will not visit them. It is easiest to disable moving GC.
//
// For some of the tests we need (or want) a working RegTypeCache that can load classes. So it is
// not generally possible to disable GC using ScopedGCCriticalSection (as it blocks GC and
// suspension completely).
struct ScopedDisableMovingGC {
  explicit ScopedDisableMovingGC(Thread* t) : self(t) {
    Runtime::Current()->GetHeap()->IncrementDisableMovingGC(self);
  }
  ~ScopedDisableMovingGC() {
    Runtime::Current()->GetHeap()->DecrementDisableMovingGC(self);
  }

  Thread* self;
};

TEST_F(RegTypeTest, MergeSemiLatticeRef) {
  //  (Incomplete) semilattice:
  //
  //  Excluded for now: * category-2 types
  //                    * interfaces
  //                    * all of category-1 primitive types, including constants.
  //  This is to demonstrate/codify the reference side, mostly.
  //
  //  Note: It is not a real semilattice because int = float makes this wonky. :-(
  //
  //                                       Conflict
  //                                           |
  //      #---------#--------------------------#-----------------------------#
  //      |         |                                                        |
  //      |         |                                                      Object
  //      |         |                                                        |
  //     int   uninit types              #---------------#--------#------------------#---------#
  //      |                              |               |        |                  |         |
  //      |                  unresolved-merge-types      |      Object[]           char[]   byte[]
  //      |                              |    |  |       |        |                  |         |
  //      |                  unresolved-types |  #------Number    #---------#        |         |
  //      |                              |    |          |        |         |        |         |
  //      |                              |    #--------Integer  Number[] Number[][]  |         |
  //      |                              |               |        |         |        |         |
  //      |                              #---------------#--------#---------#--------#---------#
  //      |                                                       |
  //      |                                                     null
  //      |                                                       |
  //      #--------------------------#----------------------------#
  //                                 |
  //                                 0

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);

  const RegType& conflict = cache.Conflict();
  const RegType& zero = cache.Zero();
  const RegType& null = cache.Null();
  const RegType& int_type = cache.Integer();

  const RegType& obj = cache.JavaLangObject(false);
  const RegType& obj_arr = cache.FromDescriptor(loader, "[Ljava/lang/Object;");
  ASSERT_FALSE(obj_arr.IsUnresolvedReference());

  const RegType& unresolved_a = cache.FromDescriptor(loader, "Ldoes/not/resolve/A;");
  ASSERT_TRUE(unresolved_a.IsUnresolvedReference());
  const RegType& unresolved_b = cache.FromDescriptor(loader, "Ldoes/not/resolve/B;");
  ASSERT_TRUE(unresolved_b.IsUnresolvedReference());
  const RegType& unresolved_ab = cache.FromUnresolvedMerge(unresolved_a, unresolved_b, nullptr);
  ASSERT_TRUE(unresolved_ab.IsUnresolvedMergedReference());

  const RegType& uninit_this = cache.UninitializedThisArgument(obj);
  const RegType& uninit_obj_0 = cache.Uninitialized(obj, 0u);
  const RegType& uninit_obj_1 = cache.Uninitialized(obj, 1u);

  const RegType& uninit_unres_this = cache.UninitializedThisArgument(unresolved_a);
  const RegType& uninit_unres_a_0 = cache.Uninitialized(unresolved_a, 0);
  const RegType& uninit_unres_b_0 = cache.Uninitialized(unresolved_b, 0);

  const RegType& number = cache.FromDescriptor(loader, "Ljava/lang/Number;");
  ASSERT_FALSE(number.IsUnresolvedReference());
  const RegType& integer = cache.FromDescriptor(loader, "Ljava/lang/Integer;");
  ASSERT_FALSE(integer.IsUnresolvedReference());

  const RegType& uninit_number_0 = cache.Uninitialized(number, 0u);
  const RegType& uninit_integer_0 = cache.Uninitialized(integer, 0u);

  const RegType& number_arr = cache.FromDescriptor(loader, "[Ljava/lang/Number;");
  ASSERT_FALSE(number_arr.IsUnresolvedReference());
  const RegType& integer_arr = cache.FromDescriptor(loader, "[Ljava/lang/Integer;");
  ASSERT_FALSE(integer_arr.IsUnresolvedReference());

  const RegType& number_arr_arr = cache.FromDescriptor(loader, "[[Ljava/lang/Number;");
  ASSERT_FALSE(number_arr_arr.IsUnresolvedReference());

  const RegType& char_arr = cache.FromDescriptor(loader, "[C");
  ASSERT_FALSE(char_arr.IsUnresolvedReference());
  const RegType& byte_arr = cache.FromDescriptor(loader, "[B");
  ASSERT_FALSE(byte_arr.IsUnresolvedReference());

  const RegType& unresolved_a_num = cache.FromUnresolvedMerge(unresolved_a, number, nullptr);
  ASSERT_TRUE(unresolved_a_num.IsUnresolvedMergedReference());
  const RegType& unresolved_b_num = cache.FromUnresolvedMerge(unresolved_b, number, nullptr);
  ASSERT_TRUE(unresolved_b_num.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_num = cache.FromUnresolvedMerge(unresolved_ab, number, nullptr);
  ASSERT_TRUE(unresolved_ab_num.IsUnresolvedMergedReference());

  const RegType& unresolved_a_int = cache.FromUnresolvedMerge(unresolved_a, integer, nullptr);
  ASSERT_TRUE(unresolved_a_int.IsUnresolvedMergedReference());
  const RegType& unresolved_b_int = cache.FromUnresolvedMerge(unresolved_b, integer, nullptr);
  ASSERT_TRUE(unresolved_b_int.IsUnresolvedMergedReference());
  const RegType& unresolved_ab_int = cache.FromUnresolvedMerge(unresolved_ab, integer, nullptr);
  ASSERT_TRUE(unresolved_ab_int.IsUnresolvedMergedReference());
  std::vector<const RegType*> uninitialized_types = {
      &uninit_this, &uninit_obj_0, &uninit_obj_1, &uninit_number_0, &uninit_integer_0
  };
  std::vector<const RegType*> unresolved_types = {
      &unresolved_a,
      &unresolved_b,
      &unresolved_ab,
      &unresolved_a_num,
      &unresolved_b_num,
      &unresolved_ab_num,
      &unresolved_a_int,
      &unresolved_b_int,
      &unresolved_ab_int
  };
  std::vector<const RegType*> uninit_unresolved_types = {
      &uninit_unres_this, &uninit_unres_a_0, &uninit_unres_b_0
  };
  std::vector<const RegType*> plain_nonobj_classes = { &number, &integer };
  std::vector<const RegType*> plain_nonobj_arr_classes = {
      &number_arr,
      &number_arr_arr,
      &integer_arr,
      &char_arr,
  };
  // std::vector<const RegType*> others = { &conflict, &zero, &null, &obj, &int_type };

  std::vector<const RegType*> all_minus_uninit_conflict;
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   unresolved_types.begin(),
                                   unresolved_types.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_classes.begin(),
                                   plain_nonobj_classes.end());
  all_minus_uninit_conflict.insert(all_minus_uninit_conflict.end(),
                                   plain_nonobj_arr_classes.begin(),
                                   plain_nonobj_arr_classes.end());
  all_minus_uninit_conflict.push_back(&zero);
  all_minus_uninit_conflict.push_back(&null);
  all_minus_uninit_conflict.push_back(&obj);

  std::vector<const RegType*> all_minus_uninit;
  all_minus_uninit.insert(all_minus_uninit.end(),
                          all_minus_uninit_conflict.begin(),
                          all_minus_uninit_conflict.end());
  all_minus_uninit.push_back(&conflict);

  std::vector<const RegType*> all;
  all.insert(all.end(), uninitialized_types.begin(), uninitialized_types.end());
  all.insert(all.end(), uninit_unresolved_types.begin(), uninit_unresolved_types.end());
  all.insert(all.end(), all_minus_uninit.begin(), all_minus_uninit.end());
  all.push_back(&int_type);

  auto check = [&](const RegType& in1, const RegType& in2, const RegType& expected_out)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const RegType& merge_result = in1.SafeMerge(in2, &cache, nullptr);
    EXPECT_EQ(&expected_out, &merge_result)
        << in1.Dump() << " x " << in2.Dump() << " = " << merge_result.Dump()
        << " != " << expected_out.Dump();
  };

  // Identity.
  {
    for (auto r : all) {
      check(*r, *r, *r);
    }
  }

  // Define a covering relation through a list of Edges. We'll then derive LUBs from this and
  // create checks for every pair of types.
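  // (A covering relation lists only the immediate subtype/supertype pairs; the full ordering and
  // the expected least upper bounds are derived from reachability over these edges below.)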

  struct Edge {
    const RegType& from;
    const RegType& to;

    Edge(const RegType& from_, const RegType& to_) : from(from_), to(to_) {}
  };
  std::vector<Edge> edges;
#define ADD_EDGE(from, to) edges.emplace_back((from), (to))

  // To Conflict.
  {
    for (auto r : uninitialized_types) {
      ADD_EDGE(*r, conflict);
    }
    for (auto r : uninit_unresolved_types) {
      ADD_EDGE(*r, conflict);
    }
    ADD_EDGE(obj, conflict);
    ADD_EDGE(int_type, conflict);
  }

  ADD_EDGE(zero, null);

  // Unresolved.
  {
    ADD_EDGE(null, unresolved_a);
    ADD_EDGE(null, unresolved_b);
    ADD_EDGE(unresolved_a, unresolved_ab);
    ADD_EDGE(unresolved_b, unresolved_ab);

    ADD_EDGE(number, unresolved_a_num);
    ADD_EDGE(unresolved_a, unresolved_a_num);
    ADD_EDGE(number, unresolved_b_num);
    ADD_EDGE(unresolved_b, unresolved_b_num);
    ADD_EDGE(number, unresolved_ab_num);
    ADD_EDGE(unresolved_a_num, unresolved_ab_num);
    ADD_EDGE(unresolved_b_num, unresolved_ab_num);
    ADD_EDGE(unresolved_ab, unresolved_ab_num);

    ADD_EDGE(integer, unresolved_a_int);
    ADD_EDGE(unresolved_a, unresolved_a_int);
    ADD_EDGE(integer, unresolved_b_int);
    ADD_EDGE(unresolved_b, unresolved_b_int);
    ADD_EDGE(integer, unresolved_ab_int);
    ADD_EDGE(unresolved_a_int, unresolved_ab_int);
    ADD_EDGE(unresolved_b_int, unresolved_ab_int);
    ADD_EDGE(unresolved_ab, unresolved_ab_int);

    ADD_EDGE(unresolved_a_int, unresolved_a_num);
    ADD_EDGE(unresolved_b_int, unresolved_b_num);
    ADD_EDGE(unresolved_ab_int, unresolved_ab_num);

    ADD_EDGE(unresolved_ab_num, obj);
  }

  // Classes.
  {
    ADD_EDGE(null, integer);
    ADD_EDGE(integer, number);
    ADD_EDGE(number, obj);
  }

  // Arrays.
  {
    ADD_EDGE(integer_arr, number_arr);
    ADD_EDGE(number_arr, obj_arr);
    ADD_EDGE(obj_arr, obj);
    ADD_EDGE(number_arr_arr, obj_arr);

    ADD_EDGE(char_arr, obj);
    ADD_EDGE(byte_arr, obj);

    ADD_EDGE(null, integer_arr);
    ADD_EDGE(null, number_arr_arr);
    ADD_EDGE(null, char_arr);
    ADD_EDGE(null, byte_arr);
  }

  // Primitive.
  {
    ADD_EDGE(zero, int_type);
  }
#undef ADD_EDGE

  // Create merge triples by using the covering relation established by edges to derive the
  // expected merge for any pair of types.

  // Expect merge(in1, in2) == out.
  struct MergeExpectation {
    const RegType& in1;
    const RegType& in2;
    const RegType& out;

    MergeExpectation(const RegType& in1_, const RegType& in2_, const RegType& out_)
        : in1(in1_), in2(in2_), out(out_) {}
  };
  std::vector<MergeExpectation> expectations;

  for (auto r1 : all) {
    for (auto r2 : all) {
      if (r1 == r2) {
        continue;
      }

      // A very simple algorithm of the kind usually used with adjacency lists. Our graph is
      // small, so it did not make sense to keep per-node edge lists; thus the usual
      // O(n + |e|) guarantee does not apply, but that is acceptable.
      //
      // To compute r1 lub r2 = merge(r1, r2):
      //   1) Generate the reachable set of r1, name it grey.
      //   2) Mark all grey reachable nodes of r2 as black.
      //   3) Find black nodes with no in-edges from other black nodes.
      //   4) If |3)| == 1, that's the lub.
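      // Step 3 relies on the edges forming a join-semilattice for the types under test: if more
      // than one minimal black node remained, the least upper bound would be ambiguous and the
      // ASSERT below would fail.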

      // Generic BFS of the graph induced by edges, starting at start. new_node will be called
      // with any discovered node, in order.
      auto bfs = [&](auto new_node, const RegType* start) {
        std::unordered_set<const RegType*> seen;
        std::queue<const RegType*> work_list;
        work_list.push(start);
        while (!work_list.empty()) {
          const RegType* cur = work_list.front();
          work_list.pop();
          auto it = seen.find(cur);
          if (it != seen.end()) {
            continue;
          }
          seen.insert(cur);
          new_node(cur);

          for (const Edge& edge : edges) {
            if (&edge.from == cur) {
              work_list.push(&edge.to);
            }
          }
        }
      };

      std::unordered_set<const RegType*> grey;
      auto compute_grey = [&](const RegType* cur) {
        grey.insert(cur);  // Mark discovered node as grey.
      };
      bfs(compute_grey, r1);

      std::set<const RegType*> black;
      auto compute_black = [&](const RegType* cur) {
        // Mark discovered grey node as black.
        if (grey.find(cur) != grey.end()) {
          black.insert(cur);
        }
      };
      bfs(compute_black, r2);

      std::set<const RegType*> no_in_edge(black);  // Copy of black, remove nodes with in-edges.
      for (auto r : black) {
        for (Edge& e : edges) {
          if (&e.from == r) {
            no_in_edge.erase(&e.to);  // It doesn't matter whether "to" is black or not, just
                                      // attempt to remove it.
          }
        }
      }

      // Helper to print sets when something went wrong.
      auto print_set = [](auto& container) REQUIRES_SHARED(Locks::mutator_lock_) {
        std::string result;
        for (auto r : container) {
          result.append(" + ");
          result.append(r->Dump());
        }
        return result;
      };
      ASSERT_EQ(no_in_edge.size(), 1u) << r1->Dump() << " u " << r2->Dump()
                                       << " grey=" << print_set(grey)
                                       << " black=" << print_set(black)
                                       << " no-in-edge=" << print_set(no_in_edge);
      expectations.emplace_back(*r1, *r2, **no_in_edge.begin());
    }
  }

  // Evaluate merge expectations. The merge is expected to be commutative.

  for (auto& triple : expectations) {
    check(triple.in1, triple.in2, triple.out);
    check(triple.in2, triple.in1, triple.out);
  }
}

TEST_F(RegTypeTest, ConstPrecision) {
  // Tests creating constant types with differing precision.
  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());
  RegTypeCache cache_new(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& imprecise_const = cache_new.FromCat1Const(10, false);
  const RegType& precise_const = cache_new.FromCat1Const(10, true);

  EXPECT_TRUE(imprecise_const.IsImpreciseConstant());
  EXPECT_TRUE(precise_const.IsPreciseConstant());
  EXPECT_FALSE(imprecise_const.Equals(precise_const));
}

class RegTypeOOMTest : public RegTypeTest {
 protected:
  void SetUpRuntimeOptions(RuntimeOptions* options) override {
    SetUpRuntimeOptionsForFillHeap(options);

    // We must not appear to be a compiler, or we'll abort on the host.
    callbacks_.reset();
  }
};

TEST_F(RegTypeOOMTest, ClassJoinOOM) {
  // TODO: Figure out why FillHeap isn't good enough under CMS.
  TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();

  // Tests that we don't abort with OOMs.

  ArenaStack stack(Runtime::Current()->GetArenaPool());
  ScopedArenaAllocator allocator(&stack);
  ScopedObjectAccess soa(Thread::Current());

  ScopedDisableMovingGC no_gc(soa.Self());

  // We merge nested arrays of primitive wrappers. These have a join type of an array of Number of
  // the same depth. We start with depth five, as we want at least two newly created classes to
  // test recursion (it's just more likely that nobody uses such deep arrays in runtime bringup).
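  // For example, the join of [[[[[Ljava/lang/Integer; and [[[[[Ljava/lang/Float; is expected to
  // be [[[[[Ljava/lang/Number;.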
  constexpr const char* kIntArrayFive = "[[[[[Ljava/lang/Integer;";
  constexpr const char* kFloatArrayFive = "[[[[[Ljava/lang/Float;";
  constexpr const char* kNumberArrayFour = "[[[[Ljava/lang/Number;";
  constexpr const char* kNumberArrayFive = "[[[[[Ljava/lang/Number;";

  ScopedNullHandle<mirror::ClassLoader> loader;
  RegTypeCache cache(
      soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
  const RegType& int_array_array = cache.FromDescriptor(loader, kIntArrayFive);
  ASSERT_TRUE(int_array_array.HasClass());
  const RegType& float_array_array = cache.FromDescriptor(loader, kFloatArrayFive);
  ASSERT_TRUE(float_array_array.HasClass());

  // Check assumptions: the joined classes don't exist, yet.
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFour, nullptr) == nullptr);
  ASSERT_TRUE(class_linker->LookupClass(soa.Self(), kNumberArrayFive, nullptr) == nullptr);

  // Fill the heap.
  VariableSizedHandleScope hs(soa.Self());
  FillHeap(soa.Self(), class_linker, &hs);

  const RegType& join_type = int_array_array.Merge(float_array_array, &cache, nullptr);
  ASSERT_TRUE(join_type.IsUnresolvedReference());
}

class RegTypeClassJoinTest : public RegTypeTest {
 protected:
  void TestClassJoin(const char* in1, const char* in2, const char* out) {
    ArenaStack stack(Runtime::Current()->GetArenaPool());
    ScopedArenaAllocator allocator(&stack);

    ScopedObjectAccess soa(Thread::Current());
    jobject jclass_loader = LoadDex("Interfaces");
    StackHandleScope<4> hs(soa.Self());
    Handle<mirror::ClassLoader> class_loader(
        hs.NewHandle(soa.Decode<mirror::ClassLoader>(jclass_loader)));

    Handle<mirror::Class> c1(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in1, class_loader)));
    Handle<mirror::Class> c2(hs.NewHandle(
        class_linker_->FindClass(soa.Self(), in2, class_loader)));
    ASSERT_TRUE(c1 != nullptr);
    ASSERT_TRUE(c2 != nullptr);

    ScopedDisableMovingGC no_gc(soa.Self());

    RegTypeCache cache(
        soa.Self(), Runtime::Current()->GetClassLinker(), /* can_load_classes= */ true, allocator);
    const RegType& c1_reg_type = *cache.InsertClass(in1, c1.Get(), false);
    const RegType& c2_reg_type = *cache.InsertClass(in2, c2.Get(), false);

    const RegType& join_type = c1_reg_type.Merge(c2_reg_type, &cache, nullptr);
    EXPECT_TRUE(join_type.HasClass());
    EXPECT_EQ(join_type.GetDescriptor(), std::string_view(out));
  }
};

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaces) {
  TestClassJoin("LInterfaces$K;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinInterfaceClass) {
  TestClassJoin("LInterfaces$B;", "LInterfaces$L;", "LInterfaces$J;");
}

TEST_F(RegTypeClassJoinTest, ClassJoinClassClass) {
  // This test codifies that we prefer the class hierarchy over interfaces. It's a mostly
  // arbitrary choice; optimally we'd have set types and could handle multi-inheritance precisely.
  TestClassJoin("LInterfaces$A;", "LInterfaces$B;", "Ljava/lang/Object;");
}

}  // namespace verifier
}  // namespace art