/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
#define ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_

#include "register_line.h"

#include "base/logging.h"  // For VLOG.
#include "method_verifier.h"
#include "reg_type_cache-inl.h"

namespace art HIDDEN {
namespace verifier {

// Should we dump a warning on failures to verify balanced locking? That would be an indication to
// developers that their code will be slow.
static constexpr bool kDumpLockFailures = true;

inline const RegType& RegisterLine::GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const {
  // The register index was validated during the static pass, so we don't need to check it here.
  DCHECK_LT(vsrc, num_regs_);
  return verifier->GetRegTypeCache()->GetFromId(line_[vsrc]);
}

template <LockOp kLockOp>
inline void RegisterLine::SetRegisterType(uint32_t vdst, const RegType& new_type) {
  DCHECK_LT(vdst, num_regs_);
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  // Note: previously we failed when asked to set a conflict. However, conflicts are OK as long
  // as they are not accessed, and our backends can handle this nowadays.
  line_[vdst] = new_type.GetId();
  switch (kLockOp) {
    case LockOp::kClear:
      // Clear the monitor entry bits for this register.
      ClearAllRegToLockDepths(vdst);
      break;
    case LockOp::kKeep:
      // Should only be doing this with reference types.
      DCHECK(new_type.IsReferenceTypes());
      break;
  }
}

inline void RegisterLine::SetRegisterTypeWide(uint32_t vdst,
                                              const RegType& new_type1,
                                              const RegType& new_type2) {
  DCHECK_LT(vdst + 1, num_regs_);
  DCHECK(new_type1.CheckWidePair(new_type2));
  line_[vdst] = new_type1.GetId();
  line_[vdst + 1] = new_type2.GetId();
  // Clear the monitor entry bits for both registers of the pair.
  ClearAllRegToLockDepths(vdst);
  ClearAllRegToLockDepths(vdst + 1);
}

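// Mark the method result as undefined, i.e. there is no usable result value.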
inline void RegisterLine::SetResultTypeToUnknown(RegTypeCache* reg_types) {
  result_[0] = reg_types->Undefined().GetId();
  result_[1] = result_[0];
}

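// Record a category-1 or reference result type; the high result slot is set to undefined.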
inline void RegisterLine::SetResultRegisterType(MethodVerifier* verifier, const RegType& new_type) {
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  result_[0] = new_type.GetId();
  result_[1] = verifier->GetRegTypeCache()->Undefined().GetId();
}

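// Record a wide (category-2) result type as a low/high pair in the two result slots.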
inline void RegisterLine::SetResultRegisterTypeWide(const RegType& new_type1,
                                                    const RegType& new_type2) {
  DCHECK(new_type1.CheckWidePair(new_type2));
  result_[0] = new_type1.GetId();
  result_[1] = new_type2.GetId();
}

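// Copy a category-1 value (32-bit primitive or reference, per 'cat') from vsrc to vdst, failing
// verification if the source register does not hold a value of the requested category.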
inline void RegisterLine::CopyRegister1(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc,
                                        TypeCategory cat) {
  DCHECK(cat == kTypeCategory1nr || cat == kTypeCategoryRef);
  const RegType& type = GetRegisterType(verifier, vsrc);
  if (type.IsLowHalf() || type.IsHighHalf()) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Expected category1 register type not '"
                                                << type << "'";
    return;
  }
  SetRegisterType<LockOp::kClear>(vdst, type);
  if (!type.IsConflict() &&  // Allow conflicts to be copied around.
      ((cat == kTypeCategory1nr && !type.IsCategory1Types()) ||
       (cat == kTypeCategoryRef && !type.IsReferenceTypes()))) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy1 v" << vdst << "<-v" << vsrc
                                                << " type=" << type
                                                << " cat=" << static_cast<int>(cat);
  } else if (cat == kTypeCategoryRef) {
    CopyRegToLockDepth(vdst, vsrc);
  }
}

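// Copy a category-2 (wide) value from the register pair starting at vsrc to the pair starting at
// vdst, failing verification if vsrc and vsrc + 1 do not form a valid wide pair.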
inline void RegisterLine::CopyRegister2(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc) {
  const RegType& type_l = GetRegisterType(verifier, vsrc);
  const RegType& type_h = GetRegisterType(verifier, vsrc + 1);

  if (!type_l.CheckWidePair(type_h)) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy2 v" << vdst << "<-v" << vsrc
                                                << " type=" << type_l << "/" << type_h;
  } else {
    SetRegisterTypeWide(vdst, type_l, type_h);
  }
}

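// Check that the register at vsrc holds a value assignable to check_type (and, for wide check
// types, that vsrc and vsrc + 1 form a valid pair). Returns false and flags a verification
// failure otherwise.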
inline bool RegisterLine::VerifyRegisterType(MethodVerifier* verifier, uint32_t vsrc,
                                             const RegType& check_type) {
  // Verify the type of the src register against the check type.
  const RegType& src_type = GetRegisterType(verifier, vsrc);
  if (UNLIKELY(!check_type.IsAssignableFrom(src_type, verifier))) {
    enum VerifyError fail_type;
    if (!check_type.IsNonZeroReferenceTypes() || !src_type.IsNonZeroReferenceTypes()) {
      // Hard fail if one of the types is primitive, since they are concretely known.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUninitializedTypes() || src_type.IsUninitializedTypes()) {
      // Hard fail for uninitialized types, which don't match anything but themselves.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUnresolvedTypes() || src_type.IsUnresolvedTypes()) {
      fail_type = VERIFY_ERROR_UNRESOLVED_TYPE_CHECK;
    } else {
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    }
    verifier->Fail(fail_type) << "register v" << vsrc << " has type "
                              << src_type << " but expected " << check_type;
    return false;
  }
  if (check_type.IsLowHalf()) {
    const RegType& src_type_h = GetRegisterType(verifier, vsrc + 1);
    if (UNLIKELY(!src_type.CheckWidePair(src_type_h))) {
      verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "wide register v" << vsrc << " has type "
                                                  << src_type << "/" << src_type_h;
      return false;
    }
  }
  // The register at vsrc has a defined type that is assignable to check_type. For reference
  // types, check_type is only an upper bound and is less precise than the subtype already in
  // vsrc, so keep the existing type. Defined primitive types are already as precise as we can
  // get, and while constant types could in principle be refined, constant propagation has
  // rendered that useless.
  return true;
}

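// Report a locking failure (without raising a pending exception) if the monitor stack is not
// empty, i.e. if some monitor-enter on this path lacks a matching monitor-exit.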
inline void RegisterLine::VerifyMonitorStackEmpty(MethodVerifier* verifier) const {
  if (MonitorStackDepth() != 0) {
    verifier->Fail(VERIFY_ERROR_LOCKING, /*pending_exc=*/ false);
    if (kDumpLockFailures) {
      VLOG(verifier) << "expected empty monitor stack in "
                     << verifier->GetMethodReference().PrettyMethod();
    }
  }
}

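// Size in bytes of a RegisterLine tracking num_regs registers: the fixed members plus the
// trailing array of 16-bit register type ids.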
inline size_t RegisterLine::ComputeSize(size_t num_regs) {
  return OFFSETOF_MEMBER(RegisterLine, line_) + num_regs * sizeof(uint16_t);
}

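// Allocate storage for a RegisterLine (including its variable-length register array) from the
// arena and placement-construct it there.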
inline RegisterLine* RegisterLine::Create(size_t num_regs,
                                          ScopedArenaAllocator& allocator,
                                          RegTypeCache* reg_types) {
  void* memory = allocator.Alloc(ComputeSize(num_regs));
  return new (memory) RegisterLine(num_regs, allocator, reg_types);
}

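// All registers start out undefined; the result slots are reset via SetResultTypeToUnknown().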
inline RegisterLine::RegisterLine(size_t num_regs,
                                  ScopedArenaAllocator& allocator,
                                  RegTypeCache* reg_types)
    : num_regs_(num_regs),
      monitors_(allocator.Adapter(kArenaAllocVerifier)),
      reg_to_lock_depths_(std::less<uint32_t>(),
                          allocator.Adapter(kArenaAllocVerifier)),
      this_initialized_(false) {
  std::uninitialized_fill_n(line_, num_regs_, RegTypeCache::kUndefinedCacheId);
  SetResultTypeToUnknown(reg_types);
}

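// Remove the record that the reference in 'reg' holds the lock acquired at 'depth'. Any other
// registers aliasing the same lock are cleared as well.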
inline void RegisterLine::ClearRegToLockDepth(size_t reg, size_t depth) {
  CHECK_LT(depth, 32u);
  DCHECK(IsSetLockDepth(reg, depth));
  auto it = reg_to_lock_depths_.find(reg);
  DCHECK(it != reg_to_lock_depths_.end());
  uint32_t depths = it->second ^ (1 << depth);
  if (depths != 0) {
    it->second = depths;
  } else {
    reg_to_lock_depths_.erase(it);
  }
  // Need to unlock every register at the same lock depth. These are aliased locks.
  uint32_t mask = 1 << depth;
  for (auto& pair : reg_to_lock_depths_) {
    if ((pair.second & mask) != 0) {
      VLOG(verifier) << "Also unlocking " << pair.first;
      pair.second ^= mask;
    }
  }
}

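// Arena deleter for RegisterLine: run the destructor, then protect the now-unused memory.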
inline void RegisterLineArenaDelete::operator()(RegisterLine* ptr) const {
  if (ptr != nullptr) {
    ptr->~RegisterLine();
    ProtectMemory(ptr, RegisterLine::ComputeSize(ptr->NumRegs()));
  }
}

}  // namespace verifier
}  // namespace art

#endif  // ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_