1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "local_value_numbering.h"
18
19 #include "base/bit_utils.h"
20 #include "global_value_numbering.h"
21 #include "mir_field_info.h"
22 #include "mir_graph.h"
23 #include "utils.h"
24
25 namespace art {
26
27 namespace { // anonymous namespace
28
29 // Operations used for value map keys instead of actual opcode.
30 static constexpr uint16_t kInvokeMemoryVersionBumpOp = Instruction::INVOKE_VIRTUAL;
31 static constexpr uint16_t kUnresolvedSFieldOp = Instruction::SGET;
32 static constexpr uint16_t kResolvedSFieldOp = Instruction::SGET_WIDE;
33 static constexpr uint16_t kUnresolvedIFieldOp = Instruction::IGET;
34 static constexpr uint16_t kNonAliasingIFieldLocOp = Instruction::IGET_WIDE;
35 static constexpr uint16_t kNonAliasingIFieldInitialOp = Instruction::IGET_OBJECT;
36 static constexpr uint16_t kAliasingIFieldOp = Instruction::IGET_BOOLEAN;
37 static constexpr uint16_t kAliasingIFieldStartVersionOp = Instruction::IGET_BYTE;
38 static constexpr uint16_t kAliasingIFieldBumpVersionOp = Instruction::IGET_CHAR;
39 static constexpr uint16_t kNonAliasingArrayOp = Instruction::AGET;
40 static constexpr uint16_t kNonAliasingArrayStartVersionOp = Instruction::AGET_WIDE;
41 static constexpr uint16_t kNonAliasingArrayBumpVersionOp = Instruction::AGET_OBJECT;
42 static constexpr uint16_t kAliasingArrayOp = Instruction::AGET_BOOLEAN;
43 static constexpr uint16_t kAliasingArrayStartVersionOp = Instruction::AGET_BYTE;
44 static constexpr uint16_t kAliasingArrayBumpVersionOp = Instruction::AGET_CHAR;
45 static constexpr uint16_t kMergeBlockMemoryVersionBumpOp = Instruction::INVOKE_VIRTUAL_RANGE;
46 static constexpr uint16_t kMergeBlockAliasingIFieldVersionBumpOp = Instruction::IPUT;
47 static constexpr uint16_t kMergeBlockAliasingIFieldMergeLocationOp = Instruction::IPUT_WIDE;
48 static constexpr uint16_t kMergeBlockNonAliasingArrayVersionBumpOp = Instruction::APUT;
49 static constexpr uint16_t kMergeBlockNonAliasingArrayMergeLocationOp = Instruction::APUT_WIDE;
50 static constexpr uint16_t kMergeBlockAliasingArrayVersionBumpOp = Instruction::APUT_OBJECT;
51 static constexpr uint16_t kMergeBlockAliasingArrayMergeLocationOp = Instruction::APUT_BOOLEAN;
52 static constexpr uint16_t kMergeBlockNonAliasingIFieldVersionBumpOp = Instruction::APUT_BYTE;
53 static constexpr uint16_t kMergeBlockSFieldVersionBumpOp = Instruction::APUT_CHAR;
54
55 } // anonymous namespace
56
57 class LocalValueNumbering::AliasingIFieldVersions {
58 public:
StartMemoryVersion(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t field_id)59 static uint16_t StartMemoryVersion(GlobalValueNumbering* gvn, const LocalValueNumbering* lvn,
60 uint16_t field_id) {
61 uint16_t type = gvn->GetIFieldType(field_id);
62 return gvn->LookupValue(kAliasingIFieldStartVersionOp, field_id,
63 lvn->global_memory_version_, lvn->unresolved_ifield_version_[type]);
64 }
65
BumpMemoryVersion(GlobalValueNumbering * gvn,uint16_t old_version,uint16_t store_ref_set_id,uint16_t stored_value)66 static uint16_t BumpMemoryVersion(GlobalValueNumbering* gvn, uint16_t old_version,
67 uint16_t store_ref_set_id, uint16_t stored_value) {
68 return gvn->LookupValue(kAliasingIFieldBumpVersionOp, old_version,
69 store_ref_set_id, stored_value);
70 }
71
LookupGlobalValue(GlobalValueNumbering * gvn,uint16_t field_id,uint16_t base,uint16_t memory_version)72 static uint16_t LookupGlobalValue(GlobalValueNumbering* gvn,
73 uint16_t field_id, uint16_t base, uint16_t memory_version) {
74 return gvn->LookupValue(kAliasingIFieldOp, field_id, base, memory_version);
75 }
76
LookupMergeValue(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t field_id,uint16_t base)77 static uint16_t LookupMergeValue(GlobalValueNumbering* gvn, const LocalValueNumbering* lvn,
78 uint16_t field_id, uint16_t base) {
79 // If the base/field_id is non-aliasing in lvn, use the non-aliasing value.
80 uint16_t type = gvn->GetIFieldType(field_id);
81 if (lvn->IsNonAliasingIField(base, field_id, type)) {
82 uint16_t loc = gvn->LookupValue(kNonAliasingIFieldLocOp, base, field_id, type);
83 auto lb = lvn->non_aliasing_ifield_value_map_.find(loc);
84 return (lb != lvn->non_aliasing_ifield_value_map_.end())
85 ? lb->second
86 : gvn->LookupValue(kNonAliasingIFieldInitialOp, loc, kNoValue, kNoValue);
87 }
88 return AliasingValuesMergeGet<AliasingIFieldVersions>(
89 gvn, lvn, &lvn->aliasing_ifield_value_map_, field_id, base);
90 }
91
HasNewBaseVersion(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t field_id)92 static bool HasNewBaseVersion(GlobalValueNumbering* gvn, const LocalValueNumbering* lvn,
93 uint16_t field_id) {
94 uint16_t type = gvn->GetIFieldType(field_id);
95 return lvn->unresolved_ifield_version_[type] == lvn->merge_new_memory_version_ ||
96 lvn->global_memory_version_ == lvn->merge_new_memory_version_;
97 }
98
LookupMergeBlockValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t field_id)99 static uint16_t LookupMergeBlockValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
100 uint16_t field_id) {
101 return gvn->LookupValue(kMergeBlockAliasingIFieldVersionBumpOp, field_id, kNoValue, lvn_id);
102 }
103
LookupMergeLocationValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t field_id,uint16_t base)104 static uint16_t LookupMergeLocationValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
105 uint16_t field_id, uint16_t base) {
106 return gvn->LookupValue(kMergeBlockAliasingIFieldMergeLocationOp, field_id, base, lvn_id);
107 }
108 };
109
110 class LocalValueNumbering::NonAliasingArrayVersions {
111 public:
StartMemoryVersion(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn ATTRIBUTE_UNUSED,uint16_t array)112 static uint16_t StartMemoryVersion(GlobalValueNumbering* gvn,
113 const LocalValueNumbering* lvn ATTRIBUTE_UNUSED,
114 uint16_t array) {
115 return gvn->LookupValue(kNonAliasingArrayStartVersionOp, array, kNoValue, kNoValue);
116 }
117
BumpMemoryVersion(GlobalValueNumbering * gvn,uint16_t old_version,uint16_t store_ref_set_id,uint16_t stored_value)118 static uint16_t BumpMemoryVersion(GlobalValueNumbering* gvn, uint16_t old_version,
119 uint16_t store_ref_set_id, uint16_t stored_value) {
120 return gvn->LookupValue(kNonAliasingArrayBumpVersionOp, old_version,
121 store_ref_set_id, stored_value);
122 }
123
LookupGlobalValue(GlobalValueNumbering * gvn,uint16_t array,uint16_t index,uint16_t memory_version)124 static uint16_t LookupGlobalValue(GlobalValueNumbering* gvn,
125 uint16_t array, uint16_t index, uint16_t memory_version) {
126 return gvn->LookupValue(kNonAliasingArrayOp, array, index, memory_version);
127 }
128
LookupMergeValue(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t array,uint16_t index)129 static uint16_t LookupMergeValue(GlobalValueNumbering* gvn, const LocalValueNumbering* lvn,
130 uint16_t array, uint16_t index) {
131 return AliasingValuesMergeGet<NonAliasingArrayVersions>(
132 gvn, lvn, &lvn->non_aliasing_array_value_map_, array, index);
133 }
134
HasNewBaseVersion(GlobalValueNumbering * gvn ATTRIBUTE_UNUSED,const LocalValueNumbering * lvn ATTRIBUTE_UNUSED,uint16_t array ATTRIBUTE_UNUSED)135 static bool HasNewBaseVersion(GlobalValueNumbering* gvn ATTRIBUTE_UNUSED,
136 const LocalValueNumbering* lvn ATTRIBUTE_UNUSED,
137 uint16_t array ATTRIBUTE_UNUSED) {
138 return false; // Not affected by global_memory_version_.
139 }
140
LookupMergeBlockValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t array)141 static uint16_t LookupMergeBlockValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
142 uint16_t array) {
143 return gvn->LookupValue(kMergeBlockNonAliasingArrayVersionBumpOp, array, kNoValue, lvn_id);
144 }
145
LookupMergeLocationValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t array,uint16_t index)146 static uint16_t LookupMergeLocationValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
147 uint16_t array, uint16_t index) {
148 return gvn->LookupValue(kMergeBlockNonAliasingArrayMergeLocationOp, array, index, lvn_id);
149 }
150 };
151
152 class LocalValueNumbering::AliasingArrayVersions {
153 public:
StartMemoryVersion(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t type)154 static uint16_t StartMemoryVersion(GlobalValueNumbering* gvn, const LocalValueNumbering* lvn,
155 uint16_t type) {
156 return gvn->LookupValue(kAliasingArrayStartVersionOp, type, lvn->global_memory_version_,
157 kNoValue);
158 }
159
BumpMemoryVersion(GlobalValueNumbering * gvn,uint16_t old_version,uint16_t store_ref_set_id,uint16_t stored_value)160 static uint16_t BumpMemoryVersion(GlobalValueNumbering* gvn, uint16_t old_version,
161 uint16_t store_ref_set_id, uint16_t stored_value) {
162 return gvn->LookupValue(kAliasingArrayBumpVersionOp, old_version,
163 store_ref_set_id, stored_value);
164 }
165
LookupGlobalValue(GlobalValueNumbering * gvn,uint16_t type,uint16_t location,uint16_t memory_version)166 static uint16_t LookupGlobalValue(GlobalValueNumbering* gvn,
167 uint16_t type, uint16_t location, uint16_t memory_version) {
168 return gvn->LookupValue(kAliasingArrayOp, type, location, memory_version);
169 }
170
LookupMergeValue(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,uint16_t type,uint16_t location)171 static uint16_t LookupMergeValue(GlobalValueNumbering* gvn,
172 const LocalValueNumbering* lvn,
173 uint16_t type, uint16_t location) {
174 // If the location is non-aliasing in lvn, use the non-aliasing value.
175 uint16_t array = gvn->GetArrayLocationBase(location);
176 if (lvn->IsNonAliasingArray(array, type)) {
177 uint16_t index = gvn->GetArrayLocationIndex(location);
178 return NonAliasingArrayVersions::LookupMergeValue(gvn, lvn, array, index);
179 }
180 return AliasingValuesMergeGet<AliasingArrayVersions>(
181 gvn, lvn, &lvn->aliasing_array_value_map_, type, location);
182 }
183
HasNewBaseVersion(GlobalValueNumbering * gvn ATTRIBUTE_UNUSED,const LocalValueNumbering * lvn,uint16_t type ATTRIBUTE_UNUSED)184 static bool HasNewBaseVersion(GlobalValueNumbering* gvn ATTRIBUTE_UNUSED,
185 const LocalValueNumbering* lvn,
186 uint16_t type ATTRIBUTE_UNUSED) {
187 return lvn->global_memory_version_ == lvn->merge_new_memory_version_;
188 }
189
LookupMergeBlockValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t type)190 static uint16_t LookupMergeBlockValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
191 uint16_t type) {
192 return gvn->LookupValue(kMergeBlockAliasingArrayVersionBumpOp, type, kNoValue, lvn_id);
193 }
194
LookupMergeLocationValue(GlobalValueNumbering * gvn,uint16_t lvn_id,uint16_t type,uint16_t location)195 static uint16_t LookupMergeLocationValue(GlobalValueNumbering* gvn, uint16_t lvn_id,
196 uint16_t type, uint16_t location) {
197 return gvn->LookupValue(kMergeBlockAliasingArrayMergeLocationOp, type, location, lvn_id);
198 }
199 };
200
201 template <typename Map>
GetAliasingValues(Map * map,const typename Map::key_type & key)202 LocalValueNumbering::AliasingValues* LocalValueNumbering::GetAliasingValues(
203 Map* map, const typename Map::key_type& key) {
204 auto lb = map->lower_bound(key);
205 if (lb == map->end() || map->key_comp()(key, lb->first)) {
206 lb = map->PutBefore(lb, key, AliasingValues(this));
207 }
208 return &lb->second;
209 }
210
211 template <typename Versions, typename KeyType>
UpdateAliasingValuesLoadVersion(const KeyType & key,AliasingValues * values)212 void LocalValueNumbering::UpdateAliasingValuesLoadVersion(const KeyType& key,
213 AliasingValues* values) {
214 if (values->last_load_memory_version == kNoValue) {
215 // Get the start version that accounts for aliasing with unresolved fields of the same
216 // type and make it unique for the field by including the field_id.
217 uint16_t memory_version = values->memory_version_before_stores;
218 if (memory_version == kNoValue) {
219 memory_version = Versions::StartMemoryVersion(gvn_, this, key);
220 }
221 if (!values->store_loc_set.empty()) {
222 uint16_t ref_set_id = gvn_->GetRefSetId(values->store_loc_set);
223 memory_version = Versions::BumpMemoryVersion(gvn_, memory_version, ref_set_id,
224 values->last_stored_value);
225 }
226 values->last_load_memory_version = memory_version;
227 }
228 }
229
230 template <typename Versions, typename Map>
AliasingValuesMergeGet(GlobalValueNumbering * gvn,const LocalValueNumbering * lvn,Map * map,const typename Map::key_type & key,uint16_t location)231 uint16_t LocalValueNumbering::AliasingValuesMergeGet(GlobalValueNumbering* gvn,
232 const LocalValueNumbering* lvn,
233 Map* map, const typename Map::key_type& key,
234 uint16_t location) {
235 // Retrieve the value name that we would get from
236 // const_cast<LocalValueNumbering*>(lvn)->HandleAliasingValueGet(map. key, location)
237 // but don't modify the map.
238 uint16_t value_name;
239 auto it = map->find(key);
240 if (it == map->end()) {
241 uint16_t start_version = Versions::StartMemoryVersion(gvn, lvn, key);
242 value_name = Versions::LookupGlobalValue(gvn, key, location, start_version);
243 } else if (it->second.store_loc_set.count(location) != 0u) {
244 value_name = it->second.last_stored_value;
245 } else {
246 auto load_it = it->second.load_value_map.find(location);
247 if (load_it != it->second.load_value_map.end()) {
248 value_name = load_it->second;
249 } else {
250 value_name = Versions::LookupGlobalValue(gvn, key, location, it->second.last_load_memory_version);
251 }
252 }
253 return value_name;
254 }
255
256 template <typename Versions, typename Map>
HandleAliasingValuesGet(Map * map,const typename Map::key_type & key,uint16_t location)257 uint16_t LocalValueNumbering::HandleAliasingValuesGet(Map* map, const typename Map::key_type& key,
258 uint16_t location) {
259 // Retrieve the value name for IGET/SGET/AGET, update the map with new value if any.
260 uint16_t res;
261 AliasingValues* values = GetAliasingValues(map, key);
262 if (values->store_loc_set.count(location) != 0u) {
263 res = values->last_stored_value;
264 } else {
265 UpdateAliasingValuesLoadVersion<Versions>(key, values);
266 auto lb = values->load_value_map.lower_bound(location);
267 if (lb != values->load_value_map.end() && lb->first == location) {
268 res = lb->second;
269 } else {
270 res = Versions::LookupGlobalValue(gvn_, key, location, values->last_load_memory_version);
271 values->load_value_map.PutBefore(lb, location, res);
272 }
273 }
274 return res;
275 }
276
277 template <typename Versions, typename Map>
HandleAliasingValuesPut(Map * map,const typename Map::key_type & key,uint16_t location,uint16_t value)278 bool LocalValueNumbering::HandleAliasingValuesPut(Map* map, const typename Map::key_type& key,
279 uint16_t location, uint16_t value) {
280 AliasingValues* values = GetAliasingValues(map, key);
281 auto load_values_it = values->load_value_map.find(location);
282 if (load_values_it != values->load_value_map.end() && load_values_it->second == value) {
283 // This insn can be eliminated, it stores the same value that's already in the field.
284 return false;
285 }
286 if (value == values->last_stored_value) {
287 auto store_loc_lb = values->store_loc_set.lower_bound(location);
288 if (store_loc_lb != values->store_loc_set.end() && *store_loc_lb == location) {
289 // This insn can be eliminated, it stores the same value that's already in the field.
290 return false;
291 }
292 values->store_loc_set.emplace_hint(store_loc_lb, location);
293 } else {
294 UpdateAliasingValuesLoadVersion<Versions>(key, values);
295 values->memory_version_before_stores = values->last_load_memory_version;
296 values->last_stored_value = value;
297 values->store_loc_set.clear();
298 values->store_loc_set.insert(location);
299 }
300 // Clear the last load memory version and remove all potentially overwritten values.
301 values->last_load_memory_version = kNoValue;
302 auto it = values->load_value_map.begin(), end = values->load_value_map.end();
303 while (it != end) {
304 if (it->second == value) {
305 ++it;
306 } else {
307 it = values->load_value_map.erase(it);
308 }
309 }
310 return true;
311 }
312
313 template <typename K>
CopyAliasingValuesMap(ScopedArenaSafeMap<K,AliasingValues> * dest,const ScopedArenaSafeMap<K,AliasingValues> & src)314 void LocalValueNumbering::CopyAliasingValuesMap(ScopedArenaSafeMap<K, AliasingValues>* dest,
315 const ScopedArenaSafeMap<K, AliasingValues>& src) {
316 // We need each new AliasingValues (or rather its map members) to be constructed
317 // with our allocator, rather than the allocator of the source.
318 for (const auto& entry : src) {
319 auto it = dest->PutBefore(dest->end(), entry.first, AliasingValues(this));
320 it->second = entry.second; // Map assignments preserve current allocator.
321 }
322 }
323
LocalValueNumbering(GlobalValueNumbering * gvn,uint16_t id,ScopedArenaAllocator * allocator)324 LocalValueNumbering::LocalValueNumbering(GlobalValueNumbering* gvn, uint16_t id,
325 ScopedArenaAllocator* allocator)
326 : gvn_(gvn),
327 id_(id),
328 sreg_value_map_(std::less<uint16_t>(), allocator->Adapter()),
329 sreg_wide_value_map_(std::less<uint16_t>(), allocator->Adapter()),
330 sfield_value_map_(std::less<uint16_t>(), allocator->Adapter()),
331 non_aliasing_ifield_value_map_(std::less<uint16_t>(), allocator->Adapter()),
332 aliasing_ifield_value_map_(std::less<uint16_t>(), allocator->Adapter()),
333 non_aliasing_array_value_map_(std::less<uint16_t>(), allocator->Adapter()),
334 aliasing_array_value_map_(std::less<uint16_t>(), allocator->Adapter()),
335 global_memory_version_(0u),
336 non_aliasing_refs_(std::less<uint16_t>(), allocator->Adapter()),
337 escaped_refs_(std::less<uint16_t>(), allocator->Adapter()),
338 escaped_ifield_clobber_set_(EscapedIFieldClobberKeyComparator(), allocator->Adapter()),
339 escaped_array_clobber_set_(EscapedArrayClobberKeyComparator(), allocator->Adapter()),
340 range_checked_(RangeCheckKeyComparator() , allocator->Adapter()),
341 null_checked_(std::less<uint16_t>(), allocator->Adapter()),
342 div_zero_checked_(std::less<uint16_t>(), allocator->Adapter()),
343 merge_names_(allocator->Adapter()),
344 merge_map_(std::less<ScopedArenaVector<BasicBlockId>>(), allocator->Adapter()),
345 merge_new_memory_version_(kNoValue) {
346 std::fill_n(unresolved_sfield_version_, arraysize(unresolved_sfield_version_), 0u);
347 std::fill_n(unresolved_ifield_version_, arraysize(unresolved_ifield_version_), 0u);
348 }
349
Equals(const LocalValueNumbering & other) const350 bool LocalValueNumbering::Equals(const LocalValueNumbering& other) const {
351 DCHECK(gvn_ == other.gvn_);
352 // Compare the maps/sets and memory versions.
353 return sreg_value_map_ == other.sreg_value_map_ &&
354 sreg_wide_value_map_ == other.sreg_wide_value_map_ &&
355 sfield_value_map_ == other.sfield_value_map_ &&
356 non_aliasing_ifield_value_map_ == other.non_aliasing_ifield_value_map_ &&
357 aliasing_ifield_value_map_ == other.aliasing_ifield_value_map_ &&
358 non_aliasing_array_value_map_ == other.non_aliasing_array_value_map_ &&
359 aliasing_array_value_map_ == other.aliasing_array_value_map_ &&
360 SameMemoryVersion(other) &&
361 non_aliasing_refs_ == other.non_aliasing_refs_ &&
362 escaped_refs_ == other.escaped_refs_ &&
363 escaped_ifield_clobber_set_ == other.escaped_ifield_clobber_set_ &&
364 escaped_array_clobber_set_ == other.escaped_array_clobber_set_ &&
365 range_checked_ == other.range_checked_ &&
366 null_checked_ == other.null_checked_ &&
367 div_zero_checked_ == other.div_zero_checked_;
368 }
369
MergeOne(const LocalValueNumbering & other,MergeType merge_type)370 void LocalValueNumbering::MergeOne(const LocalValueNumbering& other, MergeType merge_type) {
371 CopyLiveSregValues(&sreg_value_map_, other.sreg_value_map_);
372 CopyLiveSregValues(&sreg_wide_value_map_, other.sreg_wide_value_map_);
373
374 if (merge_type == kReturnMerge) {
375 // RETURN or PHI+RETURN. We need only sreg value maps.
376 return;
377 }
378
379 non_aliasing_ifield_value_map_ = other.non_aliasing_ifield_value_map_;
380 CopyAliasingValuesMap(&non_aliasing_array_value_map_, other.non_aliasing_array_value_map_);
381 non_aliasing_refs_ = other.non_aliasing_refs_;
382 range_checked_ = other.range_checked_;
383 null_checked_ = other.null_checked_;
384 div_zero_checked_ = other.div_zero_checked_;
385
386 const BasicBlock* pred_bb = gvn_->GetBasicBlock(other.Id());
387 if (GlobalValueNumbering::HasNullCheckLastInsn(pred_bb, Id())) {
388 int s_reg = pred_bb->last_mir_insn->ssa_rep->uses[0];
389 null_checked_.insert(other.GetOperandValue(s_reg));
390 }
391
392 if (merge_type == kCatchMerge) {
393 // Memory is clobbered. Use new memory version and don't merge aliasing locations.
394 global_memory_version_ = NewMemoryVersion(&merge_new_memory_version_);
395 std::fill_n(unresolved_sfield_version_, arraysize(unresolved_sfield_version_),
396 global_memory_version_);
397 std::fill_n(unresolved_ifield_version_, arraysize(unresolved_ifield_version_),
398 global_memory_version_);
399 PruneNonAliasingRefsForCatch();
400 return;
401 }
402
403 DCHECK(merge_type == kNormalMerge);
404 global_memory_version_ = other.global_memory_version_;
405 std::copy_n(other.unresolved_ifield_version_, arraysize(unresolved_sfield_version_),
406 unresolved_ifield_version_);
407 std::copy_n(other.unresolved_sfield_version_, arraysize(unresolved_ifield_version_),
408 unresolved_sfield_version_);
409 sfield_value_map_ = other.sfield_value_map_;
410 CopyAliasingValuesMap(&aliasing_ifield_value_map_, other.aliasing_ifield_value_map_);
411 CopyAliasingValuesMap(&aliasing_array_value_map_, other.aliasing_array_value_map_);
412 escaped_refs_ = other.escaped_refs_;
413 escaped_ifield_clobber_set_ = other.escaped_ifield_clobber_set_;
414 escaped_array_clobber_set_ = other.escaped_array_clobber_set_;
415 }
416
SameMemoryVersion(const LocalValueNumbering & other) const417 bool LocalValueNumbering::SameMemoryVersion(const LocalValueNumbering& other) const {
418 return
419 global_memory_version_ == other.global_memory_version_ &&
420 std::equal(unresolved_ifield_version_,
421 unresolved_ifield_version_ + arraysize(unresolved_ifield_version_),
422 other.unresolved_ifield_version_) &&
423 std::equal(unresolved_sfield_version_,
424 unresolved_sfield_version_ + arraysize(unresolved_sfield_version_),
425 other.unresolved_sfield_version_);
426 }
427
NewMemoryVersion(uint16_t * new_version)428 uint16_t LocalValueNumbering::NewMemoryVersion(uint16_t* new_version) {
429 if (*new_version == kNoValue) {
430 *new_version = gvn_->LookupValue(kMergeBlockMemoryVersionBumpOp, 0u, 0u, id_);
431 }
432 return *new_version;
433 }
434
MergeMemoryVersions(bool clobbered_catch)435 void LocalValueNumbering::MergeMemoryVersions(bool clobbered_catch) {
436 DCHECK_GE(gvn_->merge_lvns_.size(), 2u);
437 const LocalValueNumbering* cmp = gvn_->merge_lvns_[0];
438 // Check if the global version has changed.
439 bool new_global_version = clobbered_catch;
440 if (!new_global_version) {
441 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
442 if (lvn->global_memory_version_ != cmp->global_memory_version_) {
443 // Use a new version for everything.
444 new_global_version = true;
445 break;
446 }
447 }
448 }
449 if (new_global_version) {
450 global_memory_version_ = NewMemoryVersion(&merge_new_memory_version_);
451 std::fill_n(unresolved_sfield_version_, arraysize(unresolved_sfield_version_),
452 merge_new_memory_version_);
453 std::fill_n(unresolved_ifield_version_, arraysize(unresolved_ifield_version_),
454 merge_new_memory_version_);
455 } else {
456 // Initialize with a copy of memory versions from the comparison LVN.
457 global_memory_version_ = cmp->global_memory_version_;
458 std::copy_n(cmp->unresolved_ifield_version_, arraysize(unresolved_sfield_version_),
459 unresolved_ifield_version_);
460 std::copy_n(cmp->unresolved_sfield_version_, arraysize(unresolved_ifield_version_),
461 unresolved_sfield_version_);
462 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
463 if (lvn == cmp) {
464 continue;
465 }
466 for (size_t i = 0; i != kDexMemAccessTypeCount; ++i) {
467 if (lvn->unresolved_ifield_version_[i] != cmp->unresolved_ifield_version_[i]) {
468 unresolved_ifield_version_[i] = NewMemoryVersion(&merge_new_memory_version_);
469 }
470 if (lvn->unresolved_sfield_version_[i] != cmp->unresolved_sfield_version_[i]) {
471 unresolved_sfield_version_[i] = NewMemoryVersion(&merge_new_memory_version_);
472 }
473 }
474 }
475 }
476 }
477
PruneNonAliasingRefsForCatch()478 void LocalValueNumbering::PruneNonAliasingRefsForCatch() {
479 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
480 const BasicBlock* bb = gvn_->GetBasicBlock(lvn->Id());
481 if (UNLIKELY(bb->taken == id_) || UNLIKELY(bb->fall_through == id_)) {
482 // Non-exceptional path to a catch handler means that the catch block was actually
483 // empty and all exceptional paths lead to the shared path after that empty block.
484 continue;
485 }
486 DCHECK_EQ(bb->taken, kNullBlock);
487 DCHECK_NE(bb->fall_through, kNullBlock);
488 const BasicBlock* fall_through_bb = gvn_->GetBasicBlock(bb->fall_through);
489 const MIR* mir = fall_through_bb->first_mir_insn;
490 DCHECK(mir != nullptr);
491 // Only INVOKEs can leak and clobber non-aliasing references if they throw.
492 if ((mir->dalvikInsn.FlagsOf() & Instruction::kInvoke) != 0) {
493 HandleInvokeArgs(mir, lvn);
494 }
495 }
496 }
497
498
499 template <typename Set, Set LocalValueNumbering::* set_ptr>
IntersectSets()500 void LocalValueNumbering::IntersectSets() {
501 DCHECK_GE(gvn_->merge_lvns_.size(), 2u);
502
503 // Find the LVN with the least entries in the set.
504 const LocalValueNumbering* least_entries_lvn = gvn_->merge_lvns_[0];
505 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
506 if ((lvn->*set_ptr).size() < (least_entries_lvn->*set_ptr).size()) {
507 least_entries_lvn = lvn;
508 }
509 }
510
511 // For each key check if it's in all the LVNs.
512 for (const auto& key : least_entries_lvn->*set_ptr) {
513 bool checked = true;
514 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
515 if (lvn != least_entries_lvn && (lvn->*set_ptr).count(key) == 0u) {
516 checked = false;
517 break;
518 }
519 }
520 if (checked) {
521 (this->*set_ptr).emplace_hint((this->*set_ptr).end(), key);
522 }
523 }
524 }
525
CopyLiveSregValues(SregValueMap * dest,const SregValueMap & src)526 void LocalValueNumbering::CopyLiveSregValues(SregValueMap* dest, const SregValueMap& src) {
527 auto dest_end = dest->end();
528 ArenaBitVector* live_in_v = gvn_->GetMirGraph()->GetBasicBlock(id_)->data_flow_info->live_in_v;
529 DCHECK(live_in_v != nullptr);
530 for (const auto& entry : src) {
531 bool live = live_in_v->IsBitSet(gvn_->GetMirGraph()->SRegToVReg(entry.first));
532 if (live) {
533 dest->PutBefore(dest_end, entry.first, entry.second);
534 }
535 }
536 }
537
538 template <LocalValueNumbering::SregValueMap LocalValueNumbering::* map_ptr>
IntersectSregValueMaps()539 void LocalValueNumbering::IntersectSregValueMaps() {
540 DCHECK_GE(gvn_->merge_lvns_.size(), 2u);
541
542 // Find the LVN with the least entries in the set.
543 const LocalValueNumbering* least_entries_lvn = gvn_->merge_lvns_[0];
544 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
545 if ((lvn->*map_ptr).size() < (least_entries_lvn->*map_ptr).size()) {
546 least_entries_lvn = lvn;
547 }
548 }
549
550 // For each key check if it's in all the LVNs.
551 ArenaBitVector* live_in_v = gvn_->GetMirGraph()->GetBasicBlock(id_)->data_flow_info->live_in_v;
552 DCHECK(live_in_v != nullptr);
553 for (const auto& entry : least_entries_lvn->*map_ptr) {
554 bool live_and_same = live_in_v->IsBitSet(gvn_->GetMirGraph()->SRegToVReg(entry.first));
555 if (live_and_same) {
556 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
557 if (lvn != least_entries_lvn) {
558 auto it = (lvn->*map_ptr).find(entry.first);
559 if (it == (lvn->*map_ptr).end() || !(it->second == entry.second)) {
560 live_and_same = false;
561 break;
562 }
563 }
564 }
565 }
566 if (live_and_same) {
567 (this->*map_ptr).PutBefore((this->*map_ptr).end(), entry.first, entry.second);
568 }
569 }
570 }
571
572 // Intersect maps as sets. The value type must be equality-comparable.
573 template <typename Map>
InPlaceIntersectMaps(Map * work_map,const Map & other_map)574 void LocalValueNumbering::InPlaceIntersectMaps(Map* work_map, const Map& other_map) {
575 auto work_it = work_map->begin(), work_end = work_map->end();
576 auto cmp = work_map->value_comp();
577 for (const auto& entry : other_map) {
578 while (work_it != work_end &&
579 (cmp(*work_it, entry) ||
580 (!cmp(entry, *work_it) && !(work_it->second == entry.second)))) {
581 work_it = work_map->erase(work_it);
582 }
583 if (work_it == work_end) {
584 return;
585 }
586 ++work_it;
587 }
588 }
589
590 template <typename Set, Set LocalValueNumbering::*set_ptr, void (LocalValueNumbering::*MergeFn)(
591 const typename Set::value_type& entry, typename Set::iterator hint)>
MergeSets()592 void LocalValueNumbering::MergeSets() {
593 auto cmp = (this->*set_ptr).value_comp();
594 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
595 auto my_it = (this->*set_ptr).begin(), my_end = (this->*set_ptr).end();
596 for (const auto& entry : lvn->*set_ptr) {
597 while (my_it != my_end && cmp(*my_it, entry)) {
598 ++my_it;
599 }
600 if (my_it != my_end && !cmp(entry, *my_it)) {
601 // Already handled.
602 ++my_it;
603 } else {
604 // Merge values for this field_id.
605 (this->*MergeFn)(entry, my_it); // my_it remains valid across inserts to std::set/SafeMap.
606 }
607 }
608 }
609 }
610
IntersectAliasingValueLocations(AliasingValues * work_values,const AliasingValues * values)611 void LocalValueNumbering::IntersectAliasingValueLocations(AliasingValues* work_values,
612 const AliasingValues* values) {
613 auto cmp = work_values->load_value_map.key_comp();
614 auto work_it = work_values->load_value_map.begin(), work_end = work_values->load_value_map.end();
615 auto store_it = values->store_loc_set.begin(), store_end = values->store_loc_set.end();
616 auto load_it = values->load_value_map.begin(), load_end = values->load_value_map.end();
617 while (store_it != store_end || load_it != load_end) {
618 uint16_t loc;
619 if (store_it != store_end && (load_it == load_end || *store_it < load_it->first)) {
620 loc = *store_it;
621 ++store_it;
622 } else {
623 loc = load_it->first;
624 ++load_it;
625 DCHECK(store_it == store_end || cmp(loc, *store_it));
626 }
627 while (work_it != work_end && cmp(work_it->first, loc)) {
628 work_it = work_values->load_value_map.erase(work_it);
629 }
630 if (work_it != work_end && !cmp(loc, work_it->first)) {
631 // The location matches, keep it.
632 ++work_it;
633 }
634 }
635 while (work_it != work_end) {
636 work_it = work_values->load_value_map.erase(work_it);
637 }
638 }
639
MergeEscapedRefs(const ValueNameSet::value_type & entry,ValueNameSet::iterator hint)640 void LocalValueNumbering::MergeEscapedRefs(const ValueNameSet::value_type& entry,
641 ValueNameSet::iterator hint) {
642 // See if the ref is either escaped or non-aliasing in each predecessor.
643 bool is_escaped = true;
644 for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
645 if (lvn->non_aliasing_refs_.count(entry) == 0u &&
646 lvn->escaped_refs_.count(entry) == 0u) {
647 is_escaped = false;
648 break;
649 }
650 }
651 if (is_escaped) {
652 escaped_refs_.emplace_hint(hint, entry);
653 }
654 }
655
MergeEscapedIFieldTypeClobberSets(const EscapedIFieldClobberSet::value_type & entry,EscapedIFieldClobberSet::iterator hint)656 void LocalValueNumbering::MergeEscapedIFieldTypeClobberSets(
657 const EscapedIFieldClobberSet::value_type& entry, EscapedIFieldClobberSet::iterator hint) {
658 // Insert only type-clobber entries (field_id == kNoValue) of escaped refs.
659 if (entry.field_id == kNoValue && escaped_refs_.count(entry.base) != 0u) {
660 escaped_ifield_clobber_set_.emplace_hint(hint, entry);
661 }
662 }
663
MergeEscapedIFieldClobberSets(const EscapedIFieldClobberSet::value_type & entry,EscapedIFieldClobberSet::iterator hint)664 void LocalValueNumbering::MergeEscapedIFieldClobberSets(
665 const EscapedIFieldClobberSet::value_type& entry, EscapedIFieldClobberSet::iterator hint) {
666 // Insert only those entries of escaped refs that are not overridden by a type clobber.
667 if (!(hint == escaped_ifield_clobber_set_.end() &&
668 hint->base == entry.base && hint->type == entry.type) &&
669 escaped_refs_.count(entry.base) != 0u) {
670 escaped_ifield_clobber_set_.emplace_hint(hint, entry);
671 }
672 }
673
MergeEscapedArrayClobberSets(const EscapedArrayClobberSet::value_type & entry,EscapedArrayClobberSet::iterator hint)674 void LocalValueNumbering::MergeEscapedArrayClobberSets(
675 const EscapedArrayClobberSet::value_type& entry, EscapedArrayClobberSet::iterator hint) {
676 if (escaped_refs_.count(entry.base) != 0u) {
677 escaped_array_clobber_set_.emplace_hint(hint, entry);
678 }
679 }
680
void LocalValueNumbering::MergeNullChecked() {
  // Merge null_checked_ from all predecessors: a value name is null-checked here
  // only if it is null-checked in every predecessor LVN.
  DCHECK_GE(gvn_->merge_lvns_.size(), 2u);

  // Find the LVN with the least entries in the set. The merged result is an
  // intersection, so it suffices to test the members of the smallest input set.
  const LocalValueNumbering* least_entries_lvn = gvn_->merge_lvns_[0];
  for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
    if (lvn->null_checked_.size() < least_entries_lvn->null_checked_.size()) {
      least_entries_lvn = lvn;
    }
  }

  // For each null-checked value name check if it's null-checked in all the LVNs.
  for (const auto& value_name : least_entries_lvn->null_checked_) {
    // Merge null_checked_ for this ref.
    merge_names_.clear();
    merge_names_.resize(gvn_->merge_lvns_.size(), value_name);
    if (gvn_->NullCheckedInAllPredecessors(merge_names_)) {
      // Iterating a sorted set, so the end() hint keeps the insert cheap.
      null_checked_.insert(null_checked_.end(), value_name);
    }
  }

  // Now check if the least_entries_lvn has a null-check as the last insn.
  const BasicBlock* least_entries_bb = gvn_->GetBasicBlock(least_entries_lvn->Id());
  if (gvn_->HasNullCheckLastInsn(least_entries_bb, id_)) {
    // Also try to merge the value name tested by that trailing null check; it may
    // not be present in least_entries_lvn->null_checked_ itself.
    int s_reg = least_entries_bb->last_mir_insn->ssa_rep->uses[0];
    uint32_t value_name = least_entries_lvn->GetOperandValue(s_reg);
    merge_names_.clear();
    merge_names_.resize(gvn_->merge_lvns_.size(), value_name);
    if (gvn_->NullCheckedInAllPredecessors(merge_names_)) {
      null_checked_.insert(value_name);
    }
  }
}
714
void LocalValueNumbering::MergeDivZeroChecked() {
  // Merge div_zero_checked_ from all predecessors: a value name is checked here
  // only if it is div-zero-checked in every predecessor LVN.
  DCHECK_GE(gvn_->merge_lvns_.size(), 2u);

  // Find the LVN with the least entries in the set. The merged result is an
  // intersection, so it suffices to test the members of the smallest input set.
  const LocalValueNumbering* least_entries_lvn = gvn_->merge_lvns_[0];
  for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
    if (lvn->div_zero_checked_.size() < least_entries_lvn->div_zero_checked_.size()) {
      least_entries_lvn = lvn;
    }
  }

  // For each div-zero value name check if it's div-zero checked in all the LVNs.
  for (const auto& value_name : least_entries_lvn->div_zero_checked_) {
    // Merge div_zero_checked_ for this value name.
    merge_names_.clear();
    merge_names_.resize(gvn_->merge_lvns_.size(), value_name);
    if (gvn_->DivZeroCheckedInAllPredecessors(merge_names_)) {
      // Iterating a sorted set, so the end() hint keeps the insert cheap.
      div_zero_checked_.insert(div_zero_checked_.end(), value_name);
    }
  }
}
736
void LocalValueNumbering::MergeSFieldValues(const SFieldToValueMap::value_type& entry,
                                            SFieldToValueMap::iterator hint) {
  // Merge the value of one static field across all predecessor LVNs.
  uint16_t field_id = entry.first;
  merge_names_.clear();
  uint16_t value_name = kNoValue;
  bool same_values = true;
  for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
    // Get the value name as in HandleSGet() but don't modify *lvn.
    auto it = lvn->sfield_value_map_.find(field_id);
    if (it != lvn->sfield_value_map_.end()) {
      value_name = it->second;
    } else {
      // Not loaded in that predecessor; reconstruct the value name an SGET would get there.
      uint16_t type = gvn_->GetSFieldType(field_id);
      value_name = gvn_->LookupValue(kResolvedSFieldOp, field_id,
                                     lvn->unresolved_sfield_version_[type],
                                     lvn->global_memory_version_);
    }

    same_values = same_values && (merge_names_.empty() || value_name == merge_names_.back());
    merge_names_.push_back(value_name);
  }
  if (same_values) {
    // value_name already contains the result.
  } else {
    // Memoize merges in merge_map_ so identical incoming name tuples map to the same name.
    auto lb = merge_map_.lower_bound(merge_names_);
    if (lb != merge_map_.end() && !merge_map_.key_comp()(merge_names_, lb->first)) {
      value_name = lb->second;
    } else {
      value_name = gvn_->LookupValue(kMergeBlockSFieldVersionBumpOp, field_id, id_, kNoValue);
      merge_map_.PutBefore(lb, merge_names_, value_name);
      if (gvn_->NullCheckedInAllPredecessors(merge_names_)) {
        null_checked_.insert(value_name);
      }
    }
  }
  sfield_value_map_.PutBefore(hint, field_id, value_name);
}
774
void LocalValueNumbering::MergeNonAliasingIFieldValues(const IFieldLocToValueMap::value_type& entry,
                                                       IFieldLocToValueMap::iterator hint) {
  // Merge the value stored at one non-aliasing instance field location across all
  // predecessor LVNs.
  uint16_t field_loc = entry.first;
  merge_names_.clear();
  uint16_t value_name = kNoValue;
  bool same_values = true;
  for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
    // Get the value name as in HandleIGet() but don't modify *lvn.
    auto it = lvn->non_aliasing_ifield_value_map_.find(field_loc);
    if (it != lvn->non_aliasing_ifield_value_map_.end()) {
      value_name = it->second;
    } else {
      // Not loaded in that predecessor; use the initial-load value name.
      value_name = gvn_->LookupValue(kNonAliasingIFieldInitialOp, field_loc, kNoValue, kNoValue);
    }

    same_values = same_values && (merge_names_.empty() || value_name == merge_names_.back());
    merge_names_.push_back(value_name);
  }
  if (same_values) {
    // value_name already contains the result.
  } else {
    // Memoize merges in merge_map_ so identical incoming name tuples map to the same name.
    auto lb = merge_map_.lower_bound(merge_names_);
    if (lb != merge_map_.end() && !merge_map_.key_comp()(merge_names_, lb->first)) {
      value_name = lb->second;
    } else {
      value_name = gvn_->LookupValue(kMergeBlockNonAliasingIFieldVersionBumpOp, field_loc,
                                     id_, kNoValue);
      merge_map_.PutBefore(lb, merge_names_, value_name);
      if (gvn_->NullCheckedInAllPredecessors(merge_names_)) {
        null_checked_.insert(value_name);
      }
    }
  }
  non_aliasing_ifield_value_map_.PutBefore(hint, field_loc, value_name);
}
810
template <typename Map, Map LocalValueNumbering::*map_ptr, typename Versions>
void LocalValueNumbering::MergeAliasingValues(const typename Map::value_type& entry,
                                              typename Map::iterator hint) {
  // Merge the AliasingValues tracked for entry's key across all predecessor LVNs.
  // If the memory versions and stores agree everywhere, the values are copied and
  // the load maps intersected; otherwise a new merge-block version is created and
  // merged value names are calculated per surviving location.
  const typename Map::key_type& key = entry.first;

  auto it = (this->*map_ptr).PutBefore(hint, key, AliasingValues(this));
  AliasingValues* my_values = &it->second;

  const AliasingValues* cmp_values = nullptr;
  bool same_version = !Versions::HasNewBaseVersion(gvn_, this, key);
  uint16_t load_memory_version_for_same_version = kNoValue;
  if (same_version) {
    // Find the first non-null values.
    for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
      auto value = (lvn->*map_ptr).find(key);
      if (value != (lvn->*map_ptr).end()) {
        cmp_values = &value->second;
        break;
      }
    }
    DCHECK(cmp_values != nullptr);  // There must be at least one non-null values.

    // Check if we have identical memory versions, i.e. the global memory version, unresolved
    // field version and the values' memory_version_before_stores, last_stored_value
    // and store_loc_set are identical.
    for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
      auto value = (lvn->*map_ptr).find(key);
      if (value == (lvn->*map_ptr).end()) {
        // Key untracked in this predecessor; only OK if there were no stores to compare.
        if (cmp_values->memory_version_before_stores != kNoValue) {
          same_version = false;
          break;
        }
      } else if (cmp_values->last_stored_value != value->second.last_stored_value ||
          cmp_values->memory_version_before_stores != value->second.memory_version_before_stores ||
          cmp_values->store_loc_set != value->second.store_loc_set) {
        same_version = false;
        break;
      } else if (value->second.last_load_memory_version != kNoValue) {
        DCHECK(load_memory_version_for_same_version == kNoValue ||
            load_memory_version_for_same_version == value->second.last_load_memory_version);
        load_memory_version_for_same_version = value->second.last_load_memory_version;
      }
    }
  }

  if (same_version) {
    // Copy the identical values.
    my_values->memory_version_before_stores = cmp_values->memory_version_before_stores;
    my_values->last_stored_value = cmp_values->last_stored_value;
    my_values->store_loc_set = cmp_values->store_loc_set;
    my_values->last_load_memory_version = load_memory_version_for_same_version;
    // Merge load values seen in all incoming arcs (i.e. an intersection).
    if (!cmp_values->load_value_map.empty()) {
      my_values->load_value_map = cmp_values->load_value_map;
      for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
        auto value = (lvn->*map_ptr).find(key);
        if (value == (lvn->*map_ptr).end() || value->second.load_value_map.empty()) {
          my_values->load_value_map.clear();
          break;
        }
        InPlaceIntersectMaps(&my_values->load_value_map, value->second.load_value_map);
        if (my_values->load_value_map.empty()) {
          break;
        }
      }
    }
  } else {
    // Bump version number for the merge.
    my_values->memory_version_before_stores = my_values->last_load_memory_version =
        Versions::LookupMergeBlockValue(gvn_, id_, key);

    // Calculate the locations that have been either read from or written to in each incoming LVN.
    bool first_lvn = true;
    for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
      auto value = (lvn->*map_ptr).find(key);
      if (value == (lvn->*map_ptr).end()) {
        my_values->load_value_map.clear();
        break;
      }
      if (first_lvn) {
        first_lvn = false;
        // Copy the first LVN's locations. Values will be overwritten later.
        my_values->load_value_map = value->second.load_value_map;
        for (uint16_t location : value->second.store_loc_set) {
          my_values->load_value_map.Put(location, 0u);
        }
      } else {
        IntersectAliasingValueLocations(my_values, &value->second);
      }
    }
    // Calculate merged values for the intersection.
    for (auto& load_value_entry : my_values->load_value_map) {
      uint16_t location = load_value_entry.first;
      merge_names_.clear();
      uint16_t value_name = kNoValue;
      bool same_values = true;
      for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
        value_name = Versions::LookupMergeValue(gvn_, lvn, key, location);
        same_values = same_values && (merge_names_.empty() || value_name == merge_names_.back());
        merge_names_.push_back(value_name);
      }
      if (same_values) {
        // value_name already contains the result.
      } else {
        auto lb = merge_map_.lower_bound(merge_names_);
        if (lb != merge_map_.end() && !merge_map_.key_comp()(merge_names_, lb->first)) {
          value_name = lb->second;
        } else {
          // NOTE: In addition to the key and id_ which don't change on an LVN recalculation
          // during GVN, we also add location which can actually change on recalculation, so the
          // value_name below may change. This could lead to an infinite loop if the location
          // value name always changed when the referenced value name changes. However, given that
          // we assign unique value names for other merges, such as Phis, such a dependency is
          // not possible in a well-formed SSA graph.
          value_name = Versions::LookupMergeLocationValue(gvn_, id_, key, location);
          merge_map_.PutBefore(lb, merge_names_, value_name);
          if (gvn_->NullCheckedInAllPredecessors(merge_names_)) {
            null_checked_.insert(value_name);
          }
        }
      }
      load_value_entry.second = value_name;
    }
  }
}
936
void LocalValueNumbering::Merge(MergeType merge_type) {
  // Merge the states of all predecessor LVNs into this one. How much is merged
  // depends on merge_type: return merges keep only sreg maps, catch merges skip
  // aliasing locations (memory is clobbered), normal merges do everything.
  DCHECK_GE(gvn_->merge_lvns_.size(), 2u);

  // Always reserve space in merge_names_. Even if we don't use it in Merge() we may need it
  // in GetStartingVregValueNumberImpl() when the merge_names_'s allocator is not the top.
  merge_names_.reserve(gvn_->merge_lvns_.size());

  IntersectSregValueMaps<&LocalValueNumbering::sreg_value_map_>();
  IntersectSregValueMaps<&LocalValueNumbering::sreg_wide_value_map_>();
  if (merge_type == kReturnMerge) {
    // RETURN or PHI+RETURN. We need only sreg value maps.
    return;
  }

  MergeMemoryVersions(merge_type == kCatchMerge);

  // Merge non-aliasing maps/sets.
  IntersectSets<ValueNameSet, &LocalValueNumbering::non_aliasing_refs_>();
  if (!non_aliasing_refs_.empty() && merge_type == kCatchMerge) {
    PruneNonAliasingRefsForCatch();
  }
  if (!non_aliasing_refs_.empty()) {
    MergeSets<IFieldLocToValueMap, &LocalValueNumbering::non_aliasing_ifield_value_map_,
              &LocalValueNumbering::MergeNonAliasingIFieldValues>();
    MergeSets<NonAliasingArrayValuesMap, &LocalValueNumbering::non_aliasing_array_value_map_,
              &LocalValueNumbering::MergeAliasingValues<
                  NonAliasingArrayValuesMap, &LocalValueNumbering::non_aliasing_array_value_map_,
                  NonAliasingArrayVersions>>();
  }

  // We won't do anything complicated for range checks, just calculate the intersection.
  IntersectSets<RangeCheckSet, &LocalValueNumbering::range_checked_>();

  // Merge null_checked_. We may later insert more, such as merged object field values.
  MergeNullChecked();

  // Now merge the div_zero_checked_.
  MergeDivZeroChecked();

  if (merge_type == kCatchMerge) {
    // Memory is clobbered. New memory version already created, don't merge aliasing locations.
    return;
  }

  DCHECK(merge_type == kNormalMerge);

  // Merge escaped refs and clobber sets.
  MergeSets<ValueNameSet, &LocalValueNumbering::escaped_refs_,
            &LocalValueNumbering::MergeEscapedRefs>();
  if (!escaped_refs_.empty()) {
    // Type clobbers must be merged before per-field clobbers; the latter are pruned
    // against the former.
    MergeSets<EscapedIFieldClobberSet, &LocalValueNumbering::escaped_ifield_clobber_set_,
              &LocalValueNumbering::MergeEscapedIFieldTypeClobberSets>();
    MergeSets<EscapedIFieldClobberSet, &LocalValueNumbering::escaped_ifield_clobber_set_,
              &LocalValueNumbering::MergeEscapedIFieldClobberSets>();
    MergeSets<EscapedArrayClobberSet, &LocalValueNumbering::escaped_array_clobber_set_,
              &LocalValueNumbering::MergeEscapedArrayClobberSets>();
  }

  MergeSets<SFieldToValueMap, &LocalValueNumbering::sfield_value_map_,
            &LocalValueNumbering::MergeSFieldValues>();
  MergeSets<AliasingIFieldValuesMap, &LocalValueNumbering::aliasing_ifield_value_map_,
            &LocalValueNumbering::MergeAliasingValues<
                AliasingIFieldValuesMap, &LocalValueNumbering::aliasing_ifield_value_map_,
                AliasingIFieldVersions>>();
  MergeSets<AliasingArrayValuesMap, &LocalValueNumbering::aliasing_array_value_map_,
            &LocalValueNumbering::MergeAliasingValues<
                AliasingArrayValuesMap, &LocalValueNumbering::aliasing_array_value_map_,
                AliasingArrayVersions>>();
}
1006
PrepareEntryBlock()1007 void LocalValueNumbering::PrepareEntryBlock() {
1008 uint32_t vreg = gvn_->GetMirGraph()->GetFirstInVR();
1009 CompilationUnit* cu = gvn_->GetCompilationUnit();
1010 const char* shorty = cu->shorty;
1011 ++shorty; // Skip return value.
1012 if ((cu->access_flags & kAccStatic) == 0) {
1013 // If non-static method, mark "this" as non-null
1014 uint16_t value_name = GetOperandValue(vreg);
1015 ++vreg;
1016 null_checked_.insert(value_name);
1017 }
1018 for ( ; *shorty != 0; ++shorty, ++vreg) {
1019 if (*shorty == 'J' || *shorty == 'D') {
1020 uint16_t value_name = GetOperandValueWide(vreg);
1021 SetOperandValueWide(vreg, value_name);
1022 ++vreg;
1023 }
1024 }
1025 }
1026
MarkNonAliasingNonNull(MIR * mir)1027 uint16_t LocalValueNumbering::MarkNonAliasingNonNull(MIR* mir) {
1028 uint16_t res = GetOperandValue(mir->ssa_rep->defs[0]);
1029 DCHECK(null_checked_.find(res) == null_checked_.end());
1030 null_checked_.insert(res);
1031 non_aliasing_refs_.insert(res);
1032 return res;
1033 }
1034
IsNonAliasing(uint16_t reg) const1035 bool LocalValueNumbering::IsNonAliasing(uint16_t reg) const {
1036 return non_aliasing_refs_.find(reg) != non_aliasing_refs_.end();
1037 }
1038
IsNonAliasingIField(uint16_t reg,uint16_t field_id,uint16_t type) const1039 bool LocalValueNumbering::IsNonAliasingIField(uint16_t reg, uint16_t field_id,
1040 uint16_t type) const {
1041 if (IsNonAliasing(reg)) {
1042 return true;
1043 }
1044 if (escaped_refs_.find(reg) == escaped_refs_.end()) {
1045 return false;
1046 }
1047 // Check for IPUTs to unresolved fields.
1048 EscapedIFieldClobberKey key1 = { reg, type, kNoValue };
1049 if (escaped_ifield_clobber_set_.find(key1) != escaped_ifield_clobber_set_.end()) {
1050 return false;
1051 }
1052 // Check for aliased IPUTs to the same field.
1053 EscapedIFieldClobberKey key2 = { reg, type, field_id };
1054 return escaped_ifield_clobber_set_.find(key2) == escaped_ifield_clobber_set_.end();
1055 }
1056
IsNonAliasingArray(uint16_t reg,uint16_t type) const1057 bool LocalValueNumbering::IsNonAliasingArray(uint16_t reg, uint16_t type) const {
1058 if (IsNonAliasing(reg)) {
1059 return true;
1060 }
1061 if (escaped_refs_.count(reg) == 0u) {
1062 return false;
1063 }
1064 // Check for aliased APUTs.
1065 EscapedArrayClobberKey key = { reg, type };
1066 return escaped_array_clobber_set_.find(key) == escaped_array_clobber_set_.end();
1067 }
1068
void LocalValueNumbering::HandleNullCheck(MIR* mir, uint16_t reg) {
  // If the value name is already known non-null, mark this mir's null check as
  // redundant; otherwise record that the value is now checked.
  auto lb = null_checked_.lower_bound(reg);
  if (lb != null_checked_.end() && *lb == reg) {
    if (LIKELY(gvn_->CanModify())) {
      if (gvn_->GetCompilationUnit()->verbose) {
        LOG(INFO) << "Removing null check for 0x" << std::hex << mir->offset;
      }
      mir->optimization_flags |= MIR_IGNORE_NULL_CHECK;
    }
  } else {
    // Reuse the lower_bound as the insertion hint.
    null_checked_.insert(lb, reg);
  }
}
1082
void LocalValueNumbering::HandleRangeCheck(MIR* mir, uint16_t array, uint16_t index) {
  // If this (array, index) pair was already range-checked, mark the check in this
  // mir as redundant; otherwise record the pair as checked.
  RangeCheckKey key = { array, index };
  auto lb = range_checked_.lower_bound(key);
  if (lb != range_checked_.end() && !RangeCheckKeyComparator()(key, *lb)) {
    if (LIKELY(gvn_->CanModify())) {
      if (gvn_->GetCompilationUnit()->verbose) {
        LOG(INFO) << "Removing range check for 0x" << std::hex << mir->offset;
      }
      mir->optimization_flags |= MIR_IGNORE_RANGE_CHECK;
    }
  } else {
    // Mark range check completed.
    range_checked_.insert(lb, key);
  }
}
1098
void LocalValueNumbering::HandleDivZeroCheck(MIR* mir, uint16_t reg) {
  // If the divisor's value name was already checked for zero, mark this mir's
  // div-zero check as redundant; otherwise record that the value is now checked.
  auto lb = div_zero_checked_.lower_bound(reg);
  if (lb != div_zero_checked_.end() && *lb == reg) {
    if (LIKELY(gvn_->CanModify())) {
      if (gvn_->GetCompilationUnit()->verbose) {
        LOG(INFO) << "Removing div zero check for 0x" << std::hex << mir->offset;
      }
      mir->optimization_flags |= MIR_IGNORE_DIV_ZERO_CHECK;
    }
  } else {
    // Reuse the lower_bound as the insertion hint.
    div_zero_checked_.insert(lb, reg);
  }
}
1112
HandlePutObject(MIR * mir)1113 void LocalValueNumbering::HandlePutObject(MIR* mir) {
1114 // If we're storing a non-aliasing reference, stop tracking it as non-aliasing now.
1115 uint16_t base = GetOperandValue(mir->ssa_rep->uses[0]);
1116 HandleEscapingRef(base);
1117 if (gvn_->CanModify() && null_checked_.count(base) != 0u) {
1118 if (gvn_->GetCompilationUnit()->verbose) {
1119 LOG(INFO) << "Removing GC card mark value null check for 0x" << std::hex << mir->offset;
1120 }
1121 mir->optimization_flags |= MIR_STORE_NON_NULL_VALUE;
1122 }
1123 }
1124
HandleEscapingRef(uint16_t base)1125 void LocalValueNumbering::HandleEscapingRef(uint16_t base) {
1126 auto it = non_aliasing_refs_.find(base);
1127 if (it != non_aliasing_refs_.end()) {
1128 non_aliasing_refs_.erase(it);
1129 escaped_refs_.insert(base);
1130 }
1131 }
1132
HandleInvokeArgs(const MIR * mir,const LocalValueNumbering * mir_lvn)1133 void LocalValueNumbering::HandleInvokeArgs(const MIR* mir, const LocalValueNumbering* mir_lvn) {
1134 const int32_t* uses = mir->ssa_rep->uses;
1135 const int32_t* uses_end = uses + mir->ssa_rep->num_uses;
1136 while (uses != uses_end) {
1137 uint16_t sreg = *uses;
1138 ++uses;
1139 // Avoid LookupValue() so that we don't store new values in the global value map.
1140 auto local_it = mir_lvn->sreg_value_map_.find(sreg);
1141 if (local_it != mir_lvn->sreg_value_map_.end()) {
1142 non_aliasing_refs_.erase(local_it->second);
1143 } else {
1144 uint16_t value_name = gvn_->FindValue(kNoValue, sreg, kNoValue, kNoValue);
1145 if (value_name != kNoValue) {
1146 non_aliasing_refs_.erase(value_name);
1147 }
1148 }
1149 }
1150 }
1151
uint16_t LocalValueNumbering::HandlePhi(MIR* mir) {
  // Compute the value name of a Phi from the value names of its inputs in the
  // predecessor LVNs, memoizing merges in merge_map_.
  if (gvn_->merge_lvns_.empty()) {
    // Running LVN without a full GVN?
    return kNoValue;
  }
  // Determine if this Phi is merging wide regs.
  RegLocation raw_dest = gvn_->GetMirGraph()->GetRawDest(mir);
  if (raw_dest.high_word) {
    // This is the high part of a wide reg. Ignore the Phi.
    return kNoValue;
  }
  bool wide = raw_dest.wide;
  // Iterate over *merge_lvns_ and skip incoming sregs for BBs without associated LVN.
  merge_names_.clear();
  uint16_t value_name = kNoValue;
  bool same_values = true;
  BasicBlockId* incoming = mir->meta.phi_incoming;
  int32_t* uses = mir->ssa_rep->uses;
  int16_t pos = 0;
  for (const LocalValueNumbering* lvn : gvn_->merge_lvns_) {
    DCHECK_LT(pos, mir->ssa_rep->num_uses);
    // Advance to the phi operand coming from this LVN's basic block.
    while (incoming[pos] != lvn->Id()) {
      ++pos;
      DCHECK_LT(pos, mir->ssa_rep->num_uses);
    }
    int s_reg = uses[pos];
    ++pos;
    value_name = wide ? lvn->GetOperandValueWide(s_reg) : lvn->GetOperandValue(s_reg);

    same_values = same_values && (merge_names_.empty() || value_name == merge_names_.back());
    merge_names_.push_back(value_name);
  }
  if (same_values) {
    // value_name already contains the result.
  } else {
    // Memoize: identical incoming name tuples yield the same merged name.
    auto lb = merge_map_.lower_bound(merge_names_);
    if (lb != merge_map_.end() && !merge_map_.key_comp()(merge_names_, lb->first)) {
      value_name = lb->second;
    } else {
      // New merge: base the name on the defining sreg, which is unique.
      value_name = gvn_->LookupValue(kNoValue, mir->ssa_rep->defs[0], kNoValue, kNoValue);
      merge_map_.PutBefore(lb, merge_names_, value_name);
      if (!wide && gvn_->NullCheckedInAllPredecessors(merge_names_)) {
        null_checked_.insert(value_name);
      }
      if (gvn_->DivZeroCheckedInAllPredecessors(merge_names_)) {
        div_zero_checked_.insert(value_name);
      }
    }
  }
  if (wide) {
    SetOperandValueWide(mir->ssa_rep->defs[0], value_name);
  } else {
    SetOperandValue(mir->ssa_rep->defs[0], value_name);
  }
  return value_name;
}
1208
HandleConst(MIR * mir,uint32_t value)1209 uint16_t LocalValueNumbering::HandleConst(MIR* mir, uint32_t value) {
1210 RegLocation raw_dest = gvn_->GetMirGraph()->GetRawDest(mir);
1211 uint16_t res;
1212 if (value == 0u && raw_dest.ref) {
1213 res = GlobalValueNumbering::kNullValue;
1214 } else {
1215 Instruction::Code op = raw_dest.fp ? Instruction::CONST_HIGH16 : Instruction::CONST;
1216 res = gvn_->LookupValue(op, Low16Bits(value), High16Bits(value), 0);
1217 }
1218 SetOperandValue(mir->ssa_rep->defs[0], res);
1219 return res;
1220 }
1221
HandleConstWide(MIR * mir,uint64_t value)1222 uint16_t LocalValueNumbering::HandleConstWide(MIR* mir, uint64_t value) {
1223 RegLocation raw_dest = gvn_->GetMirGraph()->GetRawDest(mir);
1224 Instruction::Code op = raw_dest.fp ? Instruction::CONST_HIGH16 : Instruction::CONST;
1225 uint32_t low_word = Low32Bits(value);
1226 uint32_t high_word = High32Bits(value);
1227 uint16_t low_res = gvn_->LookupValue(op, Low16Bits(low_word), High16Bits(low_word), 1);
1228 uint16_t high_res = gvn_->LookupValue(op, Low16Bits(high_word), High16Bits(high_word), 2);
1229 uint16_t res = gvn_->LookupValue(op, low_res, high_res, 3);
1230 SetOperandValueWide(mir->ssa_rep->defs[0], res);
1231 return res;
1232 }
1233
uint16_t LocalValueNumbering::HandleAGet(MIR* mir, uint16_t opcode) {
  // Process an array load: record/eliminate null and range checks, then assign a
  // value name to the loaded register.
  uint16_t array = GetOperandValue(mir->ssa_rep->uses[0]);
  HandleNullCheck(mir, array);
  uint16_t index = GetOperandValue(mir->ssa_rep->uses[1]);
  HandleRangeCheck(mir, array, index);
  uint16_t type = AGetMemAccessType(static_cast<Instruction::Code>(opcode));
  // Establish value number for loaded register.
  uint16_t res;
  if (IsNonAliasingArray(array, type)) {
    // Non-aliasing arrays are keyed by (array, index).
    res = HandleAliasingValuesGet<NonAliasingArrayVersions>(&non_aliasing_array_value_map_,
                                                            array, index);
  } else {
    // Aliasing arrays are keyed by (type, location).
    uint16_t location = gvn_->GetArrayLocation(array, index);
    res = HandleAliasingValuesGet<AliasingArrayVersions>(&aliasing_array_value_map_,
                                                         type, location);
  }
  if (opcode == Instruction::AGET_WIDE) {
    SetOperandValueWide(mir->ssa_rep->defs[0], res);
  } else {
    SetOperandValue(mir->ssa_rep->defs[0], res);
  }
  return res;
}
1257
void LocalValueNumbering::HandleAPut(MIR* mir, uint16_t opcode) {
  // Process an array store: record/eliminate null and range checks, update the
  // tracked value for the stored location and, for aliasing stores, clobber
  // escaped arrays of the same type.
  int array_idx = (opcode == Instruction::APUT_WIDE) ? 2 : 1;  // Wide value uses two slots.
  int index_idx = array_idx + 1;
  uint16_t array = GetOperandValue(mir->ssa_rep->uses[array_idx]);
  HandleNullCheck(mir, array);
  uint16_t index = GetOperandValue(mir->ssa_rep->uses[index_idx]);
  HandleRangeCheck(mir, array, index);

  uint16_t type = APutMemAccessType(static_cast<Instruction::Code>(opcode));
  uint16_t value = (opcode == Instruction::APUT_WIDE)
                   ? GetOperandValueWide(mir->ssa_rep->uses[0])
                   : GetOperandValue(mir->ssa_rep->uses[0]);
  if (IsNonAliasing(array)) {
    bool put_is_live = HandleAliasingValuesPut<NonAliasingArrayVersions>(
        &non_aliasing_array_value_map_, array, index, value);
    if (!put_is_live) {
      // This APUT can be eliminated, it stores the same value that's already in the field.
      // TODO: Eliminate the APUT.
      return;
    }
  } else {
    uint16_t location = gvn_->GetArrayLocation(array, index);
    bool put_is_live = HandleAliasingValuesPut<AliasingArrayVersions>(
        &aliasing_array_value_map_, type, location, value);
    if (!put_is_live) {
      // This APUT can be eliminated, it stores the same value that's already in the field.
      // TODO: Eliminate the APUT.
      return;
    }

    // Clobber all escaped array refs for this type.
    for (uint16_t escaped_array : escaped_refs_) {
      EscapedArrayClobberKey clobber_key = { escaped_array, type };
      escaped_array_clobber_set_.insert(clobber_key);
    }
  }
}
1295
uint16_t LocalValueNumbering::HandleIGet(MIR* mir, uint16_t opcode) {
  // Process an instance field load: record/eliminate the null check, then assign
  // a value name to the loaded register.
  uint16_t base = GetOperandValue(mir->ssa_rep->uses[0]);
  HandleNullCheck(mir, base);
  const MirFieldInfo& field_info = gvn_->GetMirGraph()->GetIFieldLoweringInfo(mir);
  uint16_t res;
  if (!field_info.IsResolved() || field_info.IsVolatile()) {
    // Unresolved fields may be volatile, so handle them as such to be safe.
    HandleInvokeOrClInitOrAcquireOp(mir);  // Volatile GETs have acquire semantics.
    // Volatile fields always get a new memory version; field id is irrelevant.
    // Use result s_reg - will be unique.
    res = gvn_->LookupValue(kNoValue, mir->ssa_rep->defs[0], kNoValue, kNoValue);
  } else {
    uint16_t type = IGetMemAccessType(static_cast<Instruction::Code>(opcode));
    uint16_t field_id = gvn_->GetIFieldId(mir);
    if (IsNonAliasingIField(base, field_id, type)) {
      // Non-aliasing field: look up (or create) the value at the field location.
      uint16_t loc = gvn_->LookupValue(kNonAliasingIFieldLocOp, base, field_id, type);
      auto lb = non_aliasing_ifield_value_map_.lower_bound(loc);
      if (lb != non_aliasing_ifield_value_map_.end() && lb->first == loc) {
        res = lb->second;
      } else {
        // First load from this location; create an initial-value name and cache it.
        res = gvn_->LookupValue(kNonAliasingIFieldInitialOp, loc, kNoValue, kNoValue);
        non_aliasing_ifield_value_map_.PutBefore(lb, loc, res);
      }
    } else {
      res = HandleAliasingValuesGet<AliasingIFieldVersions>(&aliasing_ifield_value_map_,
                                                            field_id, base);
    }
  }
  if (opcode == Instruction::IGET_WIDE) {
    SetOperandValueWide(mir->ssa_rep->defs[0], res);
  } else {
    SetOperandValue(mir->ssa_rep->defs[0], res);
  }
  return res;
}
1331
void LocalValueNumbering::HandleIPut(MIR* mir, uint16_t opcode) {
  // Process an instance field store: record/eliminate the null check and update
  // the tracked field values, clobbering whatever the store may alias.
  int base_reg = (opcode == Instruction::IPUT_WIDE) ? 2 : 1;  // Wide value uses two slots.
  uint16_t base = GetOperandValue(mir->ssa_rep->uses[base_reg]);
  HandleNullCheck(mir, base);
  uint16_t type = IPutMemAccessType(static_cast<Instruction::Code>(opcode));
  const MirFieldInfo& field_info = gvn_->GetMirGraph()->GetIFieldLoweringInfo(mir);
  if (!field_info.IsResolved()) {
    // Unresolved fields always alias with everything of the same type.
    // Use mir->offset as modifier; without elaborate inlining, it will be unique.
    unresolved_ifield_version_[type] =
        gvn_->LookupValue(kUnresolvedIFieldOp, kNoValue, kNoValue, mir->offset);

    // For simplicity, treat base as escaped now.
    HandleEscapingRef(base);

    // Clobber all fields of escaped references of the same type.
    for (uint16_t escaped_ref : escaped_refs_) {
      EscapedIFieldClobberKey clobber_key = { escaped_ref, type, kNoValue };
      escaped_ifield_clobber_set_.insert(clobber_key);
    }

    // Aliasing fields of the same type may have been overwritten.
    auto it = aliasing_ifield_value_map_.begin(), end = aliasing_ifield_value_map_.end();
    while (it != end) {
      if (gvn_->GetIFieldType(it->first) != type) {
        ++it;
      } else {
        it = aliasing_ifield_value_map_.erase(it);
      }
    }
  } else if (field_info.IsVolatile()) {
    // Nothing to do, resolved volatile fields always get a new memory version anyway and
    // can't alias with resolved non-volatile fields.
  } else {
    uint16_t field_id = gvn_->GetIFieldId(mir);
    uint16_t value = (opcode == Instruction::IPUT_WIDE)
                     ? GetOperandValueWide(mir->ssa_rep->uses[0])
                     : GetOperandValue(mir->ssa_rep->uses[0]);
    if (IsNonAliasing(base)) {
      // Non-aliasing object: track the stored value at the field location.
      uint16_t loc = gvn_->LookupValue(kNonAliasingIFieldLocOp, base, field_id, type);
      auto lb = non_aliasing_ifield_value_map_.lower_bound(loc);
      if (lb != non_aliasing_ifield_value_map_.end() && lb->first == loc) {
        if (lb->second == value) {
          // This IPUT can be eliminated, it stores the same value that's already in the field.
          // TODO: Eliminate the IPUT.
          return;
        }
        lb->second = value;  // Overwrite.
      } else {
        non_aliasing_ifield_value_map_.PutBefore(lb, loc, value);
      }
    } else {
      bool put_is_live = HandleAliasingValuesPut<AliasingIFieldVersions>(
          &aliasing_ifield_value_map_, field_id, base, value);
      if (!put_is_live) {
        // This IPUT can be eliminated, it stores the same value that's already in the field.
        // TODO: Eliminate the IPUT.
        return;
      }

      // Clobber all fields of escaped references for this field.
      for (uint16_t escaped_ref : escaped_refs_) {
        EscapedIFieldClobberKey clobber_key = { escaped_ref, type, field_id };
        escaped_ifield_clobber_set_.insert(clobber_key);
      }
    }
  }
}
1400
uint16_t LocalValueNumbering::HandleSGet(MIR* mir, uint16_t opcode) {
  // Process a static field load: wipe aliasing state when the load can have
  // side effects, then assign a value name to the loaded register.
  const MirSFieldLoweringInfo& field_info = gvn_->GetMirGraph()->GetSFieldLoweringInfo(mir);
  if (!field_info.IsResolved() || field_info.IsVolatile() ||
      (!field_info.IsClassInitialized() &&
       (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0)) {
    // Volatile SGETs (and unresolved fields are potentially volatile) have acquire semantics
    // and class initialization can call arbitrary functions, we need to wipe aliasing values.
    HandleInvokeOrClInitOrAcquireOp(mir);
  }
  uint16_t res;
  if (!field_info.IsResolved() || field_info.IsVolatile()) {
    // Unresolved fields may be volatile, so handle them as such to be safe.
    // Volatile fields always get a new memory version; field id is irrelevant.
    // Use result s_reg - will be unique.
    res = gvn_->LookupValue(kNoValue, mir->ssa_rep->defs[0], kNoValue, kNoValue);
  } else {
    uint16_t type = SGetMemAccessType(static_cast<Instruction::Code>(opcode));
    uint16_t field_id = gvn_->GetSFieldId(mir);
    auto lb = sfield_value_map_.lower_bound(field_id);
    if (lb != sfield_value_map_.end() && lb->first == field_id) {
      res = lb->second;
    } else {
      // Resolved non-volatile static fields can alias with non-resolved fields of the same type,
      // so we need to use unresolved_sfield_version_[type] in addition to global_memory_version_
      // to determine the version of the field.
      res = gvn_->LookupValue(kResolvedSFieldOp, field_id,
                              unresolved_sfield_version_[type], global_memory_version_);
      sfield_value_map_.PutBefore(lb, field_id, res);
    }
  }
  if (opcode == Instruction::SGET_WIDE) {
    SetOperandValueWide(mir->ssa_rep->defs[0], res);
  } else {
    SetOperandValue(mir->ssa_rep->defs[0], res);
  }
  return res;
}
1438
HandleSPut(MIR * mir,uint16_t opcode)1439 void LocalValueNumbering::HandleSPut(MIR* mir, uint16_t opcode) {
1440 const MirSFieldLoweringInfo& field_info = gvn_->GetMirGraph()->GetSFieldLoweringInfo(mir);
1441 if (!field_info.IsClassInitialized() &&
1442 (mir->optimization_flags & MIR_CLASS_IS_INITIALIZED) == 0) {
1443 // Class initialization can call arbitrary functions, we need to wipe aliasing values.
1444 HandleInvokeOrClInitOrAcquireOp(mir);
1445 }
1446 uint16_t type = SPutMemAccessType(static_cast<Instruction::Code>(opcode));
1447 if (!field_info.IsResolved()) {
1448 // Unresolved fields always alias with everything of the same type.
1449 // Use mir->offset as modifier; without elaborate inlining, it will be unique.
1450 unresolved_sfield_version_[type] =
1451 gvn_->LookupValue(kUnresolvedSFieldOp, kNoValue, kNoValue, mir->offset);
1452 RemoveSFieldsForType(type);
1453 } else if (field_info.IsVolatile()) {
1454 // Nothing to do, resolved volatile fields always get a new memory version anyway and
1455 // can't alias with resolved non-volatile fields.
1456 } else {
1457 uint16_t field_id = gvn_->GetSFieldId(mir);
1458 uint16_t value = (opcode == Instruction::SPUT_WIDE)
1459 ? GetOperandValueWide(mir->ssa_rep->uses[0])
1460 : GetOperandValue(mir->ssa_rep->uses[0]);
1461 // Resolved non-volatile static fields can alias with non-resolved fields of the same type,
1462 // so we need to use unresolved_sfield_version_[type] in addition to global_memory_version_
1463 // to determine the version of the field.
1464 auto lb = sfield_value_map_.lower_bound(field_id);
1465 if (lb != sfield_value_map_.end() && lb->first == field_id) {
1466 if (lb->second == value) {
1467 // This SPUT can be eliminated, it stores the same value that's already in the field.
1468 // TODO: Eliminate the SPUT.
1469 return;
1470 }
1471 lb->second = value; // Overwrite.
1472 } else {
1473 sfield_value_map_.PutBefore(lb, field_id, value);
1474 }
1475 }
1476 }
1477
RemoveSFieldsForType(uint16_t type)1478 void LocalValueNumbering::RemoveSFieldsForType(uint16_t type) {
1479 // Erase all static fields of this type from the sfield_value_map_.
1480 for (auto it = sfield_value_map_.begin(), end = sfield_value_map_.end(); it != end; ) {
1481 if (gvn_->GetSFieldType(it->first) == type) {
1482 it = sfield_value_map_.erase(it);
1483 } else {
1484 ++it;
1485 }
1486 }
1487 }
1488
// Conservatively invalidate all aliasing memory state for a MIR that can observe or
// cause arbitrary heap writes: an invoke, a class initialization, or an acquire
// operation (e.g. MONITOR_ENTER, volatile load).
void LocalValueNumbering::HandleInvokeOrClInitOrAcquireOp(MIR* mir) {
  // Start a new global memory version.
  // Use mir->offset as modifier; without elaborate inlining, it will be unique.
  global_memory_version_ =
      gvn_->LookupValue(kInvokeMemoryVersionBumpOp, 0u, 0u, mir->offset);
  // All static fields and instance fields and array elements of aliasing references,
  // including escaped references, may have been modified.
  sfield_value_map_.clear();
  aliasing_ifield_value_map_.clear();
  aliasing_array_value_map_.clear();
  // The pending clobber info for escaped references is obsolete as well.
  escaped_refs_.clear();
  escaped_ifield_clobber_set_.clear();
  escaped_array_clobber_set_.clear();
}
1502
// Central dispatch of local value numbering: assigns value names to the MIR's defs,
// records side effects (null checks, range checks, memory version bumps, escaping
// references) in the LVN state, and returns the value name of the result, or kNoValue
// when the MIR defines no tracked value.
uint16_t LocalValueNumbering::GetValueNumber(MIR* mir) {
  uint16_t res = kNoValue;
  uint16_t opcode = mir->dalvikInsn.opcode;
  switch (opcode) {
    case Instruction::NOP:
    case Instruction::RETURN_VOID:
    case Instruction::RETURN:
    case Instruction::RETURN_OBJECT:
    case Instruction::RETURN_WIDE:
    case Instruction::GOTO:
    case Instruction::GOTO_16:
    case Instruction::GOTO_32:
    case Instruction::THROW:
    case Instruction::FILL_ARRAY_DATA:
    case Instruction::PACKED_SWITCH:
    case Instruction::SPARSE_SWITCH:
    case Instruction::IF_EQ:
    case Instruction::IF_NE:
    case Instruction::IF_LT:
    case Instruction::IF_GE:
    case Instruction::IF_GT:
    case Instruction::IF_LE:
    case Instruction::IF_EQZ:
    case Instruction::IF_NEZ:
    case Instruction::IF_LTZ:
    case Instruction::IF_GEZ:
    case Instruction::IF_GTZ:
    case Instruction::IF_LEZ:
    case kMirOpFusedCmplFloat:
    case kMirOpFusedCmpgFloat:
    case kMirOpFusedCmplDouble:
    case kMirOpFusedCmpgDouble:
    case kMirOpFusedCmpLong:
      // Nothing defined - take no action.
      break;

    case Instruction::MONITOR_ENTER:
      HandleNullCheck(mir, GetOperandValue(mir->ssa_rep->uses[0]));
      HandleInvokeOrClInitOrAcquireOp(mir);  // Acquire operation.
      break;

    case Instruction::MONITOR_EXIT:
      HandleNullCheck(mir, GetOperandValue(mir->ssa_rep->uses[0]));
      // If we're running GVN and CanModify(), uneliminated null check indicates bytecode error.
      if ((mir->optimization_flags & MIR_IGNORE_NULL_CHECK) == 0 &&
          gvn_->work_lvn_ != nullptr && gvn_->CanModify()) {
        LOG(WARNING) << "Bytecode error: MONITOR_EXIT is still null checked at 0x" << std::hex
            << mir->offset << " in " << PrettyMethod(gvn_->cu_->method_idx, *gvn_->cu_->dex_file);
      }
      break;

    case Instruction::FILLED_NEW_ARRAY:
    case Instruction::FILLED_NEW_ARRAY_RANGE:
      // Nothing defined but the result will be unique and non-null.
      if (mir->next != nullptr && mir->next->dalvikInsn.opcode == Instruction::MOVE_RESULT_OBJECT) {
        uint16_t array = MarkNonAliasingNonNull(mir->next);
        // Do not SetOperandValue(), we'll do that when we process the MOVE_RESULT_OBJECT.
        if (kLocalValueNumberingEnableFilledNewArrayTracking && mir->ssa_rep->num_uses != 0u) {
          AliasingValues* values = GetAliasingValues(&non_aliasing_array_value_map_, array);
          // Clear the value if we got a merged version in a loop.
          *values = AliasingValues(this);
          // Record each filled element's value and mark its index as range-checked.
          for (size_t i = 0u, count = mir->ssa_rep->num_uses; i != count; ++i) {
            DCHECK_EQ(High16Bits(i), 0u);
            uint16_t index = gvn_->LookupValue(Instruction::CONST, i, 0u, 0);
            uint16_t value = GetOperandValue(mir->ssa_rep->uses[i]);
            values->load_value_map.Put(index, value);
            RangeCheckKey key = { array, index };
            range_checked_.insert(key);
          }
        }
        // The MOVE_RESULT_OBJECT will be processed next and we'll return the value name then.
      }
      // All args escaped (if references).
      for (size_t i = 0u, count = mir->ssa_rep->num_uses; i != count; ++i) {
        uint16_t reg = GetOperandValue(mir->ssa_rep->uses[i]);
        HandleEscapingRef(reg);
      }
      break;

    case kMirOpNullCheck:
      HandleNullCheck(mir, GetOperandValue(mir->ssa_rep->uses[0]));
      break;

    case Instruction::INVOKE_DIRECT:
    case Instruction::INVOKE_DIRECT_RANGE:
    case Instruction::INVOKE_VIRTUAL:
    case Instruction::INVOKE_VIRTUAL_RANGE:
    case Instruction::INVOKE_SUPER:
    case Instruction::INVOKE_SUPER_RANGE:
    case Instruction::INVOKE_INTERFACE:
    case Instruction::INVOKE_INTERFACE_RANGE: {
      // Nothing defined but handle the null check.
      uint16_t reg = GetOperandValue(mir->ssa_rep->uses[0]);
      HandleNullCheck(mir, reg);
    }
    FALLTHROUGH_INTENDED;
    case Instruction::INVOKE_STATIC:
    case Instruction::INVOKE_STATIC_RANGE:
      // Make ref args aliasing.
      HandleInvokeArgs(mir, this);
      // The callee may modify any reachable memory; wipe all aliasing values.
      HandleInvokeOrClInitOrAcquireOp(mir);
      break;

    case Instruction::INSTANCE_OF: {
      uint16_t operand = GetOperandValue(mir->ssa_rep->uses[0]);
      uint16_t type = mir->dalvikInsn.vC;
      res = gvn_->LookupValue(Instruction::INSTANCE_OF, operand, type, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;
    case Instruction::CHECK_CAST:
      if (gvn_->CanModify()) {
        // Check if there was an instance-of operation on the same value and if we are
        // in a block where its result is true. If so, we can eliminate the check-cast.
        uint16_t operand = GetOperandValue(mir->ssa_rep->uses[0]);
        uint16_t type = mir->dalvikInsn.vB;
        uint16_t cond = gvn_->FindValue(Instruction::INSTANCE_OF, operand, type, kNoValue);
        if (cond != kNoValue && gvn_->IsTrueInBlock(cond, Id())) {
          if (gvn_->GetCompilationUnit()->verbose) {
            LOG(INFO) << "Removing check-cast at 0x" << std::hex << mir->offset;
          }
          // Don't use kMirOpNop. Keep the check-cast as it defines the type of the register.
          mir->optimization_flags |= MIR_IGNORE_CHECK_CAST;
        }
      }
      break;

    case Instruction::MOVE_RESULT:
    case Instruction::MOVE_RESULT_OBJECT:
      // 1 result, treat as unique each time, use result s_reg - will be unique.
      res = GetOperandValue(mir->ssa_rep->defs[0]);
      SetOperandValue(mir->ssa_rep->defs[0], res);
      break;
    case Instruction::MOVE_EXCEPTION:
    case Instruction::NEW_INSTANCE:
    case Instruction::NEW_ARRAY:
      // 1 result, treat as unique each time, use result s_reg - will be unique.
      res = MarkNonAliasingNonNull(mir);
      SetOperandValue(mir->ssa_rep->defs[0], res);
      break;
    case Instruction::CONST_CLASS:
      DCHECK_EQ(Low16Bits(mir->dalvikInsn.vB), mir->dalvikInsn.vB);
      res = gvn_->LookupValue(Instruction::CONST_CLASS, mir->dalvikInsn.vB, 0, 0);
      SetOperandValue(mir->ssa_rep->defs[0], res);
      // A class reference is never null and the runtime-owned Class object never aliases.
      null_checked_.insert(res);
      non_aliasing_refs_.insert(res);
      break;
    case Instruction::CONST_STRING:
    case Instruction::CONST_STRING_JUMBO:
      // These strings are internalized, so assign value based on the string pool index.
      res = gvn_->LookupValue(Instruction::CONST_STRING, Low16Bits(mir->dalvikInsn.vB),
                              High16Bits(mir->dalvikInsn.vB), 0);
      SetOperandValue(mir->ssa_rep->defs[0], res);
      null_checked_.insert(res);  // May already be there.
      // NOTE: Hacking the contents of an internalized string via reflection is possible
      // but the behavior is undefined. Therefore, we consider the string constant and
      // the reference non-aliasing.
      // TUNING: We could keep this property even if the reference "escapes".
      non_aliasing_refs_.insert(res);  // May already be there.
      break;
    case Instruction::MOVE_RESULT_WIDE:
      // 1 wide result, treat as unique each time, use result s_reg - will be unique.
      res = GetOperandValueWide(mir->ssa_rep->defs[0]);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
      break;

    case kMirOpPhi:
      res = HandlePhi(mir);
      break;

    case Instruction::MOVE:
    case Instruction::MOVE_OBJECT:
    case Instruction::MOVE_16:
    case Instruction::MOVE_OBJECT_16:
    case Instruction::MOVE_FROM16:
    case Instruction::MOVE_OBJECT_FROM16:
    case kMirOpCopy:
      // Just copy value number of source to value number of result.
      res = GetOperandValue(mir->ssa_rep->uses[0]);
      SetOperandValue(mir->ssa_rep->defs[0], res);
      break;

    case Instruction::MOVE_WIDE:
    case Instruction::MOVE_WIDE_16:
    case Instruction::MOVE_WIDE_FROM16:
      // Just copy value number of source to value number of result.
      res = GetOperandValueWide(mir->ssa_rep->uses[0]);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
      break;

    case Instruction::CONST_HIGH16:
      // vB holds the high 16 bits of the 32-bit constant.
      res = HandleConst(mir, mir->dalvikInsn.vB << 16);
      break;
    case Instruction::CONST:
    case Instruction::CONST_4:
    case Instruction::CONST_16:
      res = HandleConst(mir, mir->dalvikInsn.vB);
      break;

    case Instruction::CONST_WIDE_16:
    case Instruction::CONST_WIDE_32:
      // Sign-extend the 32-bit constant to 64 bits.
      res = HandleConstWide(
          mir,
          mir->dalvikInsn.vB +
              ((mir->dalvikInsn.vB & 0x80000000) != 0 ? UINT64_C(0xffffffff00000000) : 0u));
      break;

    case Instruction::CONST_WIDE:
      res = HandleConstWide(mir, mir->dalvikInsn.vB_wide);
      break;

    case Instruction::CONST_WIDE_HIGH16:
      // vB holds the high 16 bits of the 64-bit constant.
      res = HandleConstWide(mir, static_cast<uint64_t>(mir->dalvikInsn.vB) << 48);
      break;

    case Instruction::ARRAY_LENGTH: {
      // Handle the null check.
      uint16_t reg = GetOperandValue(mir->ssa_rep->uses[0]);
      HandleNullCheck(mir, reg);
    }
    FALLTHROUGH_INTENDED;
    case Instruction::NEG_INT:
    case Instruction::NOT_INT:
    case Instruction::NEG_FLOAT:
    case Instruction::INT_TO_BYTE:
    case Instruction::INT_TO_SHORT:
    case Instruction::INT_TO_CHAR:
    case Instruction::INT_TO_FLOAT:
    case Instruction::FLOAT_TO_INT: {
      // res = op + 1 operand
      uint16_t operand1 = GetOperandValue(mir->ssa_rep->uses[0]);
      res = gvn_->LookupValue(opcode, operand1, kNoValue, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::LONG_TO_FLOAT:
    case Instruction::LONG_TO_INT:
    case Instruction::DOUBLE_TO_FLOAT:
    case Instruction::DOUBLE_TO_INT: {
      // res = op + 1 wide operand
      uint16_t operand1 = GetOperandValueWide(mir->ssa_rep->uses[0]);
      res = gvn_->LookupValue(opcode, operand1, kNoValue, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::DOUBLE_TO_LONG:
    case Instruction::LONG_TO_DOUBLE:
    case Instruction::NEG_LONG:
    case Instruction::NOT_LONG:
    case Instruction::NEG_DOUBLE: {
      // wide res = op + 1 wide operand
      uint16_t operand1 = GetOperandValueWide(mir->ssa_rep->uses[0]);
      res = gvn_->LookupValue(opcode, operand1, kNoValue, kNoValue);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::FLOAT_TO_DOUBLE:
    case Instruction::FLOAT_TO_LONG:
    case Instruction::INT_TO_DOUBLE:
    case Instruction::INT_TO_LONG: {
      // wide res = op + 1 operand
      uint16_t operand1 = GetOperandValue(mir->ssa_rep->uses[0]);
      res = gvn_->LookupValue(opcode, operand1, kNoValue, kNoValue);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::CMPL_DOUBLE:
    case Instruction::CMPG_DOUBLE:
    case Instruction::CMP_LONG: {
      // res = op + 2 wide operands
      uint16_t operand1 = GetOperandValueWide(mir->ssa_rep->uses[0]);
      uint16_t operand2 = GetOperandValueWide(mir->ssa_rep->uses[2]);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::DIV_INT:
    case Instruction::DIV_INT_2ADDR:
    case Instruction::REM_INT:
    case Instruction::REM_INT_2ADDR:
      // Division and remainder need a zero check on the divisor first.
      HandleDivZeroCheck(mir, GetOperandValue(mir->ssa_rep->uses[1]));
      FALLTHROUGH_INTENDED;

    case Instruction::CMPG_FLOAT:
    case Instruction::CMPL_FLOAT:
    case Instruction::ADD_INT:
    case Instruction::ADD_INT_2ADDR:
    case Instruction::MUL_INT:
    case Instruction::MUL_INT_2ADDR:
    case Instruction::AND_INT:
    case Instruction::AND_INT_2ADDR:
    case Instruction::OR_INT:
    case Instruction::OR_INT_2ADDR:
    case Instruction::XOR_INT:
    case Instruction::XOR_INT_2ADDR:
    case Instruction::SUB_INT:
    case Instruction::SUB_INT_2ADDR:
    case Instruction::SHL_INT:
    case Instruction::SHL_INT_2ADDR:
    case Instruction::SHR_INT:
    case Instruction::SHR_INT_2ADDR:
    case Instruction::USHR_INT:
    case Instruction::USHR_INT_2ADDR: {
      // res = op + 2 operands
      uint16_t operand1 = GetOperandValue(mir->ssa_rep->uses[0]);
      uint16_t operand2 = GetOperandValue(mir->ssa_rep->uses[1]);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::DIV_LONG:
    case Instruction::REM_LONG:
    case Instruction::DIV_LONG_2ADDR:
    case Instruction::REM_LONG_2ADDR:
      // Wide divisor starts at uses[2] (uses[0..1] are the wide dividend).
      HandleDivZeroCheck(mir, GetOperandValueWide(mir->ssa_rep->uses[2]));
      FALLTHROUGH_INTENDED;

    case Instruction::ADD_LONG:
    case Instruction::SUB_LONG:
    case Instruction::MUL_LONG:
    case Instruction::AND_LONG:
    case Instruction::OR_LONG:
    case Instruction::XOR_LONG:
    case Instruction::ADD_LONG_2ADDR:
    case Instruction::SUB_LONG_2ADDR:
    case Instruction::MUL_LONG_2ADDR:
    case Instruction::AND_LONG_2ADDR:
    case Instruction::OR_LONG_2ADDR:
    case Instruction::XOR_LONG_2ADDR:
    case Instruction::ADD_DOUBLE:
    case Instruction::SUB_DOUBLE:
    case Instruction::MUL_DOUBLE:
    case Instruction::DIV_DOUBLE:
    case Instruction::REM_DOUBLE:
    case Instruction::ADD_DOUBLE_2ADDR:
    case Instruction::SUB_DOUBLE_2ADDR:
    case Instruction::MUL_DOUBLE_2ADDR:
    case Instruction::DIV_DOUBLE_2ADDR:
    case Instruction::REM_DOUBLE_2ADDR: {
      // wide res = op + 2 wide operands
      uint16_t operand1 = GetOperandValueWide(mir->ssa_rep->uses[0]);
      uint16_t operand2 = GetOperandValueWide(mir->ssa_rep->uses[2]);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::SHL_LONG:
    case Instruction::SHR_LONG:
    case Instruction::USHR_LONG:
    case Instruction::SHL_LONG_2ADDR:
    case Instruction::SHR_LONG_2ADDR:
    case Instruction::USHR_LONG_2ADDR: {
      // wide res = op + 1 wide operand + 1 operand
      uint16_t operand1 = GetOperandValueWide(mir->ssa_rep->uses[0]);
      uint16_t operand2 = GetOperandValue(mir->ssa_rep->uses[2]);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValueWide(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::ADD_FLOAT:
    case Instruction::SUB_FLOAT:
    case Instruction::MUL_FLOAT:
    case Instruction::DIV_FLOAT:
    case Instruction::REM_FLOAT:
    case Instruction::ADD_FLOAT_2ADDR:
    case Instruction::SUB_FLOAT_2ADDR:
    case Instruction::MUL_FLOAT_2ADDR:
    case Instruction::DIV_FLOAT_2ADDR:
    case Instruction::REM_FLOAT_2ADDR: {
      // res = op + 2 operands
      uint16_t operand1 = GetOperandValue(mir->ssa_rep->uses[0]);
      uint16_t operand2 = GetOperandValue(mir->ssa_rep->uses[1]);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::RSUB_INT:
    case Instruction::ADD_INT_LIT16:
    case Instruction::MUL_INT_LIT16:
    case Instruction::DIV_INT_LIT16:
    case Instruction::REM_INT_LIT16:
    case Instruction::AND_INT_LIT16:
    case Instruction::OR_INT_LIT16:
    case Instruction::XOR_INT_LIT16:
    case Instruction::ADD_INT_LIT8:
    case Instruction::RSUB_INT_LIT8:
    case Instruction::MUL_INT_LIT8:
    case Instruction::DIV_INT_LIT8:
    case Instruction::REM_INT_LIT8:
    case Instruction::AND_INT_LIT8:
    case Instruction::OR_INT_LIT8:
    case Instruction::XOR_INT_LIT8:
    case Instruction::SHL_INT_LIT8:
    case Instruction::SHR_INT_LIT8:
    case Instruction::USHR_INT_LIT8: {
      // Same as res = op + 2 operands, except use vC as operand 2
      uint16_t operand1 = GetOperandValue(mir->ssa_rep->uses[0]);
      uint16_t operand2 = gvn_->LookupValue(Instruction::CONST, mir->dalvikInsn.vC, 0, 0);
      res = gvn_->LookupValue(opcode, operand1, operand2, kNoValue);
      SetOperandValue(mir->ssa_rep->defs[0], res);
    }
    break;

    case Instruction::AGET_OBJECT:
    case Instruction::AGET:
    case Instruction::AGET_WIDE:
    case Instruction::AGET_BOOLEAN:
    case Instruction::AGET_BYTE:
    case Instruction::AGET_CHAR:
    case Instruction::AGET_SHORT:
      res = HandleAGet(mir, opcode);
      break;

    case Instruction::APUT_OBJECT:
      HandlePutObject(mir);
      FALLTHROUGH_INTENDED;
    case Instruction::APUT:
    case Instruction::APUT_WIDE:
    case Instruction::APUT_BYTE:
    case Instruction::APUT_BOOLEAN:
    case Instruction::APUT_SHORT:
    case Instruction::APUT_CHAR:
      HandleAPut(mir, opcode);
      break;

    case Instruction::IGET_OBJECT:
    case Instruction::IGET:
    case Instruction::IGET_WIDE:
    case Instruction::IGET_BOOLEAN:
    case Instruction::IGET_BYTE:
    case Instruction::IGET_CHAR:
    case Instruction::IGET_SHORT:
      res = HandleIGet(mir, opcode);
      break;

    case Instruction::IPUT_OBJECT:
      HandlePutObject(mir);
      FALLTHROUGH_INTENDED;
    case Instruction::IPUT:
    case Instruction::IPUT_WIDE:
    case Instruction::IPUT_BOOLEAN:
    case Instruction::IPUT_BYTE:
    case Instruction::IPUT_CHAR:
    case Instruction::IPUT_SHORT:
      HandleIPut(mir, opcode);
      break;

    case Instruction::SGET_OBJECT:
    case Instruction::SGET:
    case Instruction::SGET_WIDE:
    case Instruction::SGET_BOOLEAN:
    case Instruction::SGET_BYTE:
    case Instruction::SGET_CHAR:
    case Instruction::SGET_SHORT:
      res = HandleSGet(mir, opcode);
      break;

    case Instruction::SPUT_OBJECT:
      HandlePutObject(mir);
      FALLTHROUGH_INTENDED;
    case Instruction::SPUT:
    case Instruction::SPUT_WIDE:
    case Instruction::SPUT_BOOLEAN:
    case Instruction::SPUT_BYTE:
    case Instruction::SPUT_CHAR:
    case Instruction::SPUT_SHORT:
      HandleSPut(mir, opcode);
      break;
  }
  return res;
}
1983
GetEndingVregValueNumberImpl(int v_reg,bool wide) const1984 uint16_t LocalValueNumbering::GetEndingVregValueNumberImpl(int v_reg, bool wide) const {
1985 const BasicBlock* bb = gvn_->GetBasicBlock(Id());
1986 DCHECK(bb != nullptr);
1987 int s_reg = bb->data_flow_info->vreg_to_ssa_map_exit[v_reg];
1988 if (s_reg == INVALID_SREG) {
1989 return kNoValue;
1990 }
1991 if (gvn_->GetMirGraph()->GetRegLocation(s_reg).wide != wide) {
1992 return kNoValue;
1993 }
1994 if (wide) {
1995 int high_s_reg = bb->data_flow_info->vreg_to_ssa_map_exit[v_reg + 1];
1996 if (high_s_reg != s_reg + 1) {
1997 return kNoValue; // High word has been overwritten.
1998 }
1999 return GetSregValueWide(s_reg);
2000 } else {
2001 return GetSregValue(s_reg);
2002 }
2003 }
2004
GetStartingVregValueNumberImpl(int v_reg,bool wide) const2005 uint16_t LocalValueNumbering::GetStartingVregValueNumberImpl(int v_reg, bool wide) const {
2006 DCHECK_EQ(gvn_->mode_, GlobalValueNumbering::kModeGvnPostProcessing);
2007 DCHECK(gvn_->CanModify());
2008 const BasicBlock* bb = gvn_->GetBasicBlock(Id());
2009 DCHECK(bb != nullptr);
2010 DCHECK_NE(bb->predecessors.size(), 0u);
2011 if (bb->predecessors.size() == 1u) {
2012 return gvn_->GetLvn(bb->predecessors[0])->GetEndingVregValueNumberImpl(v_reg, wide);
2013 }
2014 merge_names_.clear();
2015 uint16_t value_name = kNoValue;
2016 bool same_values = true;
2017 for (BasicBlockId pred_id : bb->predecessors) {
2018 value_name = gvn_->GetLvn(pred_id)->GetEndingVregValueNumberImpl(v_reg, wide);
2019 if (value_name == kNoValue) {
2020 return kNoValue;
2021 }
2022 same_values = same_values && (merge_names_.empty() || value_name == merge_names_.back());
2023 merge_names_.push_back(value_name);
2024 }
2025 if (same_values) {
2026 // value_name already contains the result.
2027 } else {
2028 auto lb = merge_map_.lower_bound(merge_names_);
2029 if (lb != merge_map_.end() && !merge_map_.key_comp()(merge_names_, lb->first)) {
2030 value_name = lb->second;
2031 } else {
2032 value_name = kNoValue; // We never assigned a value name to this set of merged names.
2033 }
2034 }
2035 return value_name;
2036 }
2037
2038 } // namespace art
2039