1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/crankshaft/hydrogen-bce.h"
6
7 namespace v8 {
8 namespace internal {
9
10
// We try to "factor up" HBoundsCheck instructions towards the root of the
// dominator tree.
// For now we handle checks where the index is like "exp + int32value".
// If in the dominator tree we check "exp + v1" and later (dominated)
// "exp + v2", if v2 <= v1 we can safely remove the second check, and if
// v2 > v1 we can use v2 in the 1st check and again remove the second.
// To do so we keep a dictionary of all checks where the key is the pair
// "exp, length".
// The class BoundsCheckKey represents this key.
20 class BoundsCheckKey : public ZoneObject {
21 public:
IndexBase() const22 HValue* IndexBase() const { return index_base_; }
Length() const23 HValue* Length() const { return length_; }
24
Hash()25 uint32_t Hash() {
26 return static_cast<uint32_t>(index_base_->Hashcode() ^ length_->Hashcode());
27 }
28
Create(Zone * zone,HBoundsCheck * check,int32_t * offset)29 static BoundsCheckKey* Create(Zone* zone,
30 HBoundsCheck* check,
31 int32_t* offset) {
32 if (!check->index()->representation().IsSmiOrInteger32()) return NULL;
33
34 HValue* index_base = NULL;
35 HConstant* constant = NULL;
36 bool is_sub = false;
37
38 if (check->index()->IsAdd()) {
39 HAdd* index = HAdd::cast(check->index());
40 if (index->left()->IsConstant()) {
41 constant = HConstant::cast(index->left());
42 index_base = index->right();
43 } else if (index->right()->IsConstant()) {
44 constant = HConstant::cast(index->right());
45 index_base = index->left();
46 }
47 } else if (check->index()->IsSub()) {
48 HSub* index = HSub::cast(check->index());
49 is_sub = true;
50 if (index->right()->IsConstant()) {
51 constant = HConstant::cast(index->right());
52 index_base = index->left();
53 }
54 } else if (check->index()->IsConstant()) {
55 index_base = check->block()->graph()->GetConstant0();
56 constant = HConstant::cast(check->index());
57 }
58
59 if (constant != NULL && constant->HasInteger32Value() &&
60 constant->Integer32Value() != kMinInt) {
61 *offset = is_sub ? - constant->Integer32Value()
62 : constant->Integer32Value();
63 } else {
64 *offset = 0;
65 index_base = check->index();
66 }
67
68 return new(zone) BoundsCheckKey(index_base, check->length());
69 }
70
71 private:
BoundsCheckKey(HValue * index_base,HValue * length)72 BoundsCheckKey(HValue* index_base, HValue* length)
73 : index_base_(index_base),
74 length_(length) { }
75
76 HValue* index_base_;
77 HValue* length_;
78
79 DISALLOW_COPY_AND_ASSIGN(BoundsCheckKey);
80 };
81
82
// Data about each HBoundsCheck that can be eliminated or moved.
// It is the "value" in the dictionary indexed by "base-index, length"
// (the key is BoundsCheckKey).
// We scan the code with a dominator tree traversal.
// Traversing the dominator tree we keep a stack (implemented as a singly
// linked list) of "data" for each basic block that contains a relevant check
// with the same key (the dictionary holds the head of the list).
// We also keep all the "data" created for a given basic block in a list, and
// use it to "clean up" the dictionary when backtracking in the dominator tree
// traversal.
// Doing this, each dictionary entry always directly points to the check that
// is dominating the code being examined now.
// We also track the current "offset" of the index expression and use it to
// decide if any check is already "covered" (so it can be removed) or not.
class BoundsCheckBbData: public ZoneObject {
 public:
  BoundsCheckKey* Key() const { return key_; }
  // Smallest index offset already covered by the tracked checks.
  int32_t LowerOffset() const { return lower_offset_; }
  // Largest index offset already covered by the tracked checks.
  int32_t UpperOffset() const { return upper_offset_; }
  // Basic block this data element was created for.
  HBasicBlock* BasicBlock() const { return basic_block_; }
  // Check enforcing LowerOffset() (may be the same as UpperCheck()).
  HBoundsCheck* LowerCheck() const { return lower_check_; }
  // Check enforcing UpperOffset() (may be the same as LowerCheck()).
  HBoundsCheck* UpperCheck() const { return upper_check_; }
  // Next data element created for the same basic block; this singly linked
  // list is used by PostProcessBlock to clean up the dictionary.
  BoundsCheckBbData* NextInBasicBlock() const { return next_in_bb_; }
  // Data element this one shadows in the dominator-tree "stack" of entries
  // for the same key (NULL if this is the topmost element).
  BoundsCheckBbData* FatherInDominatorTree() const { return father_in_dt_; }

  // Returns true if a check at |offset| is subsumed by the current
  // [LowerOffset(), UpperOffset()] range and can therefore be removed.
  bool OffsetIsCovered(int32_t offset) const {
    return offset >= LowerOffset() && offset <= UpperOffset();
  }

  // True when lower and upper bounds are enforced by a single instruction.
  bool HasSingleCheck() { return lower_check_ == upper_check_; }

  // After |check| has been tightened to cover |offset|, walk up the
  // dominator-tree stack and update every ancestor that shares the same
  // upper check, so its bookkeeping matches the modified instruction.
  void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->UpperCheck() == check) {
      DCHECK(data->upper_offset_ < offset);
      data->upper_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  // Symmetric to UpdateUpperOffsets, for the lower bound.
  void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset) {
    BoundsCheckBbData* data = FatherInDominatorTree();
    while (data != NULL && data->LowerCheck() == check) {
      DCHECK(data->lower_offset_ > offset);
      data->lower_offset_ = offset;
      data = data->FatherInDominatorTree();
    }
  }

  // The goal of this method is to modify either upper_offset_ or
  // lower_offset_ so that also new_offset is covered (the covered
  // range grows).
  //
  // The precondition is that new_check follows UpperCheck() and
  // LowerCheck() in the same basic block, and that new_offset is not
  // covered (otherwise we could simply remove new_check).
  //
  // If HasSingleCheck() is true then new_check is added as "second check"
  // (either upper or lower; note that HasSingleCheck() becomes false).
  // Otherwise one of the current checks is modified so that it also covers
  // new_offset, and new_check is removed.
  void CoverCheck(HBoundsCheck* new_check,
                  int32_t new_offset) {
    DCHECK(new_check->index()->representation().IsSmiOrInteger32());
    bool keep_new_check = false;

    if (new_offset > upper_offset_) {
      upper_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        upper_check_ = new_check;
      } else {
        TightenCheck(upper_check_, new_check, new_offset);
        UpdateUpperOffsets(upper_check_, upper_offset_);
      }
    } else if (new_offset < lower_offset_) {
      lower_offset_ = new_offset;
      if (HasSingleCheck()) {
        keep_new_check = true;
        lower_check_ = new_check;
      } else {
        TightenCheck(lower_check_, new_check, new_offset);
        UpdateLowerOffsets(lower_check_, lower_offset_);
      }
    } else {
      // Should never have called CoverCheck() in this case.
      UNREACHABLE();
    }

    if (!keep_new_check) {
      // new_check is now subsumed by the tightened existing check: delete
      // it and redirect its uses to the checked value.
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating check #%d after tightening\n",
                        new_check->id());
      }
      new_check->block()->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      new_check->DeleteAndReplaceWith(new_check->ActualValue());
    } else {
      // new_check became the new lower or upper check: hoist it next to the
      // other one so both bounds are checked together.
      HBoundsCheck* first_check = new_check == lower_check_ ? upper_check_
                                                            : lower_check_;
      if (FLAG_trace_bce) {
        base::OS::Print("Moving second check #%d after first check #%d\n",
                        new_check->id(), first_check->id());
      }
      // The length is guaranteed to be live at first_check.
      DCHECK(new_check->length() == first_check->length());
      HInstruction* old_position = new_check->next();
      new_check->Unlink();
      new_check->InsertAfter(first_check);
      MoveIndexIfNecessary(new_check->index(), new_check, old_position);
    }
  }

  BoundsCheckBbData(BoundsCheckKey* key,
                    int32_t lower_offset,
                    int32_t upper_offset,
                    HBasicBlock* bb,
                    HBoundsCheck* lower_check,
                    HBoundsCheck* upper_check,
                    BoundsCheckBbData* next_in_bb,
                    BoundsCheckBbData* father_in_dt)
      : key_(key),
        lower_offset_(lower_offset),
        upper_offset_(upper_offset),
        basic_block_(bb),
        lower_check_(lower_check),
        upper_check_(upper_check),
        next_in_bb_(next_in_bb),
        father_in_dt_(father_in_dt) { }

 private:
  BoundsCheckKey* key_;
  int32_t lower_offset_;
  int32_t upper_offset_;
  HBasicBlock* basic_block_;
  HBoundsCheck* lower_check_;
  HBoundsCheck* upper_check_;
  BoundsCheckBbData* next_in_bb_;
  BoundsCheckBbData* father_in_dt_;

  // Ensures index_raw (and, if needed, its inputs) is defined before
  // insert_before, unlinking and re-inserting instructions as necessary.
  void MoveIndexIfNecessary(HValue* index_raw,
                            HBoundsCheck* insert_before,
                            HInstruction* end_of_scan_range) {
    // index_raw can be HAdd(index_base, offset), HSub(index_base, offset),
    // HConstant(offset) or index_base directly.
    // In the latter case, no need to move anything.
    if (index_raw->IsAdd() || index_raw->IsSub()) {
      HArithmeticBinaryOperation* index =
          HArithmeticBinaryOperation::cast(index_raw);
      HValue* left_input = index->left();
      HValue* right_input = index->right();
      HValue* context = index->context();
      bool must_move_index = false;
      bool must_move_left_input = false;
      bool must_move_right_input = false;
      bool must_move_context = false;
      // Walk backwards from end_of_scan_range to insert_before (crossing
      // block boundaries via the dominator) to find out which of the
      // involved values are defined inside that range and must be moved.
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == left_input) must_move_left_input = true;
        if (cursor == right_input) must_move_right_input = true;
        if (cursor == context) must_move_context = true;
        if (cursor == index) must_move_index = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move_index) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
      // The BCE algorithm only selects mergeable bounds checks that share
      // the same "index_base", so we'll only ever have to move constants.
      if (must_move_left_input) {
        HConstant::cast(left_input)->Unlink();
        HConstant::cast(left_input)->InsertBefore(index);
      }
      if (must_move_right_input) {
        HConstant::cast(right_input)->Unlink();
        HConstant::cast(right_input)->InsertBefore(index);
      }
      if (must_move_context) {
        // Contexts are always constants.
        HConstant::cast(context)->Unlink();
        HConstant::cast(context)->InsertBefore(index);
      }
    } else if (index_raw->IsConstant()) {
      // Same backwards scan as above, but only the constant itself can
      // possibly need moving.
      HConstant* index = HConstant::cast(index_raw);
      bool must_move = false;
      for (HInstruction* cursor = end_of_scan_range; cursor != insert_before;) {
        if (cursor == index) must_move = true;
        if (cursor->previous() == NULL) {
          cursor = cursor->block()->dominator()->end();
        } else {
          cursor = cursor->previous();
        }
      }
      if (must_move) {
        index->Unlink();
        index->InsertBefore(insert_before);
      }
    }
  }

  // Replaces original_check's index operand with tighter_check's index so
  // that it enforces the stronger condition; uses of original_check's result
  // are first redirected to its old index value. The caller then removes
  // tighter_check.
  void TightenCheck(HBoundsCheck* original_check,
                    HBoundsCheck* tighter_check,
                    int32_t new_offset) {
    DCHECK(original_check->length() == tighter_check->length());
    MoveIndexIfNecessary(tighter_check->index(), original_check, tighter_check);
    original_check->ReplaceAllUsesWith(original_check->index());
    original_check->SetOperandAt(0, tighter_check->index());
    if (FLAG_trace_bce) {
      base::OS::Print("Tightened check #%d with offset %d from #%d\n",
                      original_check->id(), new_offset, tighter_check->id());
    }
  }

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckBbData);
};
302
303
BoundsCheckKeyMatch(void * key1,void * key2)304 static bool BoundsCheckKeyMatch(void* key1, void* key2) {
305 BoundsCheckKey* k1 = static_cast<BoundsCheckKey*>(key1);
306 BoundsCheckKey* k2 = static_cast<BoundsCheckKey*>(key2);
307 return k1->IndexBase() == k2->IndexBase() && k1->Length() == k2->Length();
308 }
309
// A hash map from BoundsCheckKey to BoundsCheckBbData, allocated in |zone|,
// using BoundsCheckKeyMatch for key equality.
BoundsCheckTable::BoundsCheckTable(Zone* zone)
    : CustomMatcherZoneHashMap(BoundsCheckKeyMatch,
                               ZoneHashMap::kDefaultHashMapCapacity,
                               ZoneAllocationPolicy(zone)) {}
314
LookupOrInsert(BoundsCheckKey * key,Zone * zone)315 BoundsCheckBbData** BoundsCheckTable::LookupOrInsert(BoundsCheckKey* key,
316 Zone* zone) {
317 return reinterpret_cast<BoundsCheckBbData**>(
318 &(CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
319 ZoneAllocationPolicy(zone))
320 ->value));
321 }
322
323
Insert(BoundsCheckKey * key,BoundsCheckBbData * data,Zone * zone)324 void BoundsCheckTable::Insert(BoundsCheckKey* key,
325 BoundsCheckBbData* data,
326 Zone* zone) {
327 CustomMatcherZoneHashMap::LookupOrInsert(key, key->Hash(),
328 ZoneAllocationPolicy(zone))
329 ->value = data;
330 }
331
332
// Removes |key| (and its associated data) from the table if present.
void BoundsCheckTable::Delete(BoundsCheckKey* key) {
  Remove(key, key->Hash());
}
336
337
// One frame of the explicit DFS stack used by EliminateRedundantBoundsChecks.
class HBoundsCheckEliminationState {
 public:
  // Block currently being visited.
  HBasicBlock* block_;
  // Head of the list of data elements created for block_ (needed to restore
  // the dictionary when this frame is popped).
  BoundsCheckBbData* bb_data_list_;
  // Index of the next dominated child block to visit.
  int index_;
};
344
345
346 // Eliminates checks in bb and recursively in the dominated blocks.
347 // Also replace the results of check instructions with the original value, if
348 // the result is used. This is safe now, since we don't do code motion after
349 // this point. It enables better register allocation since the value produced
350 // by check instructions is really a copy of the original value.
EliminateRedundantBoundsChecks(HBasicBlock * entry)351 void HBoundsCheckEliminationPhase::EliminateRedundantBoundsChecks(
352 HBasicBlock* entry) {
353 // Allocate the stack.
354 HBoundsCheckEliminationState* stack =
355 zone()->NewArray<HBoundsCheckEliminationState>(graph()->blocks()->length());
356
357 // Explicitly push the entry block.
358 stack[0].block_ = entry;
359 stack[0].bb_data_list_ = PreProcessBlock(entry);
360 stack[0].index_ = 0;
361 int stack_depth = 1;
362
363 // Implement depth-first traversal with a stack.
364 while (stack_depth > 0) {
365 int current = stack_depth - 1;
366 HBoundsCheckEliminationState* state = &stack[current];
367 const ZoneList<HBasicBlock*>* children = state->block_->dominated_blocks();
368
369 if (state->index_ < children->length()) {
370 // Recursively visit children blocks.
371 HBasicBlock* child = children->at(state->index_++);
372 int next = stack_depth++;
373 stack[next].block_ = child;
374 stack[next].bb_data_list_ = PreProcessBlock(child);
375 stack[next].index_ = 0;
376 } else {
377 // Finished with all children; post process the block.
378 PostProcessBlock(state->block_, state->bb_data_list_);
379 stack_depth--;
380 }
381 }
382 }
383
384
// Processes every HBoundsCheck in |bb| against the dictionary: a covered
// check is deleted; an uncovered check in a block that already has data for
// the same key widens that data (CoverCheck); otherwise, when hoisting is
// allowed, a new data element covering the union of the offsets shadows the
// dominator's entry. Returns the head of the list of data elements created
// for this block, which PostProcessBlock uses for cleanup.
BoundsCheckBbData* HBoundsCheckEliminationPhase::PreProcessBlock(
    HBasicBlock* bb) {
  BoundsCheckBbData* bb_data_list = NULL;

  for (HInstructionIterator it(bb); !it.Done(); it.Advance()) {
    HInstruction* i = it.Current();
    if (!i->IsBoundsCheck()) continue;

    HBoundsCheck* check = HBoundsCheck::cast(i);
    int32_t offset = 0;
    BoundsCheckKey* key =
        BoundsCheckKey::Create(zone(), check, &offset);
    // Checks whose index is not an int32/Smi expression are left untouched.
    if (key == NULL) continue;
    BoundsCheckBbData** data_p = table_.LookupOrInsert(key, zone());
    BoundsCheckBbData* data = *data_p;
    if (data == NULL) {
      // First check seen for this key: start tracking it with a range that
      // contains just this offset.
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   offset,
                                                   offset,
                                                   bb,
                                                   check,
                                                   check,
                                                   bb_data_list,
                                                   NULL);
      *data_p = bb_data_list;
      if (FLAG_trace_bce) {
        base::OS::Print("Fresh bounds check data for block #%d: [%d]\n",
                        bb->block_id(), offset);
      }
    } else if (data->OffsetIsCovered(offset)) {
      // A dominating check already covers this offset: the check is
      // redundant and can be removed.
      bb->graph()->isolate()->counters()->
          bounds_checks_eliminated()->Increment();
      if (FLAG_trace_bce) {
        base::OS::Print("Eliminating bounds check #%d, offset %d is covered\n",
                        check->id(), offset);
      }
      check->DeleteAndReplaceWith(check->ActualValue());
    } else if (data->BasicBlock() == bb) {
      // TODO(jkummerow): I think the following logic would be preferable:
      // if (data->Basicblock() == bb ||
      //     graph()->use_optimistic_licm() ||
      //     bb->IsLoopSuccessorDominator()) {
      //   data->CoverCheck(check, offset)
      // } else {
      //   /* add pristine BCBbData like in (data == NULL) case above */
      // }
      // Even better would be: distinguish between read-only dominator-imposed
      // knowledge and modifiable upper/lower checks.
      // What happens currently is that the first bounds check in a dominated
      // block will stay around while any further checks are hoisted out,
      // which doesn't make sense. Investigate/fix this in a future CL.
      data->CoverCheck(check, offset);
    } else if (graph()->use_optimistic_licm() ||
               bb->IsLoopSuccessorDominator()) {
      // The check lives in a dominated block and hoisting is allowed: push
      // a new data element whose range is the union of the dominator's
      // range and the new offset, shadowing the dominator's entry.
      int32_t new_lower_offset = offset < data->LowerOffset()
          ? offset
          : data->LowerOffset();
      int32_t new_upper_offset = offset > data->UpperOffset()
          ? offset
          : data->UpperOffset();
      bb_data_list = new(zone()) BoundsCheckBbData(key,
                                                   new_lower_offset,
                                                   new_upper_offset,
                                                   bb,
                                                   data->LowerCheck(),
                                                   data->UpperCheck(),
                                                   bb_data_list,
                                                   data);
      if (FLAG_trace_bce) {
        base::OS::Print("Updated bounds check data for block #%d: [%d - %d]\n",
                        bb->block_id(), new_lower_offset, new_upper_offset);
      }
      table_.Insert(key, bb_data_list, zone());
    }
  }

  return bb_data_list;
}
463
464
PostProcessBlock(HBasicBlock * block,BoundsCheckBbData * data)465 void HBoundsCheckEliminationPhase::PostProcessBlock(
466 HBasicBlock* block, BoundsCheckBbData* data) {
467 while (data != NULL) {
468 if (data->FatherInDominatorTree()) {
469 table_.Insert(data->Key(), data->FatherInDominatorTree(), zone());
470 } else {
471 table_.Delete(data->Key());
472 }
473 data = data->NextInBasicBlock();
474 }
475 }
476
477 } // namespace internal
478 } // namespace v8
479