// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/arm/lithium-codegen-arm.h"
#include "src/crankshaft/arm/lithium-gap-resolver-arm.h"

namespace v8 {
namespace internal {

// We use the root register to spill a value while breaking a cycle in parallel
// moves. We don't need access to roots while resolving the move list, and
// using the root register has two advantages:
//  - It is not in the crankshaft allocatable registers list, so it can't
//    interfere with any of the moves we are resolving.
//  - We don't need to push it on the stack, as we can reload it with its value
//    once we have resolved a cycle.
#define kSavedValueRegister kRootRegister
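
// For example, the parallel move {r0 <- r1, r1 <- r0} is a cycle: each move
// blocks the other. The resolver emits roughly:
//   mov kSavedValueRegister, r0  ; BreakCycle spills the blocked source
//   mov r0, r1                   ; the starting move, now unblocked
//   mov r1, kSavedValueRegister  ; RestoreValue finishes the eliminated move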


LGapResolver::LGapResolver(LCodeGen* owner)
    : cgen_(owner), moves_(32, owner->zone()), root_index_(0), in_cycle_(false),
      saved_destination_(NULL), need_to_restore_root_(false) { }


#define __ ACCESS_MASM(cgen_->masm())

void LGapResolver::Resolve(LParallelMove* parallel_move) {
  DCHECK(moves_.is_empty());
  // Build up a worklist of moves.
  BuildInitialMoveList(parallel_move);

  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands move = moves_[i];
    // Skip constants to perform them last.  They don't block other moves
    // and skipping such moves with register destinations keeps those
    // registers free for the whole algorithm.
    if (!move.IsEliminated() && !move.source()->IsConstantOperand()) {
      root_index_ = i;  // Any cycle is found by reaching this move again.
      PerformMove(i);
      if (in_cycle_) {
        RestoreValue();
      }
    }
  }

  // Perform the moves with constant sources.
  for (int i = 0; i < moves_.length(); ++i) {
    if (!moves_[i].IsEliminated()) {
      DCHECK(moves_[i].source()->IsConstantOperand());
      EmitMove(i);
    }
  }

  if (need_to_restore_root_) {
    DCHECK(kSavedValueRegister.is(kRootRegister));
    __ InitializeRootRegister();
    need_to_restore_root_ = false;
  }

  moves_.Rewind(0);
}


void LGapResolver::BuildInitialMoveList(LParallelMove* parallel_move) {
  // Perform a linear sweep of the moves to add them to the initial list of
  // moves to perform, ignoring any move that is redundant (the source is
  // the same as the destination, the destination is ignored and
  // unallocated, or the move was already eliminated).
  const ZoneList<LMoveOperands>* moves = parallel_move->move_operands();
  for (int i = 0; i < moves->length(); ++i) {
    LMoveOperands move = moves->at(i);
    if (!move.IsRedundant()) moves_.Add(move, cgen_->zone());
  }
  Verify();
}


void LGapResolver::PerformMove(int index) {
  // Each call to this function performs a move and deletes it from the move
  // graph.  We first recursively perform any move blocking this one.  We
  // mark a move as "pending" on entry to PerformMove in order to detect
  // cycles in the move graph.

  // When doing a depth-first traversal of moves, we can only find a cycle by
  // encountering the starting move again. So by spilling the source of the
  // starting move, we break the cycle.  All moves are then unblocked, and the
  // starting move is completed by writing the spilled value to its
  // destination.  All other moves from the spilled source have been completed
  // prior to breaking the cycle.
  // An additional complication is that moves to MemOperands with large
  // offsets (more than 1K or 4K) require us to spill this spilled value to
  // the stack, to free up the register.
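  // (On ARM, ldr/str immediate offsets are encodable in 12 bits, i.e. up to
  // 4KB, while vldr/vstr offsets are 8 bits of words, i.e. up to 1KB.)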
  DCHECK(!moves_[index].IsPending());
  DCHECK(!moves_[index].IsRedundant());

  // Clear this move's destination to indicate a pending move.  The actual
  // destination is saved in a stack allocated local.  Multiple moves can
  // be pending because this function is recursive.
  DCHECK(moves_[index].source() != NULL);  // Or else it will look eliminated.
  LOperand* destination = moves_[index].destination();
  moves_[index].set_destination(NULL);

  // Perform a depth-first traversal of the move graph to resolve
  // dependencies.  Any unperformed, unpending move with a source the same
  // as this one's destination blocks this one so recursively perform all
  // such moves.
  for (int i = 0; i < moves_.length(); ++i) {
    LMoveOperands other_move = moves_[i];
    if (other_move.Blocks(destination) && !other_move.IsPending()) {
      PerformMove(i);
      // If there is a blocking, pending move it must be moves_[root_index_]
      // and all other moves with the same source as moves_[root_index_] are
      // successfully executed (because they are cycle-free) by this loop.
    }
  }

  // We are about to resolve this move and don't need it marked as
  // pending, so restore its destination.
  moves_[index].set_destination(destination);

  // The move may be blocked on a pending move, which must be the starting move.
  // In this case, we have a cycle, and we save the source of this move to
  // a scratch register to break it.
  LMoveOperands other_move = moves_[root_index_];
  if (other_move.Blocks(destination)) {
    DCHECK(other_move.IsPending());
    BreakCycle(index);
    return;
  }

  // This move is no longer blocked.
  EmitMove(index);
}


void LGapResolver::Verify() {
#ifdef ENABLE_SLOW_DCHECKS
  // No operand should be the destination for more than one move.
  for (int i = 0; i < moves_.length(); ++i) {
    LOperand* destination = moves_[i].destination();
    for (int j = i + 1; j < moves_.length(); ++j) {
      SLOW_DCHECK(!destination->Equals(moves_[j].destination()));
    }
  }
#endif
}


void LGapResolver::BreakCycle(int index) {
  // We save the source of this move in a register and remember its
  // destination. Then we mark this move as resolved, so the cycle is
  // broken and we can perform the other moves.
  DCHECK(moves_[index].destination()->Equals(moves_[root_index_].source()));
  DCHECK(!in_cycle_);
  in_cycle_ = true;
  LOperand* source = moves_[index].source();
  saved_destination_ = moves_[index].destination();
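  // Core values are spilled to kSavedValueRegister (the root register), which
  // must be restored afterwards; double values go to the VFP scratch register,
  // which does not alias the root register and needs no restore.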
  if (source->IsRegister()) {
    need_to_restore_root_ = true;
    __ mov(kSavedValueRegister, cgen_->ToRegister(source));
  } else if (source->IsStackSlot()) {
    need_to_restore_root_ = true;
    __ ldr(kSavedValueRegister, cgen_->ToMemOperand(source));
  } else if (source->IsDoubleRegister()) {
    __ vmov(kScratchDoubleReg, cgen_->ToDoubleRegister(source));
  } else if (source->IsDoubleStackSlot()) {
    __ vldr(kScratchDoubleReg, cgen_->ToMemOperand(source));
  } else {
    UNREACHABLE();
  }
  // This move will be done by restoring the saved value to the destination.
  moves_[index].Eliminate();
}


void LGapResolver::RestoreValue() {
  DCHECK(in_cycle_);
  DCHECK(saved_destination_ != NULL);

  if (saved_destination_->IsRegister()) {
    __ mov(cgen_->ToRegister(saved_destination_), kSavedValueRegister);
  } else if (saved_destination_->IsStackSlot()) {
    __ str(kSavedValueRegister, cgen_->ToMemOperand(saved_destination_));
  } else if (saved_destination_->IsDoubleRegister()) {
    __ vmov(cgen_->ToDoubleRegister(saved_destination_), kScratchDoubleReg);
  } else if (saved_destination_->IsDoubleStackSlot()) {
    __ vstr(kScratchDoubleReg, cgen_->ToMemOperand(saved_destination_));
  } else {
    UNREACHABLE();
  }

  in_cycle_ = false;
  saved_destination_ = NULL;
}


void LGapResolver::EmitMove(int index) {
  LOperand* source = moves_[index].source();
  LOperand* destination = moves_[index].destination();

  // Dispatch on the source and destination operand kinds.  Not all
  // combinations are possible.

  if (source->IsRegister()) {
    Register source_register = cgen_->ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(cgen_->ToRegister(destination), source_register);
    } else {
      DCHECK(destination->IsStackSlot());
      __ str(source_register, cgen_->ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsRegister()) {
      __ ldr(cgen_->ToRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (!destination_operand.OffsetIsUint12Encodable()) {
        // ip is overwritten while saving the value to the destination.
        // Therefore we can't use ip.  It is OK if the read from the source
        // destroys ip, since that happens before the value is read.
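        // Instead, funnel the 32-bit value through the low single-precision
        // half of the VFP scratch register.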
        __ vldr(kScratchDoubleReg.low(), source_operand);
        __ vstr(kScratchDoubleReg.low(), destination_operand);
      } else {
        __ ldr(ip, source_operand);
        __ str(ip, destination_operand);
      }
    }

  } else if (source->IsConstantOperand()) {
    LConstantOperand* constant_source = LConstantOperand::cast(source);
    if (destination->IsRegister()) {
      Register dst = cgen_->ToRegister(destination);
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(dst, Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(dst, cgen_->ToHandle(constant_source));
      }
    } else if (destination->IsDoubleRegister()) {
      DwVfpRegister result = cgen_->ToDoubleRegister(destination);
      double v = cgen_->ToDouble(constant_source);
      __ Vmov(result, v, ip);
    } else {
      DCHECK(destination->IsStackSlot());
      DCHECK(!in_cycle_);  // Constant moves happen after all cycles are gone.
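      // The root register is therefore free to serve as a temporary; Resolve()
      // reinitializes it after all moves have been emitted.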
      need_to_restore_root_ = true;
      Representation r = cgen_->IsSmi(constant_source)
          ? Representation::Smi() : Representation::Integer32();
      if (cgen_->IsInteger32(constant_source)) {
        __ mov(kSavedValueRegister,
               Operand(cgen_->ToRepresentation(constant_source, r)));
      } else {
        __ Move(kSavedValueRegister, cgen_->ToHandle(constant_source));
      }
      __ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleRegister()) {
    DwVfpRegister source_register = cgen_->ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      __ vmov(cgen_->ToDoubleRegister(destination), source_register);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ vstr(source_register, cgen_->ToMemOperand(destination));
    }

  } else if (source->IsDoubleStackSlot()) {
    MemOperand source_operand = cgen_->ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ vldr(cgen_->ToDoubleRegister(destination), source_operand);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand destination_operand = cgen_->ToMemOperand(destination);
      if (in_cycle_) {
        // kScratchDoubleReg was used to break the cycle.
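        // Preserve the spilled cycle value across this memory-to-memory copy.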
        __ vpush(kScratchDoubleReg);
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
        __ vpop(kScratchDoubleReg);
      } else {
        __ vldr(kScratchDoubleReg, source_operand);
        __ vstr(kScratchDoubleReg, destination_operand);
      }
    }
  } else {
    UNREACHABLE();
  }

  moves_[index].Eliminate();
}


#undef __

}  // namespace internal
}  // namespace v8