1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "code_generator_x86.h"
18 #include "gc/accounting/card_table.h"
19 #include "utils/assembler.h"
20 #include "utils/stack_checks.h"
21 #include "utils/x86/assembler_x86.h"
22 #include "utils/x86/managed_register_x86.h"
23
24 #include "entrypoints/quick/quick_entrypoints.h"
25 #include "mirror/array.h"
26 #include "mirror/art_method.h"
27 #include "thread.h"
28
29 namespace art {
30
// Convenience accessor: view this location's register as an x86 managed register.
x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}
34
35 namespace x86 {
36
37 static constexpr bool kExplicitStackOverflowCheck = false;
38
39 static constexpr int kNumberOfPushedRegistersAtEntry = 1;
40 static constexpr int kCurrentMethodStackOffset = 0;
41
// Wraps a core CPU register in a register Location.
static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}
45
46 static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
47 static constexpr size_t kRuntimeParameterCoreRegistersLength =
48 arraysize(kRuntimeParameterCoreRegisters);
49
// Calling convention used when calling into the runtime (quick entrypoints):
// arguments go in EAX, ECX, EDX (see kRuntimeParameterCoreRegisters above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
59
60 #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
61
// Slow path for a failed null check: calls the pThrowNullPointer runtime
// entrypoint (which does not return) and records the dex pc for the stack map.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  // Dex pc of the null-checked instruction, needed for exception reporting.
  const uint32_t dex_pc_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
76
// Slow path for the explicit stack-overflow check: tears down the frame
// (restoring ESP to just below the pushed return pc) and tail-jumps to the
// pThrowStackOverflow entrypoint, which never returns.
class StackOverflowCheckSlowPathX86 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Undo the frame allocation done in GenerateFrameEntry so the throw sees
    // the caller's frame layout.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
91
92 class BoundsCheckSlowPathX86 : public SlowPathCode {
93 public:
BoundsCheckSlowPathX86(uint32_t dex_pc,Location index_location,Location length_location)94 explicit BoundsCheckSlowPathX86(uint32_t dex_pc,
95 Location index_location,
96 Location length_location)
97 : dex_pc_(dex_pc), index_location_(index_location), length_location_(length_location) {}
98
EmitNativeCode(CodeGenerator * codegen)99 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
100 CodeGeneratorX86* x86_codegen = reinterpret_cast<CodeGeneratorX86*>(codegen);
101 __ Bind(GetEntryLabel());
102 InvokeRuntimeCallingConvention calling_convention;
103 x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(0)), index_location_);
104 x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(1)), length_location_);
105 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
106 codegen->RecordPcInfo(dex_pc_);
107 }
108
109 private:
110 const uint32_t dex_pc_;
111 const Location index_location_;
112 const Location length_location_;
113
114 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
115 };
116
117 #undef __
118 #define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
119
X86Condition(IfCondition cond)120 inline Condition X86Condition(IfCondition cond) {
121 switch (cond) {
122 case kCondEQ: return kEqual;
123 case kCondNE: return kNotEqual;
124 case kCondLT: return kLess;
125 case kCondLE: return kLessEqual;
126 case kCondGT: return kGreater;
127 case kCondGE: return kGreaterEqual;
128 default:
129 LOG(FATAL) << "Unknown if condition";
130 }
131 return kEqual;
132 }
133
// Pretty-prints a core register number for debugging output.
void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}
137
// Pretty-prints an XMM register number for debugging output.
void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}
141
// Sets up the x86 code generator with its location builder, instruction
// visitor, and parallel-move resolver, all tied to the same graph.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
147
// Bytes spilled at frame entry; currently only the fake return-address slot.
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
151
// The register-pair blocked flags are stored in the same array as the
// single-register flags, immediately after the first kNumberOfAllocIds entries.
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}
155
// Allocates a free register (a pair for longs, a single CPU register for
// int-sized types) and updates `blocked_registers` so that no later
// allocation can hand out a register that overlaps the one returned.
ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      // Block the two single registers making up the pair.
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      // Block all other register pairs that share a register with `pair`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
          blocked_register_pairs[i] = true;
        }
      }
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs[i] = true;
        }
      }
      return X86ManagedRegister::FromCpuRegister(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // Floating point is not supported by this backend yet.
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}
210
// Marks the registers (and pairs) the register allocator must never hand out.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Any pair containing EDI is therefore unusable as well.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}
229
// Total number of allocatable register ids (singles; pairs are tracked separately).
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}
233
// The instruction visitor shares the assembler owned by the code generator.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
238
// Emits the method prologue: stack-overflow probe, frame allocation, and
// storing the current ArtMethod* (in EAX on entry) at the bottom of the frame.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: touch memory below the stack limit so a guard-page fault
    // is raised if the stack would overflow. The fault pc maps to dex pc 0.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end and branch to
    // the throwing slow path when below it.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // Spill the current method so stack walks can find it.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
263
// Emits the method epilogue: pops the frame (the return pc stays for `ret`).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
267
// Binds `label` to the current assembler position.
void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}
271
// Loads the current ArtMethod* (spilled in GenerateFrameEntry) into `reg`.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
275
GetStackLocation(HLoadLocal * load) const276 Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
277 switch (load->GetType()) {
278 case Primitive::kPrimLong:
279 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
280 break;
281
282 case Primitive::kPrimInt:
283 case Primitive::kPrimNot:
284 return Location::StackSlot(GetStackSlot(load->GetLocal()));
285
286 case Primitive::kPrimFloat:
287 case Primitive::kPrimDouble:
288 LOG(FATAL) << "Unimplemented type " << load->GetType();
289
290 case Primitive::kPrimBoolean:
291 case Primitive::kPrimByte:
292 case Primitive::kPrimChar:
293 case Primitive::kPrimShort:
294 case Primitive::kPrimVoid:
295 LOG(FATAL) << "Unexpected type " << load->GetType();
296 }
297
298 LOG(FATAL) << "Unreachable";
299 return Location();
300 }
301
// Returns the location of the next argument of type `type` under the dex
// calling convention, advancing the visitor's running register index.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Int-sized arguments take one register, then spill to the stack.
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      // Longs take two consecutive slots: a register pair if both fit in
      // registers, a "quick parameter" (register low + stack high) if the
      // value straddles the register/stack boundary, else a double stack slot.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      // Floating point is not supported by this backend yet.
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
342
Move32(Location destination,Location source)343 void CodeGeneratorX86::Move32(Location destination, Location source) {
344 if (source.Equals(destination)) {
345 return;
346 }
347 if (destination.IsRegister()) {
348 if (source.IsRegister()) {
349 __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
350 } else {
351 DCHECK(source.IsStackSlot());
352 __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
353 }
354 } else {
355 if (source.IsRegister()) {
356 __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
357 } else {
358 DCHECK(source.IsStackSlot());
359 __ pushl(Address(ESP, source.GetStackIndex()));
360 __ popl(Address(ESP, destination.GetStackIndex()));
361 }
362 }
363 }
364
// Emits a 64-bit move between register pairs, "quick parameter" locations
// (low half in a register, high half on the caller's stack) and double stack
// slots. Stack-to-stack halves go through push/pop.
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      // Low half lives in a convention register, high half in the caller's
      // frame (hence the GetFrameSize() adjustment).
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    // Destination is an outgoing argument: low half to a convention register,
    // high half to the outgoing-argument area of the current frame.
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // Memory-to-memory move of the high half via push/pop.
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      // High half comes from the caller's frame; move via push/pop.
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: both halves via push/pop.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}
422
// Materializes the value produced by `instruction` into `location`.
// Constants are emitted directly as immediates; locals are copied from their
// stack slots; otherwise the value is moved from the instruction's output
// location (only valid when `move_for` immediately follows it).
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    // 64-bit constants are split into two 32-bit immediates.
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // The output location is only live right after the defining instruction.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
480
// A goto has no operands, hence no location summary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
484
// Emits an unconditional branch, eliding the jump when the successor is the
// next block, and emitting the frame exit when branching to the exit block.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
493
// The exit block has no operands, hence no location summary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
497
// The exit block should never be reached at runtime (predecessors return or
// throw); in debug builds plant an int3 trap to catch fall-through bugs.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
504
// An HIf only needs an input location when its condition was materialized
// (stored as a boolean); otherwise the comparison is emitted inline by VisitIf.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
515
VisitIf(HIf * if_instr)516 void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
517 HInstruction* cond = if_instr->InputAt(0);
518 DCHECK(cond->IsCondition());
519 HCondition* condition = cond->AsCondition();
520 if (condition->NeedsMaterialization()) {
521 // Materialized condition, compare against 0
522 Location lhs = if_instr->GetLocations()->InAt(0);
523 if (lhs.IsRegister()) {
524 __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
525 } else {
526 __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
527 }
528 __ j(kEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
529 } else {
530 Location lhs = condition->GetLocations()->InAt(0);
531 Location rhs = condition->GetLocations()->InAt(1);
532 // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
533 if (rhs.IsRegister()) {
534 __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
535 } else if (rhs.IsConstant()) {
536 HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
537 Immediate imm(instruction->AsIntConstant()->GetValue());
538 __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
539 } else {
540 __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
541 }
542 __ j(X86Condition(condition->GetCondition()),
543 codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
544 }
545 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
546 __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
547 }
548 }
549
// Locals are pure bookkeeping; no location summary needed.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
553
// No code to emit; locals only exist in the entry block.
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
557
// Loads of locals are resolved at use sites; no location summary needed.
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
561
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
565
// Pins the stored value's input location to the local's stack slot, so the
// store itself happens when the input is placed — no code in VisitStoreLocal.
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}
587
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input location (set up in the locations builder) is the
  // local's stack slot, so the value lands there when its move is resolved.
}
590
// A comparison needs its LHS in a register; the RHS can live anywhere.
// An output register is only required when the boolean must be materialized.
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}
600
// Materializes the comparison result as a boolean (0/1) via cmpl + setb.
// When the condition feeds an HIf directly, the comparison is emitted there
// instead and nothing is generated here.
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              locations->InAt(1).AsX86().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // setb writes 1 to the output register when the condition holds, else 0.
    __ setb(X86Condition(comp->GetCondition()), locations->Out().AsX86().AsCpuRegister());
  }
}
618
// The concrete comparison nodes (==, !=, <, <=, >, >=) all share the generic
// HCondition handling above, both for locations and for code generation.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
666
// Constants live in a constant location; no register is reserved for them.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
672
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site (see CodeGeneratorX86::Move).
}
675
// Constants live in a constant location; no register pair is reserved.
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
681
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
685
// A void return has no operands, hence no location summary.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
689
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
694
// Pins the return value to the ABI return registers: EAX for int-sized
// values, EAX:EDX for longs.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}
717
// Verifies (debug only) that the return value already sits in the ABI return
// registers, then tears down the frame and returns.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
741
// Sets up locations for a static call: arguments follow the dex calling
// convention, EAX is reserved as a temp for the method pointer, and the
// result lands in the ABI return registers.
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // A call makes this method a non-leaf (affects stack checks and pc records).
  codegen_->MarkNotLeaf();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      // Floating point is not supported by this backend yet.
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
778
// Emits a static call by resolving the callee through the current method's
// dex cache and calling its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  // Byte offset of the callee's slot inside the dex cache methods array.
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86PointerSize).Int32Value()));

  // Record the pc after the call for stack-map / exception purposes.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
805
// Two-operand x86 add: the first input must be in a register and doubles as
// the output; the second input can live anywhere.
void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      // Sub-word adds are widened to int by the builder; they never appear here.
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}
829
VisitAdd(HAdd * add)830 void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
831 LocationSummary* locations = add->GetLocations();
832 switch (add->GetResultType()) {
833 case Primitive::kPrimInt: {
834 DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
835 locations->Out().AsX86().AsCpuRegister());
836 if (locations->InAt(1).IsRegister()) {
837 __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
838 locations->InAt(1).AsX86().AsCpuRegister());
839 } else if (locations->InAt(1).IsConstant()) {
840 HConstant* instruction = locations->InAt(1).GetConstant();
841 Immediate imm(instruction->AsIntConstant()->GetValue());
842 __ addl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
843 } else {
844 __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
845 Address(ESP, locations->InAt(1).GetStackIndex()));
846 }
847 break;
848 }
849
850 case Primitive::kPrimLong: {
851 DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
852 locations->Out().AsX86().AsRegisterPair());
853 if (locations->InAt(1).IsRegister()) {
854 __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
855 locations->InAt(1).AsX86().AsRegisterPairLow());
856 __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
857 locations->InAt(1).AsX86().AsRegisterPairHigh());
858 } else {
859 __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
860 Address(ESP, locations->InAt(1).GetStackIndex()));
861 __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
862 Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
863 }
864 break;
865 }
866
867 case Primitive::kPrimBoolean:
868 case Primitive::kPrimByte:
869 case Primitive::kPrimChar:
870 case Primitive::kPrimShort:
871 LOG(FATAL) << "Unexpected add type " << add->GetResultType();
872 break;
873
874 default:
875 LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
876 }
877 }
878
VisitSub(HSub * sub)879 void LocationsBuilderX86::VisitSub(HSub* sub) {
880 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
881 switch (sub->GetResultType()) {
882 case Primitive::kPrimInt:
883 case Primitive::kPrimLong: {
884 locations->SetInAt(0, Location::RequiresRegister());
885 locations->SetInAt(1, Location::Any());
886 locations->SetOut(Location::SameAsFirstInput());
887 break;
888 }
889
890 case Primitive::kPrimBoolean:
891 case Primitive::kPrimByte:
892 case Primitive::kPrimChar:
893 case Primitive::kPrimShort:
894 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
895 break;
896
897 default:
898 LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
899 }
900 sub->SetLocations(locations);
901 }
902
VisitSub(HSub * sub)903 void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
904 LocationSummary* locations = sub->GetLocations();
905 switch (sub->GetResultType()) {
906 case Primitive::kPrimInt: {
907 DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
908 locations->Out().AsX86().AsCpuRegister());
909 if (locations->InAt(1).IsRegister()) {
910 __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
911 locations->InAt(1).AsX86().AsCpuRegister());
912 } else if (locations->InAt(1).IsConstant()) {
913 HConstant* instruction = locations->InAt(1).GetConstant();
914 Immediate imm(instruction->AsIntConstant()->GetValue());
915 __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
916 } else {
917 __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
918 Address(ESP, locations->InAt(1).GetStackIndex()));
919 }
920 break;
921 }
922
923 case Primitive::kPrimLong: {
924 DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
925 locations->Out().AsX86().AsRegisterPair());
926 if (locations->InAt(1).IsRegister()) {
927 __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
928 locations->InAt(1).AsX86().AsRegisterPairLow());
929 __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
930 locations->InAt(1).AsX86().AsRegisterPairHigh());
931 } else {
932 __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
933 Address(ESP, locations->InAt(1).GetStackIndex()));
934 __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
935 Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
936 }
937 break;
938 }
939
940 case Primitive::kPrimBoolean:
941 case Primitive::kPrimByte:
942 case Primitive::kPrimChar:
943 case Primitive::kPrimShort:
944 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
945 break;
946
947 default:
948 LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
949 }
950 }
951
VisitNewInstance(HNewInstance * instruction)952 void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
953 codegen_->MarkNotLeaf();
954 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
955 locations->SetOut(X86CpuLocation(EAX));
956 InvokeRuntimeCallingConvention calling_convention;
957 locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
958 locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
959 instruction->SetLocations(locations);
960 }
961
VisitNewInstance(HNewInstance * instruction)962 void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
963 InvokeRuntimeCallingConvention calling_convention;
964 LoadCurrentMethod(calling_convention.GetRegisterAt(1));
965 __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
966
967 __ fs()->call(
968 Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));
969
970 codegen_->RecordPcInfo(instruction->GetDexPc());
971 DCHECK(!codegen_->IsLeafMethod());
972 }
973
VisitParameterValue(HParameterValue * instruction)974 void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
975 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
976 Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
977 if (location.IsStackSlot()) {
978 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
979 } else if (location.IsDoubleStackSlot()) {
980 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
981 }
982 locations->SetOut(location);
983 instruction->SetLocations(locations);
984 }
985
VisitParameterValue(HParameterValue * instruction)986 void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
987 }
988
VisitNot(HNot * instruction)989 void LocationsBuilderX86::VisitNot(HNot* instruction) {
990 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
991 locations->SetInAt(0, Location::RequiresRegister());
992 locations->SetOut(Location::SameAsFirstInput());
993 instruction->SetLocations(locations);
994 }
995
VisitNot(HNot * instruction)996 void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
997 LocationSummary* locations = instruction->GetLocations();
998 Location out = locations->Out();
999 DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
1000 __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
1001 }
1002
VisitCompare(HCompare * compare)1003 void LocationsBuilderX86::VisitCompare(HCompare* compare) {
1004 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
1005 locations->SetInAt(0, Location::RequiresRegister());
1006 locations->SetInAt(1, Location::Any());
1007 locations->SetOut(Location::RequiresRegister());
1008 compare->SetLocations(locations);
1009 }
1010
VisitCompare(HCompare * compare)1011 void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
1012 Label greater, done;
1013 LocationSummary* locations = compare->GetLocations();
1014 switch (compare->InputAt(0)->GetType()) {
1015 case Primitive::kPrimLong: {
1016 Label less, greater, done;
1017 Register output = locations->Out().AsX86().AsCpuRegister();
1018 X86ManagedRegister left = locations->InAt(0).AsX86();
1019 Location right = locations->InAt(1);
1020 if (right.IsRegister()) {
1021 __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
1022 } else {
1023 DCHECK(right.IsDoubleStackSlot());
1024 __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1025 }
1026 __ j(kLess, &less); // Signed compare.
1027 __ j(kGreater, &greater); // Signed compare.
1028 if (right.IsRegister()) {
1029 __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
1030 } else {
1031 DCHECK(right.IsDoubleStackSlot());
1032 __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
1033 }
1034 __ movl(output, Immediate(0));
1035 __ j(kEqual, &done);
1036 __ j(kBelow, &less); // Unsigned compare.
1037
1038 __ Bind(&greater);
1039 __ movl(output, Immediate(1));
1040 __ jmp(&done);
1041
1042 __ Bind(&less);
1043 __ movl(output, Immediate(-1));
1044
1045 __ Bind(&done);
1046 break;
1047 }
1048 default:
1049 LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1050 }
1051 }
1052
VisitPhi(HPhi * instruction)1053 void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
1054 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1055 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1056 locations->SetInAt(i, Location::Any());
1057 }
1058 locations->SetOut(Location::Any());
1059 instruction->SetLocations(locations);
1060 }
1061
VisitPhi(HPhi * instruction)1062 void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
1063 LOG(FATAL) << "Unreachable";
1064 }
1065
VisitInstanceFieldSet(HInstanceFieldSet * instruction)1066 void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1067 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1068 locations->SetInAt(0, Location::RequiresRegister());
1069 Primitive::Type field_type = instruction->InputAt(1)->GetType();
1070 if (field_type == Primitive::kPrimBoolean || field_type == Primitive::kPrimByte) {
1071 // Ensure the value is in a byte register.
1072 locations->SetInAt(1, X86CpuLocation(EAX));
1073 } else {
1074 locations->SetInAt(1, Location::RequiresRegister());
1075 }
1076 // Temporary registers for the write barrier.
1077 if (field_type == Primitive::kPrimNot) {
1078 locations->AddTemp(Location::RequiresRegister());
1079 // Ensure the card is in a byte register.
1080 locations->AddTemp(X86CpuLocation(ECX));
1081 }
1082 instruction->SetLocations(locations);
1083 }
1084
VisitInstanceFieldSet(HInstanceFieldSet * instruction)1085 void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1086 LocationSummary* locations = instruction->GetLocations();
1087 Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1088 uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1089 Primitive::Type field_type = instruction->InputAt(1)->GetType();
1090
1091 switch (field_type) {
1092 case Primitive::kPrimBoolean:
1093 case Primitive::kPrimByte: {
1094 ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
1095 __ movb(Address(obj, offset), value);
1096 break;
1097 }
1098
1099 case Primitive::kPrimShort:
1100 case Primitive::kPrimChar: {
1101 Register value = locations->InAt(1).AsX86().AsCpuRegister();
1102 __ movw(Address(obj, offset), value);
1103 break;
1104 }
1105
1106 case Primitive::kPrimInt:
1107 case Primitive::kPrimNot: {
1108 Register value = locations->InAt(1).AsX86().AsCpuRegister();
1109 __ movl(Address(obj, offset), value);
1110
1111 if (field_type == Primitive::kPrimNot) {
1112 Register temp = locations->GetTemp(0).AsX86().AsCpuRegister();
1113 Register card = locations->GetTemp(1).AsX86().AsCpuRegister();
1114 codegen_->MarkGCCard(temp, card, obj, value);
1115 }
1116 break;
1117 }
1118
1119 case Primitive::kPrimLong: {
1120 X86ManagedRegister value = locations->InAt(1).AsX86();
1121 __ movl(Address(obj, offset), value.AsRegisterPairLow());
1122 __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
1123 break;
1124 }
1125
1126 case Primitive::kPrimFloat:
1127 case Primitive::kPrimDouble:
1128 LOG(FATAL) << "Unimplemented register type " << field_type;
1129
1130 case Primitive::kPrimVoid:
1131 LOG(FATAL) << "Unreachable type " << field_type;
1132 }
1133 }
1134
MarkGCCard(Register temp,Register card,Register object,Register value)1135 void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
1136 Label is_null;
1137 __ testl(value, value);
1138 __ j(kEqual, &is_null);
1139 __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
1140 __ movl(temp, object);
1141 __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
1142 __ movb(Address(temp, card, TIMES_1, 0),
1143 X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
1144 __ Bind(&is_null);
1145 }
1146
VisitInstanceFieldGet(HInstanceFieldGet * instruction)1147 void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1148 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1149 locations->SetInAt(0, Location::RequiresRegister());
1150 locations->SetOut(Location::RequiresRegister());
1151 instruction->SetLocations(locations);
1152 }
1153
VisitInstanceFieldGet(HInstanceFieldGet * instruction)1154 void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1155 LocationSummary* locations = instruction->GetLocations();
1156 Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1157 uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1158
1159 switch (instruction->GetType()) {
1160 case Primitive::kPrimBoolean: {
1161 Register out = locations->Out().AsX86().AsCpuRegister();
1162 __ movzxb(out, Address(obj, offset));
1163 break;
1164 }
1165
1166 case Primitive::kPrimByte: {
1167 Register out = locations->Out().AsX86().AsCpuRegister();
1168 __ movsxb(out, Address(obj, offset));
1169 break;
1170 }
1171
1172 case Primitive::kPrimShort: {
1173 Register out = locations->Out().AsX86().AsCpuRegister();
1174 __ movsxw(out, Address(obj, offset));
1175 break;
1176 }
1177
1178 case Primitive::kPrimChar: {
1179 Register out = locations->Out().AsX86().AsCpuRegister();
1180 __ movzxw(out, Address(obj, offset));
1181 break;
1182 }
1183
1184 case Primitive::kPrimInt:
1185 case Primitive::kPrimNot: {
1186 Register out = locations->Out().AsX86().AsCpuRegister();
1187 __ movl(out, Address(obj, offset));
1188 break;
1189 }
1190
1191 case Primitive::kPrimLong: {
1192 // TODO: support volatile.
1193 X86ManagedRegister out = locations->Out().AsX86();
1194 __ movl(out.AsRegisterPairLow(), Address(obj, offset));
1195 __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
1196 break;
1197 }
1198
1199 case Primitive::kPrimFloat:
1200 case Primitive::kPrimDouble:
1201 LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1202
1203 case Primitive::kPrimVoid:
1204 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1205 }
1206 }
1207
VisitNullCheck(HNullCheck * instruction)1208 void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1209 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1210 locations->SetInAt(0, Location::Any());
1211 // TODO: Have a normalization phase that makes this instruction never used.
1212 locations->SetOut(Location::SameAsFirstInput());
1213 instruction->SetLocations(locations);
1214 }
1215
VisitNullCheck(HNullCheck * instruction)1216 void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1217 SlowPathCode* slow_path =
1218 new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction->GetDexPc());
1219 codegen_->AddSlowPath(slow_path);
1220
1221 LocationSummary* locations = instruction->GetLocations();
1222 Location obj = locations->InAt(0);
1223 DCHECK(obj.Equals(locations->Out()));
1224
1225 if (obj.IsRegister()) {
1226 __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1227 } else {
1228 DCHECK(locations->InAt(0).IsStackSlot());
1229 __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1230 }
1231 __ j(kEqual, slow_path->GetEntryLabel());
1232 }
1233
VisitArrayGet(HArrayGet * instruction)1234 void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
1235 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1236 locations->SetInAt(0, Location::RequiresRegister());
1237 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1238 locations->SetOut(Location::RequiresRegister());
1239 instruction->SetLocations(locations);
1240 }
1241
VisitArrayGet(HArrayGet * instruction)1242 void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
1243 LocationSummary* locations = instruction->GetLocations();
1244 Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1245 Location index = locations->InAt(1);
1246
1247 switch (instruction->GetType()) {
1248 case Primitive::kPrimBoolean: {
1249 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1250 Register out = locations->Out().AsX86().AsCpuRegister();
1251 if (index.IsConstant()) {
1252 __ movzxb(out, Address(obj,
1253 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
1254 } else {
1255 __ movzxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
1256 }
1257 break;
1258 }
1259
1260 case Primitive::kPrimByte: {
1261 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
1262 Register out = locations->Out().AsX86().AsCpuRegister();
1263 if (index.IsConstant()) {
1264 __ movsxb(out, Address(obj,
1265 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
1266 } else {
1267 __ movsxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
1268 }
1269 break;
1270 }
1271
1272 case Primitive::kPrimShort: {
1273 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
1274 Register out = locations->Out().AsX86().AsCpuRegister();
1275 if (index.IsConstant()) {
1276 __ movsxw(out, Address(obj,
1277 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
1278 } else {
1279 __ movsxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
1280 }
1281 break;
1282 }
1283
1284 case Primitive::kPrimChar: {
1285 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1286 Register out = locations->Out().AsX86().AsCpuRegister();
1287 if (index.IsConstant()) {
1288 __ movzxw(out, Address(obj,
1289 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
1290 } else {
1291 __ movzxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
1292 }
1293 break;
1294 }
1295
1296 case Primitive::kPrimInt:
1297 case Primitive::kPrimNot: {
1298 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1299 Register out = locations->Out().AsX86().AsCpuRegister();
1300 if (index.IsConstant()) {
1301 __ movl(out, Address(obj,
1302 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
1303 } else {
1304 __ movl(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset));
1305 }
1306 break;
1307 }
1308
1309 case Primitive::kPrimLong: {
1310 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1311 X86ManagedRegister out = locations->Out().AsX86();
1312 if (index.IsConstant()) {
1313 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1314 __ movl(out.AsRegisterPairLow(), Address(obj, offset));
1315 __ movl(out.AsRegisterPairHigh(), Address(obj, offset + kX86WordSize));
1316 } else {
1317 __ movl(out.AsRegisterPairLow(),
1318 Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset));
1319 __ movl(out.AsRegisterPairHigh(),
1320 Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize));
1321 }
1322 break;
1323 }
1324
1325 case Primitive::kPrimFloat:
1326 case Primitive::kPrimDouble:
1327 LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1328
1329 case Primitive::kPrimVoid:
1330 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1331 }
1332 }
1333
VisitArraySet(HArraySet * instruction)1334 void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
1335 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1336 Primitive::Type value_type = instruction->InputAt(2)->GetType();
1337 if (value_type == Primitive::kPrimNot) {
1338 InvokeRuntimeCallingConvention calling_convention;
1339 locations->SetInAt(0, X86CpuLocation(calling_convention.GetRegisterAt(0)));
1340 locations->SetInAt(1, X86CpuLocation(calling_convention.GetRegisterAt(1)));
1341 locations->SetInAt(2, X86CpuLocation(calling_convention.GetRegisterAt(2)));
1342 codegen_->MarkNotLeaf();
1343 } else {
1344 locations->SetInAt(0, Location::RequiresRegister());
1345 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1346 if (value_type == Primitive::kPrimBoolean || value_type == Primitive::kPrimByte) {
1347 // Ensure the value is in a byte register.
1348 locations->SetInAt(2, X86CpuLocation(EAX));
1349 } else {
1350 locations->SetInAt(2, Location::RequiresRegister());
1351 }
1352 }
1353
1354 instruction->SetLocations(locations);
1355 }
1356
VisitArraySet(HArraySet * instruction)1357 void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
1358 LocationSummary* locations = instruction->GetLocations();
1359 Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1360 Location index = locations->InAt(1);
1361 Primitive::Type value_type = instruction->InputAt(2)->GetType();
1362
1363 switch (value_type) {
1364 case Primitive::kPrimBoolean:
1365 case Primitive::kPrimByte: {
1366 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1367 ByteRegister value = locations->InAt(2).AsX86().AsByteRegister();
1368 if (index.IsConstant()) {
1369 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1370 __ movb(Address(obj, offset), value);
1371 } else {
1372 __ movb(Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset), value);
1373 }
1374 break;
1375 }
1376
1377 case Primitive::kPrimShort:
1378 case Primitive::kPrimChar: {
1379 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1380 Register value = locations->InAt(2).AsX86().AsCpuRegister();
1381 if (index.IsConstant()) {
1382 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1383 __ movw(Address(obj, offset), value);
1384 } else {
1385 __ movw(Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset), value);
1386 }
1387 break;
1388 }
1389
1390 case Primitive::kPrimInt: {
1391 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1392 Register value = locations->InAt(2).AsX86().AsCpuRegister();
1393 if (index.IsConstant()) {
1394 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1395 __ movl(Address(obj, offset), value);
1396 } else {
1397 __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset), value);
1398 }
1399 break;
1400 }
1401
1402 case Primitive::kPrimNot: {
1403 DCHECK(!codegen_->IsLeafMethod());
1404 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
1405 codegen_->RecordPcInfo(instruction->GetDexPc());
1406 break;
1407 }
1408
1409 case Primitive::kPrimLong: {
1410 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1411 X86ManagedRegister value = locations->InAt(2).AsX86();
1412 if (index.IsConstant()) {
1413 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1414 __ movl(Address(obj, offset), value.AsRegisterPairLow());
1415 __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh());
1416 } else {
1417 __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset),
1418 value.AsRegisterPairLow());
1419 __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize),
1420 value.AsRegisterPairHigh());
1421 }
1422 break;
1423 }
1424
1425 case Primitive::kPrimFloat:
1426 case Primitive::kPrimDouble:
1427 LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1428
1429 case Primitive::kPrimVoid:
1430 LOG(FATAL) << "Unreachable type " << instruction->GetType();
1431 }
1432 }
1433
VisitArrayLength(HArrayLength * instruction)1434 void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
1435 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1436 locations->SetInAt(0, Location::RequiresRegister());
1437 locations->SetOut(Location::RequiresRegister());
1438 instruction->SetLocations(locations);
1439 }
1440
VisitArrayLength(HArrayLength * instruction)1441 void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
1442 LocationSummary* locations = instruction->GetLocations();
1443 uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
1444 Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1445 Register out = locations->Out().AsX86().AsCpuRegister();
1446 __ movl(out, Address(obj, offset));
1447 }
1448
VisitBoundsCheck(HBoundsCheck * instruction)1449 void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1450 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1451 locations->SetInAt(0, Location::RequiresRegister());
1452 locations->SetInAt(1, Location::RequiresRegister());
1453 // TODO: Have a normalization phase that makes this instruction never used.
1454 locations->SetOut(Location::SameAsFirstInput());
1455 instruction->SetLocations(locations);
1456 }
1457
VisitBoundsCheck(HBoundsCheck * instruction)1458 void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1459 LocationSummary* locations = instruction->GetLocations();
1460 SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
1461 instruction->GetDexPc(), locations->InAt(0), locations->InAt(1));
1462 codegen_->AddSlowPath(slow_path);
1463
1464 Register index = locations->InAt(0).AsX86().AsCpuRegister();
1465 Register length = locations->InAt(1).AsX86().AsCpuRegister();
1466
1467 __ cmpl(index, length);
1468 __ j(kAboveEqual, slow_path->GetEntryLabel());
1469 }
1470
VisitTemporary(HTemporary * temp)1471 void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
1472 temp->SetLocations(nullptr);
1473 }
1474
VisitTemporary(HTemporary * temp)1475 void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
1476 // Nothing to do, this is driven by the code generator.
1477 }
1478
VisitParallelMove(HParallelMove * instruction)1479 void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
1480 LOG(FATAL) << "Unreachable";
1481 }
1482
VisitParallelMove(HParallelMove * instruction)1483 void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
1484 codegen_->GetMoveResolver()->EmitNativeCode(instruction);
1485 }
1486
GetAssembler() const1487 X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
1488 return codegen_->GetAssembler();
1489 }
1490
MoveMemoryToMemory(int dst,int src)1491 void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
1492 ScratchRegisterScope ensure_scratch(
1493 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1494 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1495 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
1496 __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
1497 }
1498
EmitMove(size_t index)1499 void ParallelMoveResolverX86::EmitMove(size_t index) {
1500 MoveOperands* move = moves_.Get(index);
1501 Location source = move->GetSource();
1502 Location destination = move->GetDestination();
1503
1504 if (source.IsRegister()) {
1505 if (destination.IsRegister()) {
1506 __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1507 } else {
1508 DCHECK(destination.IsStackSlot());
1509 __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
1510 }
1511 } else if (source.IsStackSlot()) {
1512 if (destination.IsRegister()) {
1513 __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
1514 } else {
1515 DCHECK(destination.IsStackSlot());
1516 MoveMemoryToMemory(destination.GetStackIndex(),
1517 source.GetStackIndex());
1518 }
1519 } else if (source.IsConstant()) {
1520 HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1521 Immediate imm(instruction->AsIntConstant()->GetValue());
1522 if (destination.IsRegister()) {
1523 __ movl(destination.AsX86().AsCpuRegister(), imm);
1524 } else {
1525 __ movl(Address(ESP, destination.GetStackIndex()), imm);
1526 }
1527 } else {
1528 LOG(FATAL) << "Unimplemented";
1529 }
1530 }
1531
Exchange(Register reg,int mem)1532 void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
1533 Register suggested_scratch = reg == EAX ? EBX : EAX;
1534 ScratchRegisterScope ensure_scratch(
1535 this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1536
1537 int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1538 __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
1539 __ movl(Address(ESP, mem + stack_offset), reg);
1540 __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
1541 }
1542
Exchange(int mem1,int mem2)1543 void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
1544 ScratchRegisterScope ensure_scratch1(
1545 this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1546
1547 Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
1548 ScratchRegisterScope ensure_scratch2(
1549 this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1550
1551 int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
1552 stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
1553 __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
1554 __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
1555 __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
1556 __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
1557 }
1558
EmitSwap(size_t index)1559 void ParallelMoveResolverX86::EmitSwap(size_t index) {
1560 MoveOperands* move = moves_.Get(index);
1561 Location source = move->GetSource();
1562 Location destination = move->GetDestination();
1563
1564 if (source.IsRegister() && destination.IsRegister()) {
1565 __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1566 } else if (source.IsRegister() && destination.IsStackSlot()) {
1567 Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
1568 } else if (source.IsStackSlot() && destination.IsRegister()) {
1569 Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
1570 } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1571 Exchange(destination.GetStackIndex(), source.GetStackIndex());
1572 } else {
1573 LOG(FATAL) << "Unimplemented";
1574 }
1575 }
1576
SpillScratch(int reg)1577 void ParallelMoveResolverX86::SpillScratch(int reg) {
1578 __ pushl(static_cast<Register>(reg));
1579 }
1580
RestoreScratch(int reg)1581 void ParallelMoveResolverX86::RestoreScratch(int reg) {
1582 __ popl(static_cast<Register>(reg));
1583 }
1584
1585 } // namespace x86
1586 } // namespace art
1587