
Searched refs:__ (Results 1 – 25 of 36) sorted by relevance

/art/compiler/utils/
assembler_thumb_test.cc
179 #define __ assembler-> macro
183 __ FinalizeCode(); in EmitAndCheck()
184 size_t cs = __ CodeSize(); in EmitAndCheck()
187 __ FinalizeInstructions(code); in EmitAndCheck()
200 #undef __
211 #define __ assembler. macro
214 __ movs(R0, ShifterOperand(R1)); in TEST_F()
215 __ mov(R0, ShifterOperand(R1)); in TEST_F()
216 __ mov(R8, ShifterOperand(R9)); in TEST_F()
218 __ mov(R0, ShifterOperand(1)); in TEST_F()
[all …]
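
Every hit above follows the same ART convention: the file locally #defines __ to the expression that reaches the current assembler, so emission code reads like an assembly listing, then #undefs (and often rebinds) it when that section ends, as assembler_thumb_test.cc does at lines 179, 200 and 211. Identifiers containing double underscores are formally reserved in C++; ART uses one anyway for brevity and keeps it tightly scoped. A minimal compilable sketch of the idiom, with a toy assembler type standing in for ART's real interfaces:

    #include <cstdio>

    struct ToyAssembler {                  // stand-in for e.g. the Thumb2 assembler
      void movs(int rd, int rm) { std::printf("movs r%d, r%d\n", rd, rm); }
      void mov(int rd, int rm)  { std::printf("mov  r%d, r%d\n", rd, rm); }
    };

    #define __ assembler->                 // "__ op(...)" now reads like an assembly listing

    void EmitMoves(ToyAssembler* assembler) {
      __ movs(0, 1);                       // expands to assembler->movs(0, 1)
      __ mov(8, 9);                        // expands to assembler->mov(8, 9)
    }

    #undef __                              // confine the shorthand to this section of the file
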
/art/compiler/utils/arm/
assembler_thumb2_test.cc
103 #define __ GetAssembler()-> macro
106 __ sbfx(arm::R0, arm::R1, 0, 1); in TEST_F()
107 __ sbfx(arm::R0, arm::R1, 0, 8); in TEST_F()
108 __ sbfx(arm::R0, arm::R1, 0, 16); in TEST_F()
109 __ sbfx(arm::R0, arm::R1, 0, 32); in TEST_F()
111 __ sbfx(arm::R0, arm::R1, 8, 1); in TEST_F()
112 __ sbfx(arm::R0, arm::R1, 8, 8); in TEST_F()
113 __ sbfx(arm::R0, arm::R1, 8, 16); in TEST_F()
114 __ sbfx(arm::R0, arm::R1, 8, 24); in TEST_F()
116 __ sbfx(arm::R0, arm::R1, 16, 1); in TEST_F()
[all …]
jni_macro_assembler_arm.cc
86 #define __ asm_-> macro
106 __ PushList(core_spill_mask); in BuildFrame()
110 __ vpushs(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask)); in BuildFrame()
121 __ StoreToOffset(kStoreWord, R0, SP, 0); in BuildFrame()
132 __ StoreToOffset(kStoreWord, reg.AsCoreRegister(), SP, offset); in BuildFrame()
135 __ StoreSToOffset(reg.AsSRegister(), SP, offset); in BuildFrame()
138 __ StoreDToOffset(reg.AsDRegister(), SP, offset); in BuildFrame()
166 __ vpops(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask)); in RemoveFrame()
172 __ PopList(core_spill_mask); in RemoveFrame()
180 __ AddConstant(SP, -adjust); in IncreaseFrameSize()
[all …]
/art/compiler/utils/mips/
assembler_mips_test.cc
24 #define __ GetAssembler()-> macro
196 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
198 __ Bind(&label); in BranchCondOneRegHelper()
201 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
225 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
227 __ Bind(&label); in BranchCondTwoRegsHelper()
230 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
394 __ Ins(*reg1, *reg2, pos, size); in TEST_F()
413 __ Ext(*reg1, *reg2, pos, size); in TEST_F()
771 __ Addiu32(mips::A1, mips::A2, -0x8000); in TEST_F()
[all …]
assembler_mips32r6_test.cc
24 #define __ GetAssembler()-> macro
227 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
229 __ Bind(&label); in BranchCondTwoRegsHelper()
232 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
505 __ LoadDFromOffset(mips::F0, mips::A0, -0x8000); in TEST_F()
506 __ LoadDFromOffset(mips::F0, mips::A0, +0); in TEST_F()
507 __ LoadDFromOffset(mips::F0, mips::A0, +0x7FF8); in TEST_F()
508 __ LoadDFromOffset(mips::F0, mips::A0, +0x7FFB); in TEST_F()
509 __ LoadDFromOffset(mips::F0, mips::A0, +0x7FFC); in TEST_F()
510 __ LoadDFromOffset(mips::F0, mips::A0, +0x7FFF); in TEST_F()
[all …]
/art/compiler/optimizing/
intrinsics_mips.cc
58 #define __ codegen->GetAssembler()-> macro
73 __ Move(V0, trg_reg); in MoveFromReturnRegister()
79 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
81 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
107 __ Bind(GetEntryLabel()); in EmitNativeCode()
130 __ B(GetExitLabel()); in EmitNativeCode()
142 #undef __
150 #define __ assembler-> macro
167 __ Mfc1(out_lo, in); in MoveFPToInt()
168 __ MoveFromFpuHigh(out_hi, in); in MoveFPToInt()
[all …]
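
The intrinsics and code-generator hits share another recurring shape: a slow-path class whose EmitNativeCode() binds its entry label, emits the out-of-line work, and branches back through its exit label (lines 107 and 130 of intrinsics_mips.cc above). A hedged sketch with toy label and assembler types; ART's real SlowPathCode classes carry far more state:

    struct Label {};

    struct ToyAssembler {
      void Bind(Label*) {}                 // marks the label's position in the stream
      void B(Label*) {}                    // unconditional branch
    };

    #define __ assembler->

    struct ToySlowPath {
      Label* GetEntryLabel() { return &entry_; }
      Label* GetExitLabel()  { return &exit_; }

      void EmitNativeCode(ToyAssembler* assembler) {
        __ Bind(GetEntryLabel());          // the fast path branches here on the rare case
        // ... out-of-line work: runtime call, copy loop, etc. ...
        __ B(GetExitLabel());              // resume just past the fast path
      }

      Label entry_;
      Label exit_;
    };

    #undef __
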
code_generator_mips.cc
151 #define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()-> // NOLINT macro
161 __ Bind(GetEntryLabel()); in EmitNativeCode()
197 __ Bind(GetEntryLabel()); in EmitNativeCode()
224 __ Bind(GetEntryLabel()); in EmitNativeCode()
229 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_); in EmitNativeCode()
261 bool reordering = __ SetReorder(false); in EmitNativeCode()
263 __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, /* placeholder */ 0x5678); in EmitNativeCode()
264 __ SetReorder(reordering); in EmitNativeCode()
266 __ B(GetExitLabel()); in EmitNativeCode()
293 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
intrinsics_x86_64.cc
71 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
98 __ Bind(GetEntryLabel()); in EmitNativeCode()
100 __ Bind(&loop); in EmitNativeCode()
101 __ movl(CpuRegister(TMP), Address(src_curr_addr, 0)); in EmitNativeCode()
102 __ MaybeUnpoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
112 __ MaybePoisonHeapReference(CpuRegister(TMP)); in EmitNativeCode()
113 __ movl(Address(dst_curr_addr, 0), CpuRegister(TMP)); in EmitNativeCode()
114 __ addl(src_curr_addr, Immediate(element_size)); in EmitNativeCode()
115 __ addl(dst_curr_addr, Immediate(element_size)); in EmitNativeCode()
116 __ cmpl(src_curr_addr, src_stop_addr); in EmitNativeCode()
[all …]
intrinsics_x86.cc
78 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
112 __ Bind(GetEntryLabel()); in EmitNativeCode()
124 __ xorl(temp1, temp1); in EmitNativeCode()
126 __ Bind(&loop); in EmitNativeCode()
131 __ movl(temp2, Address(src, temp1, ScaleFactor::TIMES_4, adjusted_offset)); in EmitNativeCode()
133 __ leal(temp2, Address(src_pos.AsRegister<Register>(), temp1, ScaleFactor::TIMES_1, 0)); in EmitNativeCode()
134 __ movl(temp2, Address(src, temp2, ScaleFactor::TIMES_4, offset)); in EmitNativeCode()
136 __ MaybeUnpoisonHeapReference(temp2); in EmitNativeCode()
150 __ MaybePoisonHeapReference(temp2); in EmitNativeCode()
155 __ movl(Address(dest, temp1, ScaleFactor::TIMES_4, adjusted_offset), temp2); in EmitNativeCode()
[all …]
intrinsics_arm.cc
48 #define __ assembler-> macro
66 __ AddConstant(base, array, element_size * constant + data_offset); in GenSystemArrayCopyBaseAddress()
68 __ add(base, array, ShifterOperand(pos.AsRegister<Register>(), LSL, element_size_shift)); in GenSystemArrayCopyBaseAddress()
69 __ AddConstant(base, data_offset); in GenSystemArrayCopyBaseAddress()
88 __ AddConstant(end, base, element_size * constant); in GenSystemArrayCopyEndAddress()
90 __ add(end, base, ShifterOperand(copy_length.AsRegister<Register>(), LSL, element_size_shift)); in GenSystemArrayCopyEndAddress()
94 #undef __
97 #define __ down_cast<ArmAssembler*>(codegen->GetAssembler())-> // NOLINT macro
129 __ Bind(GetEntryLabel()); in EmitNativeCode()
134 __ Bind(&loop); in EmitNativeCode()
[all …]
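
intrinsics_arm.cc above also shows how files rebind __ per scope: helpers that receive the assembler directly use #define __ assembler-> (line 48), the macro is undefined at line 94, and slow-path code that only holds the codegen redefines it as the down_cast form at line 97. Sketched with illustrative toy types, not ART's real classes:

    struct ToyAssembler { void nop() {} };

    struct ToyCodegen {
      ToyAssembler* GetAssembler() { return &assembler_; }
      ToyAssembler assembler_;
    };

    #define __ assembler->                     // scope 1: helpers handed the assembler
    void EmitHelper(ToyAssembler* assembler) { __ nop(); }
    #undef __

    #define __ codegen->GetAssembler()->       // scope 2: code that only sees the codegen
    void EmitFromCodegen(ToyCodegen* codegen) { __ nop(); }
    #undef __
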
intrinsics_mips64.cc
46 #define __ codegen->GetAssembler()-> macro
61 __ Move(V0, trg_reg); in MoveFromReturnRegister()
67 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
69 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
96 __ Bind(GetEntryLabel()); in EmitNativeCode()
119 __ Bc(GetExitLabel()); in EmitNativeCode()
131 #undef __
139 #define __ assembler-> macro
154 __ Dmfc1(out, in); in MoveFPToInt()
156 __ Mfc1(out, in); in MoveFPToInt()
[all …]
intrinsics_arm_vixl.cc
36 #define __ assembler->GetVIXLAssembler()-> macro
93 __ Bind(GetEntryLabel()); in EmitNativeCode()
115 __ B(GetExitLabel()); in EmitNativeCode()
143 __ Add(base, array, element_size * constant + data_offset); in GenSystemArrayCopyBaseAddress()
145 __ Add(base, array, Operand(RegisterFrom(pos), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyBaseAddress()
146 __ Add(base, base, data_offset); in GenSystemArrayCopyBaseAddress()
165 __ Add(end, base, element_size * constant); in GenSystemArrayCopyEndAddress()
167 __ Add(end, base, Operand(RegisterFrom(copy_length), vixl32::LSL, element_size_shift)); in GenSystemArrayCopyEndAddress()
201 __ Bind(GetEntryLabel()); in EmitNativeCode()
206 __ Bind(&loop); in EmitNativeCode()
[all …]
code_generator_mips64.cc
102 #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> // NOLINT macro
112 __ Bind(GetEntryLabel()); in EmitNativeCode()
148 __ Bind(GetEntryLabel()); in EmitNativeCode()
175 __ Bind(GetEntryLabel()); in EmitNativeCode()
180 __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_); in EmitNativeCode()
210 __ Sw(out.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678); in EmitNativeCode()
212 __ Bc(GetExitLabel()); in EmitNativeCode()
239 __ Bind(GetEntryLabel()); in EmitNativeCode()
245 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_); in EmitNativeCode()
266 __ Sw(out, AT, /* placeholder */ 0x5678); in EmitNativeCode()
[all …]
code_generator_x86.cc
51 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> // NOLINT macro
60 __ Bind(GetEntryLabel()); in EmitNativeCode()
86 __ Bind(GetEntryLabel()); in EmitNativeCode()
105 __ Bind(GetEntryLabel()); in EmitNativeCode()
107 __ negl(reg_); in EmitNativeCode()
109 __ movl(reg_, Immediate(0)); in EmitNativeCode()
111 __ jmp(GetExitLabel()); in EmitNativeCode()
129 __ Bind(GetEntryLabel()); in EmitNativeCode()
152 __ movl(length_loc.AsRegister<Register>(), array_len); in EmitNativeCode()
154 __ shrl(length_loc.AsRegister<Register>(), Immediate(1)); in EmitNativeCode()
[all …]
code_generator_vector_x86.cc
24 #define __ down_cast<X86Assembler*>(GetAssembler())-> // NOLINT macro
59 __ movd(reg, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
60 __ punpcklbw(reg, reg); in VisitVecReplicateScalar()
61 __ punpcklwd(reg, reg); in VisitVecReplicateScalar()
62 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
67 __ movd(reg, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
68 __ punpcklwd(reg, reg); in VisitVecReplicateScalar()
69 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
73 __ movd(reg, locations->InAt(0).AsRegister<Register>()); in VisitVecReplicateScalar()
74 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
[all …]
intrinsics_arm64.cc
75 #define __ codegen->GetVIXLAssembler()-> macro
90 __ Mov(trg_reg, res_reg, kDiscardForSameWReg); in MoveFromReturnRegister()
94 __ Fmov(trg_reg, res_reg); in MoveFromReturnRegister()
116 __ Bind(GetEntryLabel()); in EmitNativeCode()
144 __ B(GetExitLabel()); in EmitNativeCode()
182 __ Bind(GetEntryLabel()); in EmitNativeCode()
184 __ Bind(&slow_copy_loop); in EmitNativeCode()
185 __ Ldr(tmp_reg, MemOperand(src_curr_addr, element_size, PostIndex)); in EmitNativeCode()
212 __ Str(tmp_reg, MemOperand(dst_curr_addr, element_size, PostIndex)); in EmitNativeCode()
213 __ Cmp(src_curr_addr, src_stop_addr); in EmitNativeCode()
[all …]
code_generator_vector_x86_64.cc
24 #define __ down_cast<X86_64Assembler*>(GetAssembler())-> // NOLINT macro
56 __ movd(reg, locations->InAt(0).AsRegister<CpuRegister>()); in VisitVecReplicateScalar()
57 __ punpcklbw(reg, reg); in VisitVecReplicateScalar()
58 __ punpcklwd(reg, reg); in VisitVecReplicateScalar()
59 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
64 __ movd(reg, locations->InAt(0).AsRegister<CpuRegister>()); in VisitVecReplicateScalar()
65 __ punpcklwd(reg, reg); in VisitVecReplicateScalar()
66 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
70 __ movd(reg, locations->InAt(0).AsRegister<CpuRegister>()); in VisitVecReplicateScalar()
71 __ pshufd(reg, reg, Immediate(0)); in VisitVecReplicateScalar()
[all …]
code_generator_x86_64.cc
55 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> // NOLINT macro
64 __ Bind(GetEntryLabel()); in EmitNativeCode()
90 __ Bind(GetEntryLabel()); in EmitNativeCode()
109 __ Bind(GetEntryLabel()); in EmitNativeCode()
112 __ negl(cpu_reg_); in EmitNativeCode()
114 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
120 __ negq(cpu_reg_); in EmitNativeCode()
122 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
125 __ jmp(GetExitLabel()); in EmitNativeCode()
145 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
code_generator_arm.cc
64 #define __ down_cast<ArmAssembler*>(codegen->GetAssembler())-> // NOLINT macro
111 __ StoreDToOffset(d_reg, SP, stack_offset); in SaveContiguousSRegisterList()
113 __ add(IP, SP, ShifterOperand(stack_offset)); in SaveContiguousSRegisterList()
114 __ vstmiad(IP, d_reg, number_of_d_regs); in SaveContiguousSRegisterList()
150 __ LoadDFromOffset(d_reg, SP, stack_offset); in RestoreContiguousSRegisterList()
152 __ add(IP, SP, ShifterOperand(stack_offset)); in RestoreContiguousSRegisterList()
153 __ vldmiad(IP, d_reg, number_of_d_regs); in RestoreContiguousSRegisterList()
184 __ StoreList(RegList(core_spills), orig_offset); in SaveLiveRegisters()
226 __ LoadList(RegList(core_spills), orig_offset); in RestoreLiveRegisters()
252 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
/art/compiler/utils/x86/
jni_macro_assembler_x86.cc
42 #define __ asm_. macro
54 __ pushl(spill); in BuildFrame()
64 __ addl(ESP, Immediate(-adjust)); in BuildFrame()
66 __ pushl(method_reg.AsX86().AsCpuRegister()); in BuildFrame()
74 __ movl(Address(ESP, offset), spill.AsX86().AsCpuRegister()); in BuildFrame()
78 __ movsd(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister()); in BuildFrame()
81 __ movss(Address(ESP, frame_size + spill.getSpillOffset()), spill.AsX86().AsXmmRegister()); in BuildFrame()
93 __ addl(ESP, Immediate(adjust)); in RemoveFrame()
97 __ popl(spill); in RemoveFrame()
101 __ ret(); in RemoveFrame()
[all …]
/art/compiler/utils/x86_64/
jni_macro_assembler_x86_64.cc
36 #define __ asm_. macro
49 __ pushq(spill.AsCpuRegister()); in BuildFrame()
59 __ subq(CpuRegister(RSP), Immediate(rest_of_frame)); in BuildFrame()
68 __ movsd(Address(CpuRegister(RSP), offset), spill.AsXmmRegister()); in BuildFrame()
76 __ movq(Address(CpuRegister(RSP), 0), method_reg.AsX86_64().AsCpuRegister()); in BuildFrame()
82 __ movq(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
86 __ movl(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
91 __ movsd(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
95 __ movss(Address(CpuRegister(RSP), frame_size + spill.getSpillOffset()), in BuildFrame()
115 __ movsd(spill.AsXmmRegister(), Address(CpuRegister(RSP), offset)); in RemoveFrame()
[all …]
/art/compiler/trampolines/
trampoline_compiler.cc
46 #define __ assembler. macro
84 __ FinalizeCode(); in CreateTrampoline()
85 size_t cs = __ CodeSize(); in CreateTrampoline()
88 __ FinalizeInstructions(code); in CreateTrampoline()
106 __ JumpTo(Arm64ManagedRegister::FromXRegister(X0), Offset(offset.Int32Value()), in CreateTrampoline()
111 __ LoadRawPtr(Arm64ManagedRegister::FromXRegister(IP1), in CreateTrampoline()
115 __ JumpTo(Arm64ManagedRegister::FromXRegister(IP1), Offset(offset.Int32Value()), in CreateTrampoline()
120 __ JumpTo(Arm64ManagedRegister::FromXRegister(TR), Offset(offset.Int32Value()), in CreateTrampoline()
126 __ FinalizeCode(); in CreateTrampoline()
127 size_t cs = __ CodeSize(); in CreateTrampoline()
[all …]
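
trampoline_compiler.cc uses the dot-form binding (#define __ assembler. at line 46, since the assembler is a local value rather than a pointer) and shows the finalize epilogue that assembler_thumb_test.cc also uses: FinalizeCode(), then CodeSize(), then FinalizeInstructions() into a freshly sized buffer. A sketch under the same toy-type caveat; MemoryRegion here is a simplified stand-in for ART's:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct MemoryRegion {                    // simplified stand-in
      MemoryRegion(void* p, std::size_t n) : ptr(p), size(n) {}
      void* ptr;
      std::size_t size;
    };

    struct ToyAssembler {
      std::vector<uint8_t> buffer;           // encoded instructions accumulate here
      void FinalizeCode() {}                 // would resolve fixups and literal pools
      std::size_t CodeSize() const { return buffer.size(); }
      void FinalizeInstructions(const MemoryRegion& m) {
        if (!buffer.empty()) {
          std::memcpy(m.ptr, buffer.data(), buffer.size());
        }
      }
    };

    #define __ assembler.                    // dot form: "assembler" is a value here

    std::vector<uint8_t> Finish(ToyAssembler& assembler) {
      __ FinalizeCode();                     // 1. the code size is only final after this
      std::size_t cs = __ CodeSize();        // 2. ask for the final size
      std::vector<uint8_t> managed_code(cs);
      MemoryRegion code(managed_code.data(), cs);
      __ FinalizeInstructions(code);         // 3. copy the encoded bytes out
      return managed_code;
    }

    #undef __
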
/art/compiler/utils/mips64/
assembler_mips64_test.cc
27 #define __ GetAssembler()-> macro
267 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
269 __ Bind(&label); in BranchCondOneRegHelper()
272 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
296 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondTwoRegsHelper()
298 __ Bind(&label); in BranchCondTwoRegsHelper()
301 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondTwoRegsHelper()
682 __ Balc(&label1); in TEST_F()
685 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in TEST_F()
687 __ Bind(&label1); in TEST_F()
[all …]
/art/runtime/hprof/
hprof.cc
434 #define __ output_-> macro
567 __ AddU4(sn); in WriteClassTable()
568 __ AddObjectId(c); in WriteClassTable()
569 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c)); in WriteClassTable()
570 __ AddStringId(LookupClassNameId(c)); in WriteClassTable()
585 __ AddU4(id); in WriteStringTable()
586 __ AddUtf8String(string.c_str()); in WriteStringTable()
662 __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic)); in WriteFixedHeader()
669 __ AddU4(sizeof(uint32_t)); in WriteFixedHeader()
677 __ AddU4(static_cast<uint32_t>(nowMs >> 32)); in WriteFixedHeader()
[all …]
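
hprof.cc is the one non-assembler hit: it binds __ to its heap-dump output object (line 434), and the record-writing code stays just as terse. The same sketch style, with a toy output type standing in for the real one:

    #include <cstdint>
    #include <string>
    #include <vector>

    struct ToyOutput {                     // toy stand-in for hprof's output object
      std::vector<uint8_t> bytes;
      void AddU4(uint32_t v) {             // this toy writes big-endian bytes
        for (int shift = 24; shift >= 0; shift -= 8) {
          bytes.push_back(static_cast<uint8_t>(v >> shift));
        }
      }
      void AddUtf8String(const char* s) {
        while (*s != '\0') {
          bytes.push_back(static_cast<uint8_t>(*s++));
        }
      }
    };

    #define __ output_->                   // same idiom, different target object

    struct StringTableWriter {
      void WriteEntry(uint32_t id, const std::string& str) {
        __ AddU4(id);                      // expands to output_->AddU4(id)
        __ AddUtf8String(str.c_str());     // compare lines 585-586 above
      }
      ToyOutput* output_;
    };

    #undef __
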
/art/compiler/jni/quick/
jni_compiler.cc
51 #define __ jni_asm-> macro
233 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills()); in ArtJniCompileMethodInternal()
243 __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(), in ArtJniCompileMethodInternal()
247 __ CopyRawPtrFromThread(main_jni_conv->HandleScopeLinkOffset(), in ArtJniCompileMethodInternal()
250 __ StoreStackOffsetToThread(Thread::TopHandleScopeOffset<kPointerSize>(), in ArtJniCompileMethodInternal()
265 __ LoadRef(main_jni_conv->InterproceduralScratchRegister(), in ArtJniCompileMethodInternal()
267 __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false); in ArtJniCompileMethodInternal()
269 __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister()); in ArtJniCompileMethodInternal()
292 __ VerifyObject(in_reg, mr_conv->IsCurrentArgPossiblyNull()); in ArtJniCompileMethodInternal()
293 __ StoreRef(handle_scope_offset, in_reg); in ArtJniCompileMethodInternal()
[all …]
