
Searched refs:__ (Results 1 – 24 of 24) sorted by relevance

/art/compiler/utils/
assembler_thumb_test.cc
175 #define __ assembler-> macro
179 __ FinalizeCode(); in EmitAndCheck()
180 size_t cs = __ CodeSize(); in EmitAndCheck()
183 __ FinalizeInstructions(code); in EmitAndCheck()
196 #undef __
207 #define __ assembler. macro
210 __ movs(R0, ShifterOperand(R1)); in TEST_F()
211 __ mov(R0, ShifterOperand(R1)); in TEST_F()
212 __ mov(R8, ShifterOperand(R9)); in TEST_F()
214 __ mov(R0, ShifterOperand(1)); in TEST_F()
[all …]
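
Apart from the Android.cpplint.mk hit further down (where __ is only the replacement character used when flattening a path into a file name), these results all show the same ART convention: __ is #defined as a shorthand for the active assembler, each emitted instruction is written as __ Foo(...), and the macro is #undef'd once the emitting code ends. The following is a minimal, self-contained sketch of that idiom; the Assembler class, its movs/FinalizeCode/CodeSize methods, and the register numbers are placeholders for illustration, not the real ART API.

#include <cstdio>
#include <vector>

// Toy stand-in for the ART assemblers; only the define/use/undef
// pattern shown by the search hits is being illustrated here.
class Assembler {
 public:
  void movs(int rd, int rm) { bytes_.push_back(0x00); (void)rd; (void)rm; }
  void FinalizeCode() { /* resolve fixups, pad, etc. */ }
  size_t CodeSize() const { return bytes_.size(); }
 private:
  std::vector<unsigned char> bytes_;
};

#define __ assembler->   // from here on, "__ Foo()" reads assembler->Foo()

void EmitAndCheck(Assembler* assembler) {
  __ movs(0, 1);                // emit an instruction through the shorthand
  __ FinalizeCode();            // finish encoding
  size_t cs = __ CodeSize();    // the macro works in expressions as well
  std::printf("emitted %zu bytes\n", cs);
}

#undef __                       // undefined again to keep the alias local to this code

int main() {
  Assembler assembler;
  EmitAndCheck(&assembler);
  return 0;
}
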
/art/compiler/utils/arm/
assembler_thumb2_test.cc
100 #define __ GetAssembler()-> macro
103 __ sbfx(arm::R0, arm::R1, 0, 1); in TEST_F()
104 __ sbfx(arm::R0, arm::R1, 0, 8); in TEST_F()
105 __ sbfx(arm::R0, arm::R1, 0, 16); in TEST_F()
106 __ sbfx(arm::R0, arm::R1, 0, 32); in TEST_F()
108 __ sbfx(arm::R0, arm::R1, 8, 1); in TEST_F()
109 __ sbfx(arm::R0, arm::R1, 8, 8); in TEST_F()
110 __ sbfx(arm::R0, arm::R1, 8, 16); in TEST_F()
111 __ sbfx(arm::R0, arm::R1, 8, 24); in TEST_F()
113 __ sbfx(arm::R0, arm::R1, 16, 1); in TEST_F()
[all …]
assembler_arm.cc
858 #define __ sp_asm-> in Emit() macro
859 __ Bind(&entry_); in Emit()
861 __ DecreaseFrameSize(stack_adjust_); in Emit()
865 __ mov(R0, ShifterOperand(scratch_.AsCoreRegister())); in Emit()
867 __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(4, pDeliverException).Int32Value()); in Emit()
868 __ blx(R12); in Emit()
869 #undef __ in Emit()
/art/compiler/optimizing/
intrinsics_mips.cc
58 #define __ codegen->GetAssembler()-> macro
73 __ Move(V0, trg_reg); in MoveFromReturnRegister()
79 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
81 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
107 __ Bind(GetEntryLabel()); in EmitNativeCode()
130 __ B(GetExitLabel()); in EmitNativeCode()
142 #undef __
150 #define __ assembler-> macro
167 __ Mfc1(out_lo, in); in MoveFPToInt()
168 __ MoveFromFpuHigh(out_hi, in); in MoveFPToInt()
[all …]
code_generator_mips.cc
144 #define __ down_cast<CodeGeneratorMIPS*>(codegen)->GetAssembler()-> macro
154 __ Bind(GetEntryLabel()); in EmitNativeCode()
190 __ Bind(GetEntryLabel()); in EmitNativeCode()
225 __ Bind(GetEntryLabel()); in EmitNativeCode()
229 __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex()); in EmitNativeCode()
252 __ B(GetExitLabel()); in EmitNativeCode()
283 __ Bind(GetEntryLabel()); in EmitNativeCode()
288 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index); in EmitNativeCode()
301 __ B(GetExitLabel()); in EmitNativeCode()
316 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
intrinsics_x86_64.cc
80 #define __ assembler-> macro
101 __ movd(output.AsRegister<CpuRegister>(), input.AsFpuRegister<XmmRegister>(), is64bit); in MoveFPToInt()
107 __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<CpuRegister>(), is64bit); in MoveIntToFP()
154 __ bswapl(out); in GenReverseBytes()
155 __ sarl(out, Immediate(16)); in GenReverseBytes()
158 __ bswapl(out); in GenReverseBytes()
161 __ bswapq(out); in GenReverseBytes()
220 __ movsd(xmm_temp, codegen->LiteralInt64Address(INT64_C(0x7FFFFFFFFFFFFFFF))); in MathAbsFP()
221 __ andpd(output.AsFpuRegister<XmmRegister>(), xmm_temp); in MathAbsFP()
223 __ movss(xmm_temp, codegen->LiteralInt32Address(INT32_C(0x7FFFFFFF))); in MathAbsFP()
[all …]
code_generator_mips64.cc
105 #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()-> macro
115 __ Bind(GetEntryLabel()); in EmitNativeCode()
150 __ Bind(GetEntryLabel()); in EmitNativeCode()
184 __ Bind(GetEntryLabel()); in EmitNativeCode()
188 __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex()); in EmitNativeCode()
207 __ Bc(GetExitLabel()); in EmitNativeCode()
238 __ Bind(GetEntryLabel()); in EmitNativeCode()
243 __ LoadConst32(calling_convention.GetRegisterAt(0), string_index); in EmitNativeCode()
255 __ Bc(GetExitLabel()); in EmitNativeCode()
270 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
intrinsics_arm.cc
66 #define __ assembler-> macro
88 __ vmovrrd(output.AsRegisterPairLow<Register>(), in MoveFPToInt()
92 __ vmovrs(output.AsRegister<Register>(), input.AsFpuRegister<SRegister>()); in MoveFPToInt()
100 __ vmovdrr(FromLowSToD(output.AsFpuRegisterPairLow<SRegister>()), in MoveIntToFP()
104 __ vmovsr(output.AsFpuRegister<SRegister>(), input.AsRegister<Register>()); in MoveIntToFP()
164 __ clz(out, in_reg_hi); in GenNumberOfLeadingZeros()
165 __ CompareAndBranchIfNonZero(in_reg_hi, &end); in GenNumberOfLeadingZeros()
166 __ clz(out, in_reg_lo); in GenNumberOfLeadingZeros()
167 __ AddConstant(out, 32); in GenNumberOfLeadingZeros()
168 __ Bind(&end); in GenNumberOfLeadingZeros()
[all …]
intrinsics_x86.cc
86 #define __ assembler-> macro
117 __ movsd(temp, input.AsFpuRegister<XmmRegister>()); in MoveFPToInt()
118 __ movd(output.AsRegisterPairLow<Register>(), temp); in MoveFPToInt()
119 __ psrlq(temp, Immediate(32)); in MoveFPToInt()
120 __ movd(output.AsRegisterPairHigh<Register>(), temp); in MoveFPToInt()
122 __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>()); in MoveFPToInt()
133 __ movd(temp1, input.AsRegisterPairLow<Register>()); in MoveIntToFP()
134 __ movd(temp2, input.AsRegisterPairHigh<Register>()); in MoveIntToFP()
135 __ punpckldq(temp1, temp2); in MoveIntToFP()
136 __ movsd(output.AsFpuRegister<XmmRegister>(), temp1); in MoveIntToFP()
[all …]
code_generator_x86_64.cc
54 #define __ down_cast<X86_64Assembler*>(codegen->GetAssembler())-> macro
63 __ Bind(GetEntryLabel()); in EmitNativeCode()
89 __ Bind(GetEntryLabel()); in EmitNativeCode()
115 __ Bind(GetEntryLabel()); in EmitNativeCode()
118 __ negl(cpu_reg_); in EmitNativeCode()
120 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
126 __ negq(cpu_reg_); in EmitNativeCode()
128 __ xorl(cpu_reg_, cpu_reg_); in EmitNativeCode()
131 __ jmp(GetExitLabel()); in EmitNativeCode()
150 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
code_generator_x86.cc
50 #define __ down_cast<X86Assembler*>(codegen->GetAssembler())-> macro
59 __ Bind(GetEntryLabel()); in EmitNativeCode()
85 __ Bind(GetEntryLabel()); in EmitNativeCode()
111 __ Bind(GetEntryLabel()); in EmitNativeCode()
113 __ negl(reg_); in EmitNativeCode()
115 __ movl(reg_, Immediate(0)); in EmitNativeCode()
117 __ jmp(GetExitLabel()); in EmitNativeCode()
135 __ Bind(GetEntryLabel()); in EmitNativeCode()
172 __ Bind(GetEntryLabel()); in EmitNativeCode()
181 __ jmp(GetReturnLabel()); in EmitNativeCode()
[all …]
intrinsics_mips64.cc
46 #define __ codegen->GetAssembler()-> macro
61 __ Move(V0, trg_reg); in MoveFromReturnRegister()
67 __ MovS(F0, trg_reg); in MoveFromReturnRegister()
69 __ MovD(F0, trg_reg); in MoveFromReturnRegister()
96 __ Bind(GetEntryLabel()); in EmitNativeCode()
119 __ Bc(GetExitLabel()); in EmitNativeCode()
131 #undef __
139 #define __ assembler-> macro
154 __ Dmfc1(out, in); in MoveFPToInt()
156 __ Mfc1(out, in); in MoveFPToInt()
[all …]
code_generator_arm.cc
62 #define __ down_cast<ArmAssembler*>(codegen->GetAssembler())-> macro
71 __ Bind(GetEntryLabel()); in EmitNativeCode()
95 __ Bind(GetEntryLabel()); in EmitNativeCode()
120 __ Bind(GetEntryLabel()); in EmitNativeCode()
127 __ b(GetReturnLabel()); in EmitNativeCode()
129 __ b(arm_codegen->GetLabelOf(successor_)); in EmitNativeCode()
163 __ Bind(GetEntryLabel()); in EmitNativeCode()
205 __ Bind(GetEntryLabel()); in EmitNativeCode()
209 __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex()); in EmitNativeCode()
227 __ b(GetExitLabel()); in EmitNativeCode()
[all …]
intrinsics_arm64.cc
67 #define __ codegen->GetAssembler()->vixl_masm_-> macro
82 __ Mov(trg_reg, res_reg, kDiscardForSameWReg); in MoveFromReturnRegister()
86 __ Fmov(trg_reg, res_reg); in MoveFromReturnRegister()
108 __ Bind(GetEntryLabel()); in EmitNativeCode()
131 __ B(GetExitLabel()); in EmitNativeCode()
143 #undef __
167 #define __ masm-> macro
188 __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output), in MoveFPToInt()
195 __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output), in MoveIntToFP()
243 __ Rev16(WRegisterFrom(out), WRegisterFrom(in)); in GenReverseBytes()
[all …]
code_generator_arm64.cc
39 #ifdef __
135 #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()-> macro
167 __ Add(new_base, base, Operand(spill_offset + core_spill_size)); in SaveRestoreLiveRegistersHelper()
176 __ StoreCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelper()
177 __ StoreCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size)); in SaveRestoreLiveRegistersHelper()
179 __ LoadCPURegList(core_list, MemOperand(base, spill_offset)); in SaveRestoreLiveRegistersHelper()
180 __ LoadCPURegList(fp_list, MemOperand(base, spill_offset + core_spill_size)); in SaveRestoreLiveRegistersHelper()
228 __ Bind(GetEntryLabel()); in EmitNativeCode()
258 __ Bind(GetEntryLabel()); in EmitNativeCode()
290 __ Bind(GetEntryLabel()); in EmitNativeCode()
[all …]
optimizing_cfi_test.cc
175 #define __ down_cast<arm::Thumb2Assembler*>(GetCodeGenerator()->GetAssembler())-> in TEST_ISA() macro
177 __ CompareAndBranchIfZero(arm::R0, &target); in TEST_ISA()
180 __ ldr(arm::R0, arm::Address(arm::R0)); in TEST_ISA()
182 __ Bind(&target); in TEST_ISA()
183 #undef __ in TEST_ISA()
203 #define __ down_cast<mips::MipsAssembler*>(GetCodeGenerator()->GetAssembler())-> in TEST_F() macro
205 __ Beqz(mips::A0, &target); in TEST_F()
208 __ Nop(); in TEST_F()
210 __ Bind(&target); in TEST_F()
211 #undef __ in TEST_F()
[all …]
/art/compiler/trampolines/
trampoline_compiler.cc
46 #define __ assembler. macro
58 __ LoadFromOffset(kLoadWord, PC, R0, offset.Int32Value()); in CreateTrampoline()
61 __ LoadFromOffset(kLoadWord, IP, R0, JNIEnvExt::SelfOffset(4).Int32Value()); in CreateTrampoline()
62 __ LoadFromOffset(kLoadWord, PC, IP, offset.Int32Value()); in CreateTrampoline()
65 __ LoadFromOffset(kLoadWord, PC, R9, offset.Int32Value()); in CreateTrampoline()
67 __ bkpt(0); in CreateTrampoline()
69 __ FinalizeCode(); in CreateTrampoline()
70 size_t cs = __ CodeSize(); in CreateTrampoline()
73 __ FinalizeInstructions(code); in CreateTrampoline()
88 __ JumpTo(Arm64ManagedRegister::FromXRegister(X0), Offset(offset.Int32Value()), in CreateTrampoline()
[all …]
/art/compiler/utils/mips64/
assembler_mips64_test.cc
27 #define __ GetAssembler()-> macro
220 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
222 __ Bind(&label); in BranchCondOneRegHelper()
225 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondOneRegHelper()
249 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondTwoRegsHelper()
251 __ Bind(&label); in BranchCondTwoRegsHelper()
254 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in BranchCondTwoRegsHelper()
581 __ Jialc(&label1, mips64::T9); in TEST_F()
584 __ Addu(mips64::ZERO, mips64::ZERO, mips64::ZERO); in TEST_F()
586 __ Bind(&label1); in TEST_F()
[all …]
/art/compiler/jni/quick/
jni_compiler.cc
45 #define __ jni_asm-> macro
116 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills()); in ArtJniCompileMethodInternal()
122 __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(), in ArtJniCompileMethodInternal()
127 __ CopyRawPtrFromThread64(main_jni_conv->HandleScopeLinkOffset(), in ArtJniCompileMethodInternal()
130 __ StoreStackOffsetToThread64(Thread::TopHandleScopeOffset<8>(), in ArtJniCompileMethodInternal()
134 __ CopyRawPtrFromThread32(main_jni_conv->HandleScopeLinkOffset(), in ArtJniCompileMethodInternal()
137 __ StoreStackOffsetToThread32(Thread::TopHandleScopeOffset<4>(), in ArtJniCompileMethodInternal()
151 __ LoadRef(main_jni_conv->InterproceduralScratchRegister(), in ArtJniCompileMethodInternal()
153 __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false); in ArtJniCompileMethodInternal()
154 __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister()); in ArtJniCompileMethodInternal()
[all …]
/art/compiler/utils/mips/
assembler_mips_test.cc
24 #define __ GetAssembler()-> macro
196 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
198 __ Bind(&label); in BranchCondOneRegHelper()
201 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondOneRegHelper()
225 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
227 __ Bind(&label); in BranchCondTwoRegsHelper()
230 __ Addu(mips::ZERO, mips::ZERO, mips::ZERO); in BranchCondTwoRegsHelper()
394 __ Ins(*reg1, *reg2, pos, size); in TEST_F()
413 __ Ext(*reg1, *reg2, pos, size); in TEST_F()
727 __ LoadFromOffset(mips::kLoadSignedByte, mips::A0, mips::A0, 0); in TEST_F()
[all …]
/art/runtime/hprof/
hprof.cc
415 #define __ output_-> macro
547 __ AddU4(sn); in WriteClassTable()
548 __ AddObjectId(c); in WriteClassTable()
549 __ AddStackTraceSerialNumber(LookupStackTraceSerialNumber(c)); in WriteClassTable()
550 __ AddStringId(LookupClassNameId(c)); in WriteClassTable()
565 __ AddU4(id); in WriteStringTable()
566 __ AddUtf8String(string.c_str()); in WriteStringTable()
642 __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic)); in WriteFixedHeader()
649 __ AddU4(sizeof(uint32_t)); in WriteFixedHeader()
657 __ AddU4(static_cast<uint32_t>(nowMs >> 32)); in WriteFixedHeader()
[all …]
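
The hprof.cc hits are the one place in these results where __ aliases something other than an assembler: it stands for the heap-dump byte writer (output_->). Below is a self-contained sketch of that variant; the EndianOutput class body and the Hprof wrapper are invented for illustration, and only the AddU4/AddUtf8String method names echo the listed calls.

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <string>
#include <vector>

// Invented minimal big-endian record writer; method names echo hprof.cc.
class EndianOutput {
 public:
  void AddU4(uint32_t v) {
    for (int shift = 24; shift >= 0; shift -= 8) {
      buf_.push_back(static_cast<uint8_t>(v >> shift));
    }
  }
  void AddUtf8String(const char* s) {
    buf_.insert(buf_.end(), s, s + std::strlen(s));
  }
  size_t Size() const { return buf_.size(); }
 private:
  std::vector<uint8_t> buf_;
};

class Hprof {
 public:
  explicit Hprof(EndianOutput* output) : output_(output) {}
  void WriteStringRecord(uint32_t id, const std::string& s);
 private:
  EndianOutput* output_;
};

#define __ output_->   // within Hprof member definitions, "__ AddU4(x)" reads output_->AddU4(x)

void Hprof::WriteStringRecord(uint32_t id, const std::string& s) {
  __ AddU4(id);                 // string id
  __ AddUtf8String(s.c_str());  // string body, raw UTF-8 bytes
}

#undef __

int main() {
  EndianOutput out;
  Hprof hprof(&out);
  hprof.WriteStringRecord(7, "java.lang.Object");
  std::printf("wrote %zu bytes\n", out.Size());
  return 0;
}
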
/art/build/
Android.cpplint.mk
44 art_cpplint_touch := $$(OUT_CPPLINT)/$$(subst /,__,$$(art_cpplint_file))
/art/compiler/utils/x86/
assembler_x86.cc
2390 #define __ sp_asm-> in Emit() macro
2391 __ Bind(&entry_); in Emit()
2394 __ DecreaseFrameSize(stack_adjust_); in Emit()
2397 __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>())); in Emit()
2398 __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException))); in Emit()
2400 __ int3(); in Emit()
2401 #undef __ in Emit()
/art/compiler/utils/x86_64/
assembler_x86_64.cc
3155 #define __ sp_asm-> in Emit() macro
3156 __ Bind(&entry_); in Emit()
3159 __ DecreaseFrameSize(stack_adjust_); in Emit()
3162 __ gs()->movq(CpuRegister(RDI), Address::Absolute(Thread::ExceptionOffset<8>(), true)); in Emit()
3163 __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(8, pDeliverException), true)); in Emit()
3165 __ int3(); in Emit()
3166 #undef __ in Emit()