/art/compiler/utils/

assembler_thumb_test.cc
    159  #define __ assembler->    macro
    164  __ mov(R0, ShifterOperand(R1));    in TEST()
    165  __ mov(R8, ShifterOperand(R9));    in TEST()
    167  __ mov(R0, ShifterOperand(1));    in TEST()
    168  __ mov(R8, ShifterOperand(9));    in TEST()
    170  size_t cs = __ CodeSize();    in TEST()
    173  __ FinalizeInstructions(code);    in TEST()
    182  __ mov(R0, ShifterOperand(R1));    in TEST()
    183  __ mov(R8, ShifterOperand(R9));    in TEST()
    185  size_t cs = __ CodeSize();    in TEST()
    [all …]
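Every hit above leans on the same shorthand: `#define __ assembler->` lets each emitted instruction be written as `__ mov(...)`, which reads almost like an assembly listing while remaining an ordinary method call. A minimal, self-contained sketch of the idiom; ToyAssembler and its methods are invented stand-ins for ART's much richer assemblers:

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    class ToyAssembler {
     public:
      // Pretend every instruction is a single 32-bit word.
      void mov(int rd, int rm) { words_.push_back((rd << 12) | rm); }
      size_t CodeSize() const { return words_.size() * 4u; }
     private:
      std::vector<unsigned> words_;
    };

    #define __ assembler->

    void EmitMoves(ToyAssembler* assembler) {
      __ mov(0, 1);               // expands to assembler->mov(0, 1);
      __ mov(8, 9);
      size_t cs = __ CodeSize();  // the shorthand works for queries too
      std::printf("emitted %zu bytes\n", cs);
    }

    #undef __  // undefined again so the shorthand cannot leak into other code

Identifiers containing a double underscore are reserved for the implementation in C++, which is part of why these files claim `__` only briefly and #undef it when done.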
/art/compiler/jni/quick/

jni_compiler.cc
    44   #define __ jni_asm->    macro
    108  __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs, mr_conv->EntrySpills());    in ArtJniCompileMethodInternal()
    114  __ StoreImmediateToFrame(main_jni_conv->HandleScopeNumRefsOffset(),    in ArtJniCompileMethodInternal()
    119  __ CopyRawPtrFromThread64(main_jni_conv->HandleScopeLinkOffset(),    in ArtJniCompileMethodInternal()
    122  __ StoreStackOffsetToThread64(Thread::TopHandleScopeOffset<8>(),    in ArtJniCompileMethodInternal()
    126  __ CopyRawPtrFromThread32(main_jni_conv->HandleScopeLinkOffset(),    in ArtJniCompileMethodInternal()
    129  __ StoreStackOffsetToThread32(Thread::TopHandleScopeOffset<4>(),    in ArtJniCompileMethodInternal()
    143  __ LoadRef(main_jni_conv->InterproceduralScratchRegister(),    in ArtJniCompileMethodInternal()
    145  __ VerifyObject(main_jni_conv->InterproceduralScratchRegister(), false);    in ArtJniCompileMethodInternal()
    146  __ StoreRef(handle_scope_offset, main_jni_conv->InterproceduralScratchRegister());    in ArtJniCompileMethodInternal()
    [all …]
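The paired CopyRawPtrFromThread32/64 and StoreStackOffsetToThread32/64 calls, taking Thread::TopHandleScopeOffset<4>() versus <8>(), show the JNI stub compiler selecting thread-structure offsets by target pointer size. A hedged sketch of that mechanism; the field layout below is invented for illustration, not ART's real Thread layout:

    #include <cstddef>
    #include <cstdint>

    template <size_t kPointerSize>
    struct ThreadOffsets {
      // Hypothetical layout: a 4-byte flags word, two pointer-sized fields,
      // then the top-of-handle-scope pointer whose offset we want.
      static constexpr int32_t TopHandleScope() {
        return static_cast<int32_t>(4 + 2 * kPointerSize);
      }
    };

    // One compiler binary targets both word sizes, so the 32-bit code path
    // asks for <4> and the 64-bit path for <8>.
    static_assert(ThreadOffsets<4>::TopHandleScope() == 12, "32-bit layout");
    static_assert(ThreadOffsets<8>::TopHandleScope() == 20, "64-bit layout");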
/art/compiler/optimizing/

code_generator_x86.cc
    46   #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->    macro
    53   __ Bind(GetEntryLabel());    in EmitNativeCode()
    54   __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));    in EmitNativeCode()
    68   __ Bind(GetEntryLabel());    in EmitNativeCode()
    69   __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowDivZero)));    in EmitNativeCode()
    83   __ Bind(GetEntryLabel());    in EmitNativeCode()
    85   __ negl(reg_);    in EmitNativeCode()
    87   __ movl(reg_, Immediate(0));    in EmitNativeCode()
    89   __ jmp(GetExitLabel());    in EmitNativeCode()
    109  __ Bind(GetEntryLabel());    in EmitNativeCode()
    [all …]
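Each of these EmitNativeCode() bodies opens with `__ Bind(GetEntryLabel())` and, when the slow path can resume, closes with `__ jmp(GetExitLabel())`: the stub is emitted out of line, and binding a label patches every forward branch that targeted it. A toy label/bind mechanism, assuming nothing of ART beyond that shape:

    #include <cstdio>
    #include <vector>

    struct Label {
      int position = -1;            // code offset once bound
      std::vector<size_t> fixups;   // branch sites still waiting on this label
    };

    class ToyAssembler {
     public:
      void Branch(Label* target) {
        code_.push_back(-1);        // placeholder until the label is bound
        target->fixups.push_back(code_.size() - 1);
      }
      void Bind(Label* label) {
        label->position = static_cast<int>(code_.size());
        for (size_t site : label->fixups) code_[site] = label->position;
      }
      void Nop() { code_.push_back(0); }
      void Dump() const {
        for (int v : code_) std::printf("%d ", v);
        std::printf("\n");
      }
     private:
      std::vector<int> code_;
    };

    #define __ assembler->

    int main() {
      ToyAssembler a, *assembler = &a;
      Label slow_path;
      __ Branch(&slow_path);  // fast path: forward branch to the stub
      __ Nop();               // rest of the fast path
      __ Bind(&slow_path);    // like __ Bind(GetEntryLabel()) above
      __ Nop();               // slow-path work would go here
      __ Dump();              // prints: 2 0 0
    }

    #undef __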
code_generator_x86_64.cc
    49   #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->    macro
    56   __ Bind(GetEntryLabel());    in EmitNativeCode()
    57   __ gs()->call(    in EmitNativeCode()
    72   __ Bind(GetEntryLabel());    in EmitNativeCode()
    73   __ gs()->call(    in EmitNativeCode()
    89   __ Bind(GetEntryLabel());    in EmitNativeCode()
    92   __ negl(cpu_reg_);    in EmitNativeCode()
    94   __ movl(cpu_reg_, Immediate(0));    in EmitNativeCode()
    100  __ negq(cpu_reg_);    in EmitNativeCode()
    102  __ xorl(cpu_reg_, cpu_reg_);    in EmitNativeCode()
    [all …]
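The x86-64 variant handles both operand widths in one slow path: negl for an int, negq for a long, and xorl to clear the register in either case, since a 32-bit ALU result zero-extends into the upper half of the register on x86-64. A sketch of that width dispatch; the Emit helpers are illustrative placeholders:

    #include <cstdio>

    enum class Width { k32, k64 };

    // Pick the mnemonic by operand width, as the hits at lines 92 and 100 do.
    void EmitNeg(Width w, const char* reg) {
      std::printf("  %s %s\n", w == Width::k32 ? "negl" : "negq", reg);
    }

    void EmitZero(const char* reg32) {
      // xorl reg, reg clears the full 64-bit register, because 32-bit
      // results are zero-extended; no 64-bit xorq is needed.
      std::printf("  xorl %s, %s\n", reg32, reg32);
    }

    int main() {
      EmitNeg(Width::k32, "%eax");
      EmitNeg(Width::k64, "%rax");
      EmitZero("%eax");
    }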
code_generator_arm.cc
    56   #define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->    macro
    65   __ Bind(GetEntryLabel());    in EmitNativeCode()
    81   __ Bind(GetEntryLabel());    in EmitNativeCode()
    98   __ Bind(GetEntryLabel());    in EmitNativeCode()
    104  __ b(GetReturnLabel());    in EmitNativeCode()
    106  __ b(arm_codegen->GetLabelOf(successor_));    in EmitNativeCode()
    141  __ Bind(GetEntryLabel());    in EmitNativeCode()
    178  __ Bind(GetEntryLabel());    in EmitNativeCode()
    182  __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());    in EmitNativeCode()
    195  __ b(GetExitLabel());    in EmitNativeCode()
    [all …]
intrinsics_x86.cc
    59   #define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->    macro
    79   __ movl(target_reg, EAX);    in MoveFromReturnRegister()
    87   __ movl(target_reg_lo, EAX);    in MoveFromReturnRegister()
    90   __ movl(target_reg_hi, EDX);    in MoveFromReturnRegister()
    102  __ movsd(target_reg, XMM0);    in MoveFromReturnRegister()
    109  __ movss(target_reg, XMM0);    in MoveFromReturnRegister()
    134  __ Bind(GetEntryLabel());    in EmitNativeCode()
    157  __ jmp(GetExitLabel());    in EmitNativeCode()
    167  #undef __
    168  #define __ assembler->    macro
    [all …]
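Lines 167 and 168 show the intrinsics files rebinding `__` mid-file: the slow-path classes at the top need the codegen's assembler, while the intrinsic bodies receive a plain `assembler` parameter. A sketch of the pattern, with invented emitter types standing in for ART's:

    #include <cstdio>

    struct CodegenAssembler { void movl() { std::printf("codegen movl\n"); } };
    struct LocalAssembler   { void movl() { std::printf("local movl\n"); } };

    struct Codegen {
      CodegenAssembler* GetAssembler() { return &cg_asm_; }
      CodegenAssembler cg_asm_;
    };

    #define __ codegen->GetAssembler()->
    void EmitSlowPath(Codegen* codegen) { __ movl(); }
    #undef __

    #define __ assembler->  // rebound, as at intrinsics_x86.cc line 168
    void EmitIntrinsic(LocalAssembler* assembler) { __ movl(); }
    #undef __

    int main() {
      Codegen cg;
      LocalAssembler la;
      EmitSlowPath(&cg);
      EmitIntrinsic(&la);
    }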
intrinsics_x86_64.cc
    55   #define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->    macro
    75   __ movl(trg_reg, CpuRegister(RAX));    in MoveFromReturnRegister()
    82   __ movq(trg_reg, CpuRegister(RAX));    in MoveFromReturnRegister()
    94   __ movsd(trg_reg, XmmRegister(XMM0));    in MoveFromReturnRegister()
    101  __ movss(trg_reg, XmmRegister(XMM0));    in MoveFromReturnRegister()
    125  __ Bind(GetEntryLabel());    in EmitNativeCode()
    148  __ jmp(GetExitLabel());    in EmitNativeCode()
    158  #undef __
    159  #define __ assembler->    macro
    180  __ movd(output.AsRegister<CpuRegister>(), input.AsFpuRegister<XmmRegister>(), is64bit);    in MoveFPToInt()
    [all …]
code_generator_mips64.cc
    106  #define __ down_cast<CodeGeneratorMIPS64*>(codegen)->GetAssembler()->    macro
    120  __ Bind(GetEntryLabel());    in EmitNativeCode()
    151  __ Bind(GetEntryLabel());    in EmitNativeCode()
    178  __ Bind(GetEntryLabel());    in EmitNativeCode()
    182  __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());    in EmitNativeCode()
    201  __ B(GetExitLabel());    in EmitNativeCode()
    230  __ Bind(GetEntryLabel());    in EmitNativeCode()
    234  __ LoadConst32(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());    in EmitNativeCode()
    246  __ B(GetExitLabel());    in EmitNativeCode()
    261  __ Bind(GetEntryLabel());    in EmitNativeCode()
    [all …]
intrinsics_arm.cc
    41   #define __ codegen->GetAssembler()->    macro
    59   __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));    in MoveFromReturnRegister()
    60   __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));    in MoveFromReturnRegister()
    65   __ mov(trg_reg_hi, ShifterOperand(res_reg_hi));    in MoveFromReturnRegister()
    66   __ mov(trg_reg_lo, ShifterOperand(res_reg_lo));    in MoveFromReturnRegister()
    72   __ mov(trg_reg, ShifterOperand(res_reg));    in MoveFromReturnRegister()
    97   __ Bind(GetEntryLabel());    in EmitNativeCode()
    120  __ b(GetExitLabel());    in EmitNativeCode()
    130  #undef __
    138  #define __ assembler->    macro
    [all …]
intrinsics_arm64.cc
    66   #define __ codegen->GetAssembler()->vixl_masm_->    macro
    81   __ Mov(trg_reg, res_reg, kDiscardForSameWReg);    in MoveFromReturnRegister()
    85   __ Fmov(trg_reg, res_reg);    in MoveFromReturnRegister()
    106  __ Bind(GetEntryLabel());    in EmitNativeCode()
    129  __ B(GetExitLabel());    in EmitNativeCode()
    139  #undef __
    147  #define __ masm->    macro
    168  __ Fmov(is64bit ? XRegisterFrom(output) : WRegisterFrom(output),    in MoveFPToInt()
    175  __ Fmov(is64bit ? DRegisterFrom(output) : SRegisterFrom(output),    in MoveIntToFP()
    223  __ Rev16(WRegisterFrom(out), WRegisterFrom(in));    in GenReverseBytes()
    [all …]
code_generator_arm64.cc
    38   #ifdef __
    104  #define __ down_cast<CodeGeneratorARM64*>(codegen)->GetVIXLAssembler()->    macro
    119  __ Bind(GetEntryLabel());    in EmitNativeCode()
    145  __ Bind(GetEntryLabel());    in EmitNativeCode()
    170  __ Bind(GetEntryLabel());    in EmitNativeCode()
    174  __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex());    in EmitNativeCode()
    193  __ B(GetExitLabel());    in EmitNativeCode()
    222  __ Bind(GetEntryLabel());    in EmitNativeCode()
    226  __ Mov(calling_convention.GetRegisterAt(0).W(), instruction_->GetStringIndex());    in EmitNativeCode()
    234  __ B(GetExitLabel());    in EmitNativeCode()
    [all …]
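The hit at line 38 is the other half of the discipline: before this file claims `__` (eventually pointing it at the VIXL macro-assembler, line 104), it checks that no earlier header already defined it. A sketch of that guard; the error text and ToyMasm type are illustrative:

    // Refuse to compile if some earlier header already claimed `__`.
    #ifdef __
    #error "macro __ already defined; an earlier header leaked it"
    #endif

    #include <cstdio>

    struct ToyMasm { void Mov() { std::printf("mov\n"); } };

    #define __ masm->  // the real file points this at the VIXL macro-assembler
    void Emit(ToyMasm* masm) { __ Mov(); }
    #undef __          // matched with the define, so nothing leaks past here

    int main() { ToyMasm m; Emit(&m); }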
/art/runtime/hprof/

hprof.cc
    407  #define __ output_->    macro
    526  __ AddU4(nextSerialNumber++);    in WriteClassTable()
    527  __ AddObjectId(c);    in WriteClassTable()
    528  __ AddU4(kHprofNullStackTrace);    in WriteClassTable()
    529  __ AddStringId(LookupClassNameId(c));    in WriteClassTable()
    544  __ AddU4(id);    in WriteStringTable()
    545  __ AddUtf8String(string.c_str());    in WriteStringTable()
    605  __ AddU1List(reinterpret_cast<const uint8_t*>(magic), sizeof(magic));    in WriteFixedHeader()
    612  __ AddU4(sizeof(uint32_t));    in WriteFixedHeader()
    620  __ AddU4(static_cast<uint32_t>(nowMs >> 32));    in WriteFixedHeader()
    [all …]
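Here the same shorthand fronts a byte stream rather than an assembler: `__ AddU4(...)` appends big-endian words to the hprof output. The header hits show an ID size of sizeof(uint32_t) and a 64-bit millisecond timestamp split across two U4 writes, high word first at line 620. A sketch under those assumptions; BigEndianOut is a stand-in for hprof's writer class:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    class BigEndianOut {
     public:
      void AddU4(uint32_t v) {
        for (int shift = 24; shift >= 0; shift -= 8) {
          bytes_.push_back(static_cast<uint8_t>(v >> shift));  // big-endian
        }
      }
      size_t Size() const { return bytes_.size(); }
     private:
      std::vector<uint8_t> bytes_;
    };

    #define __ output_->

    // The parameter is named output_ on purpose, so `__` expands correctly.
    void WriteHeaderSketch(BigEndianOut* output_, uint64_t now_ms) {
      __ AddU4(sizeof(uint32_t));                     // ID size, as at line 612
      __ AddU4(static_cast<uint32_t>(now_ms >> 32));  // high word, line 620
      __ AddU4(static_cast<uint32_t>(now_ms));        // low word follows
    }

    #undef __

    int main() {
      BigEndianOut out;
      WriteHeaderSketch(&out, 0x0123456789ABCDEFull);
      std::printf("%zu bytes\n", out.Size());  // 12
    }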
/art/compiler/trampolines/

trampoline_compiler.cc
    27   #define __ assembler->    macro
    38   __ LoadFromOffset(kLoadWord, PC, R0, offset.Int32Value());    in CreateTrampoline()
    41   __ LoadFromOffset(kLoadWord, IP, R0, JNIEnvExt::SelfOffset().Int32Value());    in CreateTrampoline()
    42   __ LoadFromOffset(kLoadWord, PC, IP, offset.Int32Value());    in CreateTrampoline()
    45   __ LoadFromOffset(kLoadWord, PC, R9, offset.Int32Value());    in CreateTrampoline()
    47   __ bkpt(0);    in CreateTrampoline()
    65   __ JumpTo(Arm64ManagedRegister::FromXRegister(X0), Offset(offset.Int32Value()),    in CreateTrampoline()
    70   __ LoadRawPtr(Arm64ManagedRegister::FromXRegister(IP1),    in CreateTrampoline()
    74   __ JumpTo(Arm64ManagedRegister::FromXRegister(IP1), Offset(offset.Int32Value()),    in CreateTrampoline()
    79   __ JumpTo(Arm64ManagedRegister::FromXRegister(TR), Offset(offset.Int32Value()),    in CreateTrampoline()
    [all …]
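CreateTrampoline() emits one of several tiny entry sequences depending on how the caller reaches the runtime: through the first argument register, through the JNIEnv (chasing JNIEnvExt::SelfOffset() to the thread first, lines 41-42), or directly through the thread register (R9 on ARM, line 45), always followed by a breakpoint that should never execute. A sketch of that dispatch; the enum values and the emitted text are illustrative, not ART's:

    #include <cstdio>

    // Illustrative ABI kinds; ART's real enum and registers differ per arch.
    enum class TrampolineAbi { kViaArg0, kViaJniEnv, kViaThreadReg };

    void CreateTrampolineSketch(TrampolineAbi abi, int offset) {
      switch (abi) {
        case TrampolineAbi::kViaArg0:
          std::printf("  ldr pc, [r0, #%d]\n", offset);  // off the first arg
          break;
        case TrampolineAbi::kViaJniEnv:
          std::printf("  ldr ip, [r0, #self]\n");  // JNIEnv -> Thread*, cf. 41
          std::printf("  ldr pc, [ip, #%d]\n", offset);
          break;
        case TrampolineAbi::kViaThreadReg:
          std::printf("  ldr pc, [r9, #%d]\n", offset);  // r9 holds Thread*
          break;
      }
      std::printf("  bkpt #0\n");  // unreachable, cf. line 47
    }

    int main() { CreateTrampolineSketch(TrampolineAbi::kViaJniEnv, 48); }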
/art/compiler/utils/arm/

assembler_thumb2_test.cc
    214  #define __ GetAssembler()->    macro, in TEST_F()
    215  __ eor(arm::R1, arm::R1, arm::ShifterOperand(arm::R0));    in TEST_F()
    216  __ eor(arm::R1, arm::R0, arm::ShifterOperand(arm::R1));    in TEST_F()
    217  __ eor(arm::R1, arm::R8, arm::ShifterOperand(arm::R0));    in TEST_F()
    218  __ eor(arm::R8, arm::R1, arm::ShifterOperand(arm::R0));    in TEST_F()
    219  __ eor(arm::R1, arm::R0, arm::ShifterOperand(arm::R8));    in TEST_F()
    231  __ subs(arm::R1, arm::R0, arm::ShifterOperand(42));    in TEST_F()
    232  __ sub(arm::R1, arm::R0, arm::ShifterOperand(42));    in TEST_F()
    241  __ adds(arm::R1, arm::R0, arm::ShifterOperand(42));    in TEST_F()
    242  __ add(arm::R1, arm::R0, arm::ShifterOperand(42));    in TEST_F()
    [all …]
assembler_arm.cc
    852  #define __ sp_asm->    macro, in Emit()
    853  __ Bind(&entry_);    in Emit()
    855  __ DecreaseFrameSize(stack_adjust_);    in Emit()
    859  __ mov(R0, ShifterOperand(scratch_.AsCoreRegister()));    in Emit()
    861  __ LoadFromOffset(kLoadWord, R12, TR, QUICK_ENTRYPOINT_OFFSET(4, pDeliverException).Int32Value());    in Emit()
    862  __ blx(R12);    in Emit()
    864  __ bkpt(0);    in Emit()
    865  #undef __    in Emit()
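Unlike the file-scope defines above, the assembler back ends define `__` inside a single function: this exception slow path binds its entry label, unwinds any extra frame growth, moves the exception object to R0, and calls pDeliverException through R12, with the #undef on the function's last line. A sketch of that bracketing, using a stand-in SpAsm type:

    #include <cstdio>

    struct SpAsm {
      void Bind(const char* label) { std::printf("%s:\n", label); }
      void DecreaseFrameSize(int bytes) { std::printf("  add sp, sp, #%d\n", bytes); }
      void blx(const char* reg) { std::printf("  blx %s\n", reg); }
      void bkpt() { std::printf("  bkpt #0\n"); }
    };

    void Emit(SpAsm* sp_asm, int stack_adjust) {
    #define __ sp_asm->
      __ Bind("exception_entry");
      if (stack_adjust != 0) {  // undo local frame growth before delivering
        __ DecreaseFrameSize(stack_adjust);
      }
      __ blx("r12");            // pDeliverException never returns
      __ bkpt();                // so anything after the call is unreachable
    #undef __
    }

    int main() {
      SpAsm a;
      Emit(&a, 16);
    }

The MIPS, MIPS64, x86, and x86-64 assemblers below follow the same function-scoped define/#undef shape for their exception paths.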
/art/build/

Android.cpplint.mk
    39   art_cpplint_touch := $$(OUT_CPPLINT)/$$(subst /,__,$$(art_cpplint_file))
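The one non-C++ hit: cpplint's makefile flattens each source path into a unique touch-file name by replacing every `/` with `__`. For concreteness, the same transformation in C++ (FlattenPath is ours, not part of the build):

    #include <cstdio>
    #include <string>

    std::string FlattenPath(std::string path) {
      for (size_t pos = 0;
           (pos = path.find('/', pos)) != std::string::npos;
           pos += 2) {            // skip past the two-character replacement
        path.replace(pos, 1, "__");
      }
      return path;
    }

    int main() {
      // prints: art__compiler__jni__quick__jni_compiler.cc
      std::printf("%s\n",
                  FlattenPath("art/compiler/jni/quick/jni_compiler.cc").c_str());
    }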
/art/compiler/utils/mips/

assembler_mips.cc
    968  #define __ sp_asm->    macro, in Emit()
    969  __ Bind(&entry_, false);    in Emit()
    971  __ DecreaseFrameSize(stack_adjust_);    in Emit()
    975  __ Move(A0, scratch_.AsCoreRegister());    in Emit()
    977  __ LoadFromOffset(kLoadWord, T9, S1, QUICK_ENTRYPOINT_OFFSET(4, pDeliverException).Int32Value());    in Emit()
    978  __ Jr(T9);    in Emit()
    980  __ Break();    in Emit()
    981  #undef __    in Emit()
/art/compiler/utils/mips64/

assembler_mips64.cc
    1537  #define __ sp_asm->    macro, in Emit()
    1538  __ Bind(&entry_);    in Emit()
    1540  __ DecreaseFrameSize(stack_adjust_);    in Emit()
    1544  __ Move(A0, scratch_.AsGpuRegister());    in Emit()
    1546  __ LoadFromOffset(kLoadDoubleword, T9, S1,    in Emit()
    1549  __ Jr(T9);    in Emit()
    1551  __ Break();    in Emit()
    1552  #undef __    in Emit()
/art/compiler/utils/x86/

assembler_x86.cc
    2174  #define __ sp_asm->    macro, in Emit()
    2175  __ Bind(&entry_);    in Emit()
    2178  __ DecreaseFrameSize(stack_adjust_);    in Emit()
    2181  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<4>()));    in Emit()
    2182  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(4, pDeliverException)));    in Emit()
    2184  __ int3();    in Emit()
    2185  #undef __    in Emit()
/art/compiler/utils/x86_64/

assembler_x86_64.cc
    2871  #define __ sp_asm->    macro, in Emit()
    2872  __ Bind(&entry_);    in Emit()
    2875  __ DecreaseFrameSize(stack_adjust_);    in Emit()
    2878  __ gs()->movq(CpuRegister(RDI), Address::Absolute(Thread::ExceptionOffset<8>(), true));    in Emit()
    2879  __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(8, pDeliverException), true));    in Emit()
    2881  __ int3();    in Emit()
    2882  #undef __    in Emit()
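Across the two x86 back ends the current thread is reached through a segment override: fs() on 32-bit x86 (line 2181 above), gs() on x86-64 (line 2878). A sketch of what those calls add to the encoding; ToyAssembler and the chained-call style are illustrative, but 0x64 and 0x65 are the real FS/GS override prefix bytes:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    class ToyAssembler {
     public:
      ToyAssembler* fs() { bytes_.push_back(0x64); return this; }  // FS prefix
      ToyAssembler* gs() { bytes_.push_back(0x65); return this; }  // GS prefix
      void movl() { bytes_.push_back(0x8B); }  // opcode only, operands elided
      void Dump() const {
        for (uint8_t b : bytes_) std::printf("%02x ", b);
        std::printf("\n");
      }
     private:
      std::vector<uint8_t> bytes_;
    };

    #define __ sp_asm->

    int main() {
      ToyAssembler a, *sp_asm = &a;
      __ fs()->movl();  // 64 8b ... as 32-bit x86 emits
      __ gs()->movl();  // 65 8b ... as x86-64 emits
      __ Dump();
    }

    #undef __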