/art/runtime/arch/arm64/ |
D | memcmp16_arm64.S |
      42  #define tmp2 x9                                    macro
     116  mov tmp2, #~0
     118  lsr tmp2, tmp2, tmp1          /* Shift (tmp1 & 63). */
     120  orr data1, data1, tmp2
     121  orr data2, data2, tmp2
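The mov/lsr/orr sequence above is the usual assembly-memcmp trick for the final partial word: build an all-ones mask, shift it so ones land only on the don't-care bits, and OR it into both operands so those bits can never cause a mismatch. A minimal C++ sketch of the idea, with illustrative names (CompareTailWords, valid_bits) that are not from the source and an ordering that is only illustrative:

    #include <cstdint>

    // Hedged sketch: compare the final, partially filled 64-bit words of two
    // buffers by forcing the don't-care bits of both words to ones, so only
    // the valid bits (assumed here to sit in the high part) can differ.
    inline int CompareTailWords(uint64_t data1, uint64_t data2, unsigned valid_bits) {
      uint64_t mask = ~UINT64_C(0) >> valid_bits;  // mov tmp2, #~0 ; lsr tmp2, tmp2, tmp1
      data1 |= mask;                               // orr data1, data1, tmp2
      data2 |= mask;                               // orr data2, data2, tmp2
      if (data1 == data2) return 0;
      return data1 < data2 ? -1 : 1;               // ordering is illustrative only
    }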
|
/art/runtime/arch/arm/ |
D | asm_support_arm.S |
     367  .macro LOCK_OBJECT_FAST_PATH obj, tmp1, tmp2, tmp3, slow_lock, can_be_null
     373  ldrex \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
     374  eor \tmp3, \tmp2, \tmp1       @ Prepare the value to store if unlocked
     378  ands ip, \tmp2, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED   @ Test the non-gc bits.
     381  strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
     382  cbnz \tmp2, 3f                @ If store failed, retry.
     385  2:  @ tmp2: original lock word, tmp1: thread_id, tmp3: tmp2 ^ tmp1
     446  sub \tmp3, \tmp2, #LOCK_WORD_THIN_LOCK_COUNT_ONE   @ Decrement recursive lock count.
     451  strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
     452  cbnz \tmp2, 3f                @ If the store failed, retry.
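The ldrex/strex pair with the cbnz retry is a load-linked/store-conditional loop: read the lock word exclusively, compute the locked value, and retry if another core touched the word before the store. A hedged C++ sketch of that fast-path shape, using compare_exchange_weak in place of ldrex/strex; the mask constants and the TryLockFast name are placeholders, not ART's real lock-word layout:

    #include <atomic>
    #include <cstdint>

    constexpr uint32_t kGcStateMask   = 0x30000000;  // assumed placeholder layout
    constexpr uint32_t kThinOwnerMask = 0x0000ffff;  // assumed placeholder layout

    // If the lock word is unlocked apart from GC state bits, try to install the
    // current thread id; a failed exclusive store maps to a CAS failure and retry.
    bool TryLockFast(std::atomic<uint32_t>& lock_word, uint32_t thread_id) {
      uint32_t old_word = lock_word.load(std::memory_order_relaxed);  // like ldrex
      while ((old_word & ~kGcStateMask) == 0) {       // unlocked except for GC bits
        uint32_t new_word = old_word | (thread_id & kThinOwnerMask);
        if (lock_word.compare_exchange_weak(old_word, new_word,
                                            std::memory_order_acquire,
                                            std::memory_order_relaxed)) {
          return true;                                // acquired on the fast path
        }
        // CAS failure refreshed old_word; loop and re-test, like "cbnz tmp2, 3f".
      }
      return false;                                   // fall back to the slow path
    }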
|
/art/test/115-native-bridge/ |
D | nativebridge.cc |
     233  struct sigaction64 tmp2;                         in trampoline_Java_Main_testSignal() local
     234  sigemptyset64(&tmp2.sa_mask);                    in trampoline_Java_Main_testSignal()
     235  tmp2.sa_sigaction = test_sigaction_handler;      in trampoline_Java_Main_testSignal()
     237  tmp2.sa_restorer = nullptr;                      in trampoline_Java_Main_testSignal()
     240  sigaction64(SIGSEGV, &tmp2, nullptr);            in trampoline_Java_Main_testSignal()
     241  sigaction64(SIGILL, &tmp2, nullptr);             in trampoline_Java_Main_testSignal()
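The test installs a siginfo-style handler for SIGSEGV and SIGILL through bionic's sigaction64/sigemptyset64 variants. A hedged sketch of the same registration pattern using the portable POSIX sigaction; HandleFault and InstallHandlers are illustrative names, not from the source:

    #include <signal.h>
    #include <cstring>

    // Three-argument handler matching sa_sigaction; a real native-bridge test
    // would inspect the siginfo/ucontext and either recover or re-raise.
    void HandleFault(int sig, siginfo_t* info, void* ucontext) {
      (void)sig; (void)info; (void)ucontext;
    }

    void InstallHandlers() {
      struct sigaction sa;
      std::memset(&sa, 0, sizeof(sa));
      sigemptyset(&sa.sa_mask);
      sa.sa_sigaction = HandleFault;
      sa.sa_flags = SA_SIGINFO;          // deliver siginfo_t to the handler
      sigaction(SIGSEGV, &sa, nullptr);  // segmentation faults
      sigaction(SIGILL, &sa, nullptr);   // illegal instructions
    }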
|
/art/compiler/optimizing/ |
D | code_generator_vector_arm64_neon.cc |
    1203  VRegister tmp2 = VRegisterFrom(locations->GetTemp(1));   in VisitVecSADAccumulate() local
    1205  __ Sxtl(tmp2.V8H(), right.V8B());                        in VisitVecSADAccumulate()
    1206  __ Sabal(acc.V4S(), tmp1.V4H(), tmp2.V4H());             in VisitVecSADAccumulate()
    1207  __ Sabal2(acc.V4S(), tmp1.V8H(), tmp2.V8H());            in VisitVecSADAccumulate()
    1209  __ Sxtl2(tmp2.V8H(), right.V16B());                      in VisitVecSADAccumulate()
    1210  __ Sabal(acc.V4S(), tmp1.V4H(), tmp2.V4H());             in VisitVecSADAccumulate()
    1211  __ Sabal2(acc.V4S(), tmp1.V8H(), tmp2.V8H());            in VisitVecSADAccumulate()
    1217  VRegister tmp2 = VRegisterFrom(locations->GetTemp(1));   in VisitVecSADAccumulate() local
    1221  __ Sxtl(tmp2.V8H(), right.V8B());                        in VisitVecSADAccumulate()
    1223  __ Sxtl(tmp4.V4S(), tmp2.V4H());                         in VisitVecSADAccumulate()
    [all …]
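The NEON sequence sign-extends narrow lanes (Sxtl/Sxtl2), takes absolute differences, and accumulates them into wider lanes (Sabal/Sabal2), which is the sum-of-absolute-differences pattern HVecSADAccumulate lowers to. A hedged scalar model of that computation; SadAccumulate and its lane handling are illustrative, not the code generator's actual output:

    #include <cstdint>
    #include <cstdlib>

    // Scalar model: widen signed 8-bit inputs, accumulate |a - b| into four
    // 32-bit accumulator lanes (acc must have at least 4 elements).
    void SadAccumulate(const int8_t* left, const int8_t* right,
                       int32_t* acc, size_t n) {
      for (size_t i = 0; i < n; ++i) {
        int32_t a = static_cast<int32_t>(left[i]);   // sign-extend, like Sxtl
        int32_t b = static_cast<int32_t>(right[i]);
        acc[i % 4] += std::abs(a - b);               // accumulate, like Sabal
      }
    }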
|
D | code_generator_riscv64.cc |
    1097  Location tmp2 = use_fp_tmp2                             in Exchange() local
    1105  codegen_->MoveLocation(tmp2, loc2, tmp2_type);          in Exchange()
    1119  codegen_->MoveLocation(loc1, tmp2, tmp2_type);          in Exchange()
    1132  XRegister tmp2 = srs.AllocateXRegister();               in GenerateClassInitializationCheck() local
    1138  __ Li(tmp2, ShiftedSignExtendedClassStatusValue<ClassStatus::kVisiblyInitialized>());   in GenerateClassInitializationCheck()
    1139  __ Bltu(tmp, tmp2, slow_path->GetEntryLabel());         in GenerateClassInitializationCheck()
    1498  XRegister tmp2 = srs2.AllocateXRegister();              in DivRemByPowerOfTwo() local
    1499  __ Li(tmp2, -abs_imm);                                  in DivRemByPowerOfTwo()
    1500  __ And(tmp, tmp, tmp2);                                 in DivRemByPowerOfTwo()
    2229  XRegister tmp2 = srs.AllocateXRegister();               in HandleBinaryOp() local
    [all …]
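The "Li(tmp2, -abs_imm); And(tmp, tmp, tmp2)" pair in DivRemByPowerOfTwo masks a biased dividend with -2^k, the standard way to get truncating signed division and remainder by a power of two without a divide instruction. A hedged sketch of that idea (not the exact ART lowering); the function name and parameters are illustrative, and it assumes arithmetic right shift of negative values:

    #include <cstdint>

    // q = dividend / 2^k and r = dividend % 2^k, both truncating toward zero.
    void DivRemByPowerOfTwo(int64_t dividend, unsigned k,
                            int64_t* quot, int64_t* rem) {
      int64_t abs_imm = INT64_C(1) << k;
      // Add (2^k - 1) only for negative dividends so the shift truncates toward zero.
      int64_t bias = (dividend >> 63) & (abs_imm - 1);
      int64_t biased = dividend + bias;
      *quot = biased >> k;                      // arithmetic shift of the biased value
      *rem = dividend - (biased & -abs_imm);    // "And(tmp, tmp, -abs_imm)" isolates quot * 2^k
    }

For example, dividend = -7, k = 2 gives bias = 3, quotient -1 and remainder -3, matching Java/C++ truncating semantics.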
|
D | intrinsics_riscv64.cc |
     509  XRegister tmp2 = srs.AllocateXRegister();             in VisitIntegerHighestOneBit() local
     511  __ Li(tmp2, INT64_C(-0x80000000));                    in VisitIntegerHighestOneBit()
     512  __ Srlw(tmp2, tmp2, tmp);                             in VisitIntegerHighestOneBit()
     513  __ And(rd, rs1, tmp2);  // Make sure the result is zero if the input is zero.   in VisitIntegerHighestOneBit()
     526  XRegister tmp2 = srs.AllocateXRegister();             in VisitLongHighestOneBit() local
     528  __ Li(tmp2, INT64_C(-0x8000000000000000));            in VisitLongHighestOneBit()
     529  __ Srl(tmp2, tmp2, tmp);                              in VisitLongHighestOneBit()
     530  __ And(rd, rs1, tmp2);  // Make sure the result is zero if the input is zero.   in VisitLongHighestOneBit()
     857  XRegister tmp2 = srs.AllocateXRegister();             in VisitReferenceRefersTo() local
     858  __ Li(tmp2, INT64_C(-1) & ~static_cast<int64_t>((1 << LockWord::kStateShift) - 1));   in VisitReferenceRefersTo()
     [all …]
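The highest-one-bit intrinsics shift a top-bit mask right by the leading-zero count held in tmp and then AND with the input, which both isolates the highest set bit and makes a zero input produce zero. A hedged C++ sketch of the same computation for the 32-bit case; HighestOneBit is an illustrative name and the explicit zero guard stands in for the AND's zero handling:

    #include <cstdint>

    uint32_t HighestOneBit(uint32_t x) {
      if (x == 0) return 0;  // __builtin_clz(0) is undefined; the AND makes this case free in the intrinsic
      uint32_t mask = UINT32_C(0x80000000) >> __builtin_clz(x);  // Li + Srlw
      return x & mask;                                           // And(rd, rs1, tmp2)
    }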
|
D | intrinsics_arm64.cc |
    2637  Register tmp2 = temps.AcquireX();                                   in VisitStringGetCharsNoCheck() local
    2657  __ Ldr(tmp2, MemOperand(srcObj, count_offset));                     in VisitStringGetCharsNoCheck()
    2658  __ Tbz(tmp2, 0, &compressed_string_preloop);                        in VisitStringGetCharsNoCheck()
    2676  __ Ldp(tmp1, tmp2, MemOperand(src_ptr, char_size * 8, PostIndex));  in VisitStringGetCharsNoCheck()
    2678  __ Stp(tmp1, tmp2, MemOperand(dst_ptr, char_size * 8, PostIndex));  in VisitStringGetCharsNoCheck()
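After the Tbz test on bit 0 of the count field (the string-compression flag) selects the uncompressed path, the main loop copies eight UTF-16 chars per iteration as two 64-bit words via post-indexed Ldp/Stp. A hedged sketch of just that bulk-copy shape; CopyChars is an illustrative name and memcpy stands in for the paired loads and stores:

    #include <cstdint>
    #include <cstring>

    void CopyChars(const uint16_t* src, uint16_t* dst, size_t count) {
      size_t i = 0;
      for (; i + 8 <= count; i += 8) {                // 8 chars = 16 bytes per iteration
        uint64_t w1, w2;
        std::memcpy(&w1, src + i, sizeof(w1));        // like Ldp tmp1, tmp2, [src_ptr], #16
        std::memcpy(&w2, src + i + 4, sizeof(w2));
        std::memcpy(dst + i, &w1, sizeof(w1));        // like Stp tmp1, tmp2, [dst_ptr], #16
        std::memcpy(dst + i + 4, &w2, sizeof(w2));
      }
      for (; i < count; ++i) {                        // tail, one char at a time
        dst[i] = src[i];
      }
    }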
|
/art/libartbase/base/unix_file/ |
D | fd_file_test.cc |
     606  art::ScratchFile tmp2;                                         in TEST_F() local
     607  EXPECT_TRUE(tmp2.GetFile()->WriteFully(&buffer[0], length));   in TEST_F()
     608  EXPECT_EQ(tmp2.GetFile()->GetLength(), length);                in TEST_F()
     612  tmp2.GetFile()->ResetOffset();                                 in TEST_F()
     614  EXPECT_EQ(reset_compare(tmp, tmp2), 0);                        in TEST_F()
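The test writes a buffer fully to a scratch file, checks the resulting length, and resets the offset before a comparison. A hedged stand-in for that pattern using plain POSIX calls instead of ART's ScratchFile/FdFile helpers; the file name template and assertions are illustrative:

    #include <unistd.h>
    #include <cassert>
    #include <cstdlib>
    #include <vector>

    int main() {
      char name[] = "/tmp/fd_file_sketch_XXXXXX";
      int fd = mkstemp(name);                   // temporary scratch file
      assert(fd >= 0);
      std::vector<char> buffer(1024, 0x2a);
      ssize_t written = write(fd, buffer.data(), buffer.size());
      assert(written == static_cast<ssize_t>(buffer.size()));              // "WriteFully"
      assert(lseek(fd, 0, SEEK_END) == static_cast<off_t>(buffer.size())); // "GetLength"
      lseek(fd, 0, SEEK_SET);                   // "ResetOffset" before any content comparison
      close(fd);
      unlink(name);
      return 0;
    }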
|
/art/runtime/interpreter/mterp/armng/ |
D | main.S |
     234  .macro CLEAR_SHADOW_PAIR vreg, tmp1, tmp2              argument
     236  add \tmp2, \vreg, #1
     238  SET_VREG_SHADOW \tmp1, \tmp2
    1478  .macro ATOMIC_STORE64 addr, store1, store2, tmp1, tmp2, label
    1479  LDREXD_STREXD_LOOP \addr, \tmp1, \tmp2, \store1, \store2, \tmp1, \label
    1528  .macro LOOP_OVER_INTs shorty, arg_offset, regs, refs, stack_ptr, tmp1, tmp2, finished
    1539  add \tmp2, \stack_ptr, \arg_offset
    1540  ldr \tmp2, [\tmp2, #OFFSET_TO_FIRST_ARGUMENT_IN_STACK]
    1541  str \tmp2, [\regs, \arg_offset]
    1544  str \tmp2, [\refs, \arg_offset]
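ATOMIC_STORE64 expands to an ldrexd/strexd loop because a plain 32-bit ARM store pair is not single-copy atomic for 64-bit values. A hedged portable model of that loop; std::atomic already guarantees the atomicity, so the explicit retry here only mirrors the shape of LDREXD_STREXD_LOOP, and AtomicStore64 is an illustrative name:

    #include <atomic>
    #include <cstdint>

    void AtomicStore64(std::atomic<uint64_t>* addr, uint64_t value) {
      uint64_t observed = addr->load(std::memory_order_relaxed);   // like ldrexd
      while (!addr->compare_exchange_weak(observed, value,
                                          std::memory_order_release,
                                          std::memory_order_relaxed)) {
        // The exclusive store reported failure; retry with the freshly observed value.
      }
    }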
|
/art/runtime/ |
D | runtime_callbacks_test.cc |
     285  std::string tmp, tmp2;                              in ClassPrepare() local
     287  + "[" + temp_klass->GetDescriptor(&tmp2) + "]";     in ClassPrepare()
|
/art/test/160-read-barrier-stress/src/ |
D | Main.java |
     136  Object tmp2 = la[i0 + 1];                           in $noinline$testArrayReadsWithNonConstIndex() local
     143  $noinline$assertDifferentObject(f0000, tmp2);       in $noinline$testArrayReadsWithNonConstIndex()
|
/art/compiler/utils/riscv64/ |
D | assembler_riscv64_test.cc |
    9205  XRegister tmp2 = srs.AllocateXRegister();           in TEST_F() local
    9206  EXPECT_EQ(TMP2, tmp2);                              in TEST_F()
|