/art/compiler/utils/riscv64/

jni_macro_assembler_riscv64.cc
  421  __ Bnez(TMP, Riscv64JNIMacroLabel::Cast(slow_path)->AsRiscv64());  in DecodeJNITransitionOrLocalJObject()
  480  __ Bnez(scratch, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());  in TryToTransitionFromRunnableToNative()
  483  __ Bnez(scratch, &retry);  in TryToTransitionFromRunnableToNative()
  522  __ Bnez(scratch, &retry);  in TryToTransitionFromNativeToRunnable()
  535  __ Bnez(tmp, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());  in SuspendCheck()
  542  __ Bnez(tmp, Riscv64JNIMacroLabel::Cast(label)->AsRiscv64());  in ExceptionPoll()
  579  __ Bnez(test_reg, down_cast<Riscv64Label*>(Riscv64JNIMacroLabel::Cast(label)->AsRiscv64()));  in TestGcMarking()
  613  __ Bnez(tmp, down_cast<Riscv64Label*>(Riscv64JNIMacroLabel::Cast(label)->AsRiscv64()));  in TestByteAndJumpIfNotZero()

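Every label-taking hit above first unwraps the architecture-neutral JNIMacroLabel via Riscv64JNIMacroLabel::Cast(label)->AsRiscv64() before the branch is emitted. A minimal, self-contained sketch of that unwrapping pattern; the types here are illustrative stand-ins, not the real ART declarations:

    #include <cstdio>

    // Stand-ins for the real ART types; only the Cast()/AsRiscv64() shape matters.
    struct Riscv64Label { int position = -1; };

    struct JNIMacroLabel {};  // Architecture-neutral handle used by the JNI macro assembler API.

    struct Riscv64JNIMacroLabel : JNIMacroLabel {
      Riscv64Label label;
      // In ART the conversion is a checked down_cast; static_cast stands in for it here.
      static Riscv64JNIMacroLabel* Cast(JNIMacroLabel* l) {
        return static_cast<Riscv64JNIMacroLabel*>(l);
      }
      Riscv64Label* AsRiscv64() { return &label; }
    };

    int main() {
      Riscv64JNIMacroLabel concrete;
      JNIMacroLabel* neutral = &concrete;
      // The unwrapping step seen at each call site, before Bnez is emitted:
      Riscv64Label* target = Riscv64JNIMacroLabel::Cast(neutral)->AsRiscv64();
      std::printf("label position: %d\n", target->position);
    }
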
assembler_riscv64_test.cc
  533   __ Bnez(rs, label, is_bare);  in EmitBcondForAllConditions()
  8305  TEST_F(AssemblerRISCV64Test, Bnez) {
  8308      &Riscv64Assembler::Bnez, /*imm_bits=*/-12, /*shift=*/1, "bne {reg}, zero, {imm}\n"),
  8316      &Riscv64Assembler::Bnez, /*imm_bits=*/-11, /*shift=*/2, "bne {reg}, zero, {imm}\n"),

assembler_riscv64.h
  1748  void Bnez(XRegister rs, int32_t offset);
  1805  void Bnez(XRegister rs, Riscv64Label* label, bool is_bare = false);

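Two overloads are declared: one taking a raw signed byte offset, and one taking a Riscv64Label with an is_bare flag. A bare branch stays a single instruction and is never relaxed into a longer far-branch sequence; the call-site comments below ("Bare: `TMP` shall not be clobbered") indicate that non-bare branches may be rewritten into a sequence that uses TMP. Going by the test expectation "bne {reg}, zero, {imm}" above, the offset form is presumably the standard RISC-V alias; a sketch, not the verified ART body:

    // Presumed shape of the offset overload: Bnez is the standard RISC-V
    // alias for `bne rs, zero, offset`.
    void Riscv64Assembler::Bnez(XRegister rs, int32_t offset) {
      Bne(rs, Zero, offset);
    }
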
assembler_riscv64.cc
  6223  void Riscv64Assembler::Bnez(XRegister rs, int32_t offset) {
  6448  void Riscv64Assembler::Bnez(XRegister rs, Riscv64Label* label, bool is_bare) {

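Whatever the exact body at line 6223, the result at the machine level is a single B-type instruction. As a self-contained reference, here is a plain encoder for `bne rs1, rs2, offset` following the RISC-V ISA's B-type immediate layout (ordinary RISC-V encoding, not ART code; register numbers are raw, e.g. a0 = x10, zero = x0):

    #include <cstdint>
    #include <cstdio>

    // Encode `bne rs1, rs2, offset` (offset in bytes, even, within +/-4 KiB).
    uint32_t EncodeBne(uint32_t rs1, uint32_t rs2, int32_t offset) {
      uint32_t imm = static_cast<uint32_t>(offset);
      return (((imm >> 12) & 0x1u) << 31) |  // imm[12]
             (((imm >> 5) & 0x3fu) << 25) |  // imm[10:5]
             ((rs2 & 0x1fu) << 20) |
             ((rs1 & 0x1fu) << 15) |
             (0x1u << 12) |                  // funct3 = 001 (BNE)
             (((imm >> 1) & 0xfu) << 8) |    // imm[4:1]
             (((imm >> 11) & 0x1u) << 7) |   // imm[11]
             0x63u;                          // opcode = BRANCH
    }

    int main() {
      // bnez a0, +16 is bne a0, zero, +16; prints 00051863.
      std::printf("%08x\n", EncodeBne(/*rs1=*/10, /*rs2=*/0, /*offset=*/16));
    }
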
/art/compiler/optimizing/

intrinsics_riscv64.cc
  768   __ Bnez(temp, slow_path->GetEntryLabel());  in VisitReferenceGetReferent()
  783   __ Bnez(temp, slow_path->GetEntryLabel());  in VisitReferenceGetReferent()
  904   __ Bnez(tmp, slow_path->GetEntryLabel());  in GenerateVisitStringIndexOf()
  1306  __ Bnez(store_result, &loop, /*is_bare=*/ true);  in GenerateCompareAndSet()
  1522  __ Bnez(pos.AsRegister<XRegister>(), slow_path->GetEntryLabel());  in CheckSystemArrayCopyPosition()
  1726  __ Bnez(temp, intrinsic_slow_path->GetEntryLabel());  in VisitSystemArrayCopy()
  1773  __ Bnez(temp1, intrinsic_slow_path->GetEntryLabel());  in VisitSystemArrayCopy()
  1977  __ Bnez(temp, &retry, /*is_bare=*/ true);  // Bare: `TMP` shall not be clobbered.  in GenerateGetAndUpdate()
  2007  __ Bnez(temp, &retry, /*is_bare=*/ true);  // Bare: `TMP` shall not be clobbered.  in GenerateGetAndUpdate()
  3005  __ Bnez(temp4, &different_compression_diff);  in VisitStringCompareTo()
  [all …]

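The GenerateCompareAndSet and GenerateGetAndUpdate hits use Bnez to close a load-reserved/store-conditional retry loop: the SC result register is nonzero on failure, so the loop branches back until the store succeeds, and the branch must stay bare so that no relaxation sequence clobbers live scratch registers mid-loop. A self-contained illustration of that loop shape in inline assembly (a hypothetical helper, not the code ART emits; builds only when targeting riscv64):

    #include <cstdint>

    // Compare-and-swap via an LR/SC retry loop. Returns the value observed
    // at *addr before the attempt.
    uint32_t CasLoop(uint32_t* addr, uint32_t expected, uint32_t desired) {
      uint32_t observed;
      uint32_t store_failed;
      asm volatile(
          "1:\n\t"
          "lr.w.aq  %0, (%2)\n\t"     // Load-reserve the current value.
          "bne      %0, %3, 2f\n\t"   // Not the expected value: give up.
          "sc.w.rl  %1, %4, (%2)\n\t" // Try to store the new value.
          "bnez     %1, 1b\n\t"       // Nonzero means the SC failed: retry.
          "2:"
          : "=&r"(observed), "=&r"(store_failed)
          : "r"(addr), "r"(expected), "r"(desired)
          : "memory");
      return observed;
    }
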
code_generator_riscv64.cc
  1187  __ Bnez(tmp, slow_path->GetEntryLabel());  in GenerateSuspendCheck()
  1280  __ Bnez(temp.AsRegister<XRegister>(), slow_path->GetEntryLabel());  in EmitBakerReadBarierMarkingCheck()
  1392  __ Bnez(cond_val.AsRegister<XRegister>(), true_target);  in GenerateTestAndBranch()
  1692  __ Bnez(left, label);  in GenerateIntLongCompareAndBranch()
  1853  __ Bnez(rd, label);  in GenerateFpCondition()
  1937  __ Bnez(tmp, slow_path->GetEntryLabel());  in GenerateReferenceLoadWithBakerReadBarrier()
  2483  __ Bnez(temp, &done);  in CheckGCCardIsValid()
  2625  __ Bnez(tmp, slow_path->GetEntryLabel());  in GenerateMethodEntryExitHook()
  2641  __ Bnez(tmp, slow_path->GetEntryLabel());  in GenerateMethodEntryExitHook()
  2819  __ Bnez(tmp, &uncompressed_load);  in VisitArrayGet()
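
Most of these code-generator sites share one shape: RISC-V has no condition codes, so a flag or condition is materialized into a scratch register, masked if needed, and a single Bnez diverts to a slow path or branch target. A runnable sketch of that shape with a toy printing emitter (the interface, offsets, and mask are made up, standing in for Riscv64Assembler and the real runtime constants):

    #include <cstdio>

    struct Label { const char* name; };

    // Toy emitter that prints assembly; only the recurring
    // "load flags, mask, Bnez to slow path" shape is the point here.
    struct Emitter {
      void Lw(int rd, int rs1, int off) { std::printf("lw    x%d, %d(x%d)\n", rd, off, rs1); }
      void Andi(int rd, int rs1, int imm) { std::printf("andi  x%d, x%d, %d\n", rd, rs1, imm); }
      void Bnez(int rs, const Label& l) { std::printf("bnez  x%d, %s\n", rs, l.name); }
    };

    int main() {
      Emitter e;
      Label slow_path{"slow_path_entry"};
      e.Lw(/*tmp=*/5, /*thread_reg=*/9, /*flags_offset=*/0);  // Load thread flags.
      e.Andi(5, 5, /*checked_mask=*/0x3);                     // Keep the checked bits.
      e.Bnez(5, slow_path);                                   // Any set bit: slow path.
    }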