/external/v8/test/cctest/ |
D | test-assembler-arm64.cc |
    4781  __ Asr(x16, x0, x1);  in TEST() local
    4782  __ Asr(x17, x0, x2);  in TEST() local
    4783  __ Asr(x18, x0, x3);  in TEST() local
    4784  __ Asr(x19, x0, x4);  in TEST() local
    4785  __ Asr(x20, x0, x5);  in TEST() local
    4786  __ Asr(x21, x0, x6);  in TEST() local
    4788  __ Asr(w22, w0, w1);  in TEST() local
    4789  __ Asr(w23, w0, w2);  in TEST() local
    4790  __ Asr(w24, w0, w3);  in TEST() local
    4791  __ Asr(w25, w0, w4);  in TEST() local
    [all …]
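These cctest hits exercise the register-operand form of Asr (the ARMv8 ASRV instruction) on both 64-bit X and 32-bit W destinations. As a hypothetical, standalone illustration (not taken from the test file), the semantics being verified amount to an arithmetic right shift whose shift amount comes from a register and is taken modulo the operand width:

    #include <cassert>
    #include <cstdint>

    // Sketch of the register-operand Asr semantics; assumes a sign-extending
    // right shift of signed values (guaranteed since C++20, and the behavior
    // these backends rely on).
    int64_t AsrX(int64_t value, uint64_t shift) {   // x-register form
      return value >> (shift % 64);
    }
    int32_t AsrW(int32_t value, uint64_t shift) {   // w-register form
      return value >> (shift % 32);
    }

    int main() {
      assert(AsrX(INT64_C(-4096), 4) == INT64_C(-256));  // sign bit replicated
      assert(AsrW(-16, 2) == -4);
      return 0;
    }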
|
/external/v8/src/crankshaft/arm64/ |
D | lithium-codegen-arm64.cc |
    3644  __ Asr(result, result, shift);  in DoFlooringDivByPowerOf2I() local
    4207  __ Asr(result, left, kSmiShift / 2);  in DoMulS() local
    4602  case Token::SAR: __ Asr(result, left, right); break;  in DoShiftI() local
    4625  case Token::SAR: __ Asr(result, left, shift_count); break;  in DoShiftI() local
    4660  __ Asr(result, left, result);  in DoShiftS() local
    4693  __ Asr(result, left, shift_count);  in DoShiftS() local
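In these Crankshaft hits, Token::SAR (JavaScript's signed `>>` operator) lowers directly to Asr, and DoFlooringDivByPowerOf2I uses Asr because, for two's-complement integers, an arithmetic right shift by k rounds toward negative infinity, i.e. it computes floor(x / 2^k). A minimal, hypothetical illustration of that equivalence (the real codegen also handles negative divisors and deoptimization, omitted here):

    #include <cassert>
    #include <cstdint>

    // floor(x / 2^k) via arithmetic shift; assumes a sign-extending right shift.
    int64_t FlooringDivByPowerOf2(int64_t x, unsigned k) {
      return x >> k;
    }

    int main() {
      assert(FlooringDivByPowerOf2(7, 1) == 3);    // floor(7 / 2)
      assert(FlooringDivByPowerOf2(-7, 1) == -4);  // floor(-7 / 2): rounds down, not toward zero
      return 0;
    }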
|
/external/vixl/src/vixl/a64/ |
D | macro-assembler-a64.h |
    941  void Asr(const Register& rd, const Register& rn, unsigned shift) {  in Asr() function
    948  void Asr(const Register& rd, const Register& rn, const Register& rm) {  in Asr() function
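The two overloads above distinguish a constant shift amount from a register-held one. Below is a self-contained mock (not the real vixl classes, which also guard macro-instruction emission) of the dispatch they are assumed to perform: the immediate overload emits the asr alias, the register overload emits asrv:

    #include <cstdio>

    struct Register { const char* name; };

    // Mock assembler: stand-ins for the underlying asr/asrv emitters.
    struct MacroAssemblerMock {
      void asr(const Register& rd, const Register& rn, unsigned shift) {
        std::printf("asr  %s, %s, #%u\n", rd.name, rn.name, shift);
      }
      void asrv(const Register& rd, const Register& rn, const Register& rm) {
        std::printf("asrv %s, %s, %s\n", rd.name, rn.name, rm.name);
      }
      // The two Asr overloads listed above, reduced to their assumed dispatch.
      void Asr(const Register& rd, const Register& rn, unsigned shift) { asr(rd, rn, shift); }
      void Asr(const Register& rd, const Register& rn, const Register& rm) { asrv(rd, rn, rm); }
    };

    int main() {
      MacroAssemblerMock masm;
      Register x16{"x16"}, x0{"x0"}, x1{"x1"};
      masm.Asr(x16, x0, 3);   // constant shift -> asr alias
      masm.Asr(x16, x0, x1);  // register shift -> asrv
      return 0;
    }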
|
/external/vixl/test/ |
D | test-assembler-a64.cc |
    8863  __ Asr(x16, x0, x1);  in TEST() local
    8864  __ Asr(x17, x0, x2);  in TEST() local
    8865  __ Asr(x18, x0, x3);  in TEST() local
    8866  __ Asr(x19, x0, x4);  in TEST() local
    8867  __ Asr(x20, x0, x5);  in TEST() local
    8868  __ Asr(x21, x0, x6);  in TEST() local
    8870  __ Asr(w22, w0, w1);  in TEST() local
    8871  __ Asr(w23, w0, w2);  in TEST() local
    8872  __ Asr(w24, w0, w3);  in TEST() local
    8873  __ Asr(w25, w0, w4);  in TEST() local
    [all …]
|
/external/v8/src/arm64/ |
D | macro-assembler-arm64-inl.h |
    322  void MacroAssembler::Asr(const Register& rd,  in Asr() function
|
/external/v8/src/full-codegen/arm64/ |
D | full-codegen-arm64.cc |
    1988  __ Asr(result, left, right);  in EmitInlineSmiBinaryOp() local
|