/external/vixl/test/aarch64/
test-trace-aarch64.cc
   47  #define __ masm->   (macro)
  in GenerateTestSequenceBase():
   55  __ adc(w3, w4, w5);
   56  __ adc(x6, x7, x8);
   57  __ adcs(w9, w10, w11);
   58  __ adcs(x12, x13, x14);
   59  __ add(w15, w16, w17);
   60  __ add(x18, x19, x20);
   61  __ adds(w21, w22, w23);
   62  __ adds(x24, x25, x26);
   63  __ and_(w27, w28, w29);
  [all …]
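Both VIXL test files rely on the same convention: `__` is a file-local macro naming the assembler in scope, so emission code reads like an assembly listing. A minimal sketch of the idea, assuming a VIXL-style `MacroAssembler*` (the include path and emitter body are illustrative, not the actual test sequence):

```cpp
#include "aarch64/macro-assembler-aarch64.h"

using namespace vixl::aarch64;

// File-local shorthand: each "__ insn(...)" expands to "masm->insn(...)".
#define __ masm->

void EmitAddSequence(MacroAssembler* masm) {
  __ adc(w3, w4, w5);     // add with carry
  __ adcs(w9, w10, w11);  // flag-setting variant
  __ add(w15, w16, w17);  // plain add
}

// Undefine at end of file so the one-character name cannot leak elsewhere.
#undef __
```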
test-assembler-aarch64.cc
   57  // __ mov(x0, Operand(1));
   96  #define __ masm.   (macro)
  127  __ PushCalleeSavedRegisters(); \
  135  __ Trace(static_cast<TraceParameters>(trace_parameters), TRACE_ENABLE); \
  139  __ EnableInstrumentation(); \
  144  __ DisableInstrumentation(); \
  146  __ Trace(LOG_ALL, TRACE_DISABLE); \
  148  __ PopCalleeSavedRegisters(); \
  149  __ Ret(); \
  194  __ PushCalleeSavedRegisters()
  [all …]
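Lines 127–149 there come from the test harness macros: generated code is bracketed by a callee-saved-register prologue and a `Ret()`, with simulator tracing toggled around the code under test, so each generated test body can be called like an ordinary function. Roughly, as a flattened sketch of what those macro bodies emit (an assumed shape, not the verbatim macros):

```cpp
#define __ masm.

// Emit the wrapper the test macros place around the code under test.
void EmitTestWrapper(MacroAssembler& masm, int trace_parameters) {
  __ PushCalleeSavedRegisters();  // preserve the caller's registers
  __ Trace(static_cast<TraceParameters>(trace_parameters), TRACE_ENABLE);

  // ... the instruction sequence under test is emitted here ...

  __ Trace(LOG_ALL, TRACE_DISABLE);
  __ PopCalleeSavedRegisters();
  __ Ret();                       // return to the C++ caller
}

#undef __
```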
/external/v8/src/interpreter/ |
interpreter.cc
   32  #define __ assembler->   (macro)
  in DoLdaZero():
  320  Node* zero_value = __ NumberConstant(0.0);
  321  __ SetAccumulator(zero_value);
  322  __ Dispatch();
  in DoLdaSmi():
  329  Node* raw_int = __ BytecodeOperandImm(0);
  330  Node* smi_int = __ SmiTag(raw_int);
  331  __ SetAccumulator(smi_int);
  332  __ Dispatch();
  in DoLdaConstant():
  339  Node* index = __ BytecodeOperandIdx(0);
  340  Node* constant = __ LoadConstantPoolEntry(index);
  [all …]
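The same shorthand drives V8's `InterpreterAssembler` here: each `Do<Bytecode>` handler builds `compiler::Node*` values and ends by dispatching to the next bytecode's handler. A sketch of the `LdaSmi` handler, reassembled from the matched lines (signatures as they appear in V8 of this vintage):

```cpp
#define __ assembler->

// LdaSmi <imm>: load an immediate small integer into the accumulator.
void Interpreter::DoLdaSmi(InterpreterAssembler* assembler) {
  Node* raw_int = __ BytecodeOperandImm(0);  // read the bytecode's operand 0
  Node* smi_int = __ SmiTag(raw_int);        // box the raw value as a Smi
  __ SetAccumulator(smi_int);                // write the accumulator register
  __ Dispatch();                             // tail-call the next handler
}

#undef __
```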
/external/v8/src/x64/ |
code-stubs-x64.cc
   23  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   26  __ popq(rcx);
   27  __ movq(MemOperand(rsp, rax, times_8, 0), rdi);
   28  __ pushq(rdi);
   29  __ pushq(rbx);
   30  __ pushq(rcx);
   31  __ addq(rax, Immediate(3));
   32  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   60  __ Push(descriptor.GetRegisterParameter(i));
   62  __ CallExternalReference(miss, param_count);
  [all …]
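The x64 `Generate()` body is easiest to read with the stack layout in mind: on x64 the return address sits on top of the arguments, so it is popped out of the way, extra values are stored and pushed, the return address goes back on top, and the argument count in `rax` is bumped to match before tail-calling the runtime. A commented sketch (the enclosing stub class is not shown in the match, and the register-role comments are inferences from V8's usual convention):

```cpp
#define __ ACCESS_MASM(masm)

void Generate(MacroAssembler* masm) {  // enclosing stub class elided
  __ popq(rcx);  // pop the return address; rsp now points at the arguments
  __ movq(MemOperand(rsp, rax, times_8, 0), rdi);  // store rdi into the slot
                                                   // just past the rax arguments
  __ pushq(rdi);                           // push two more values ...
  __ pushq(rbx);
  __ pushq(rcx);                           // ... and put the return address back
  __ addq(rax, Immediate(3));              // argc now covers the 3 extra slots
  __ TailCallRuntime(Runtime::kNewArray);  // tail call: does not return here
}

#undef __
```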
/external/v8/src/mips64/ |
codegen-mips64.cc
   19  #define __ masm.   (macro)
  in CreateMemCopyUint8Function():
   69  __ slti(a6, a2, 2 * loadstore_chunk);
   70  __ bne(a6, zero_reg, &lastb);
   71  __ mov(v0, a0);  // In delay slot.
   77  __ xor_(t8, a1, a0);
   78  __ andi(t8, t8, loadstore_chunk - 1);  // t8 is a0/a1 word-displacement.
   79  __ bne(t8, zero_reg, &unaligned);
   80  __ subu(a3, zero_reg, a0);  // In delay slot.
   82  __ andi(a3, a3, loadstore_chunk - 1);  // Copy a3 bytes to align a0/a1.
   83  __ beq(a3, zero_reg, &aligned);  // Already aligned.
  [all …]
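The `// In delay slot.` comments mark a MIPS idiom worth calling out: the instruction issued immediately after a branch executes whether or not the branch is taken, so the generator hoists useful work into that slot instead of leaving a nop. A minimal sketch of the pattern (the threshold and register roles are illustrative):

```cpp
#define __ masm.

// Set the return value in the branch delay slot: the mov executes on both
// the taken and the not-taken path, so no cycle is wasted on a nop.
void EmitEarlyOut(MacroAssembler& masm, Label* lastb) {
  __ slti(a6, a2, 16);          // a6 = (count < 16) ? 1 : 0
  __ bne(a6, zero_reg, lastb);  // small copy: branch to the byte loop ...
  __ mov(v0, a0);               // ... executes in the delay slot either way
}

#undef __
```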
code-stubs-mips64.cc
   23  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   26  __ dsll(t9, a0, kPointerSizeLog2);
   27  __ Daddu(t9, sp, t9);
   28  __ sd(a1, MemOperand(t9, 0));
   29  __ Push(a1);
   30  __ Push(a2);
   31  __ Daddu(a0, a0, 3);
   32  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   72  __ Dsubu(sp, sp, Operand(param_count * kPointerSize));
   75  __ sd(descriptor.GetRegisterParameter(i),
  [all …]
/external/v8/src/s390/ |
code-stubs-s390.cc
   24  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   27  __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
   28  __ StoreP(r3, MemOperand(sp, r1));
   29  __ push(r3);
   30  __ push(r4);
   31  __ AddP(r2, r2, Operand(3));
   32  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   68  __ push(descriptor.GetRegisterParameter(i));
   70  __ CallExternalReference(miss, param_count);
   73  __ Ret();
  [all …]
/external/v8/src/ppc/ |
code-stubs-ppc.cc
   24  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   27  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
   28  __ StorePX(r4, MemOperand(sp, r0));
   29  __ push(r4);
   30  __ push(r5);
   31  __ addi(r3, r3, Operand(3));
   32  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   69  __ push(descriptor.GetRegisterParameter(i));
   71  __ CallExternalReference(miss, param_count);
   74  __ Ret();
  [all …]
/external/v8/src/mips/ |
codegen-mips.cc
   19  #define __ masm.   (macro)
  in CreateMemCopyUint8Function():
   69  __ slti(t2, a2, 2 * loadstore_chunk);
   70  __ bne(t2, zero_reg, &lastb);
   71  __ mov(v0, a0);  // In delay slot.
   77  __ xor_(t8, a1, a0);
   78  __ andi(t8, t8, loadstore_chunk - 1);  // t8 is a0/a1 word-displacement.
   79  __ bne(t8, zero_reg, &unaligned);
   80  __ subu(a3, zero_reg, a0);  // In delay slot.
   82  __ andi(a3, a3, loadstore_chunk - 1);  // Copy a3 bytes to align a0/a1.
   83  __ beq(a3, zero_reg, &aligned);  // Already aligned.
  [all …]
code-stubs-mips.cc
   24  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   27  __ sll(t9, a0, kPointerSizeLog2);
   28  __ Addu(t9, sp, t9);
   29  __ sw(a1, MemOperand(t9, 0));
   30  __ Push(a1);
   31  __ Push(a2);
   32  __ Addu(a0, a0, Operand(3));
   33  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   73  __ Subu(sp, sp, Operand(param_count * kPointerSize));
   76  __ sw(descriptor.GetRegisterParameter(i),
  [all …]
/external/v8/src/ia32/ |
code-stubs-ia32.cc
   25  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   28  __ pop(ecx);
   29  __ mov(MemOperand(esp, eax, times_4, 0), edi);
   30  __ push(edi);
   31  __ push(ebx);
   32  __ push(ecx);
   33  __ add(eax, Immediate(3));
   34  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   62  __ push(descriptor.GetRegisterParameter(i));
   64  __ CallExternalReference(miss, param_count);
  [all …]
codegen-ia32.cc
   34  #define __ masm.   (macro)
  in CreateSqrtFunction():
   49  __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
   50  __ sqrtsd(xmm0, xmm0);
   51  __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
   53  __ fld_d(Operand(esp, 1 * kPointerSize));
   54  __ Ret();
   68  #undef __
   69  #define __ ACCESS_MASM(masm)   (macro)
  in MemMoveEmitMainLoop():
   88  __ cmp(loop_count, 0);
   89  __ j(equal, &move_last_63);
  [all …]
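Note the rebinding at lines 68–69: within one file, `__` first means `masm.` (a stack-allocated `MacroAssembler` used by `CreateSqrtFunction`) and is then redefined to `ACCESS_MASM(masm)` (conventionally `masm->`, for code that receives an assembler pointer). A sketch of the pattern:

```cpp
// First part of the file: the assembler is a local object, so dot access.
#define __ masm.

void EmitSqrt(MacroAssembler& masm) {
  __ sqrtsd(xmm0, xmm0);  // expands to masm.sqrtsd(xmm0, xmm0)
  __ Ret();
}

#undef __

// Rest of the file: the assembler arrives as a pointer; ACCESS_MASM(masm)
// conventionally expands to masm->.
#define __ ACCESS_MASM(masm)

void EmitLoopCheck(MacroAssembler* masm, Register loop_count, Label* done) {
  __ cmp(loop_count, 0);  // compare the remaining iteration count with zero
  __ j(equal, done);      // nothing left to move
}

#undef __
```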
/external/v8/src/x87/ |
code-stubs-x87.cc
   25  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   28  __ pop(ecx);
   29  __ mov(MemOperand(esp, eax, times_4, 0), edi);
   30  __ push(edi);
   31  __ push(ebx);
   32  __ push(ecx);
   33  __ add(eax, Immediate(3));
   34  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   62  __ push(descriptor.GetRegisterParameter(i));
   64  __ CallExternalReference(miss, param_count);
  [all …]
/external/v8/src/builtins/x87/ |
builtins-x87.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   30  __ AssertFunction(edi);
   36  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   41  __ add(eax, Immediate(num_extra_args + 1));
   44  __ PopReturnAddressTo(ecx);
   45  __ SmiTag(eax);
   46  __ Push(eax);
   47  __ SmiUntag(eax);
   48  __ Push(edi);
   49  __ Push(edx);
  [all …]
/external/v8/src/builtins/s390/ |
builtins-s390.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(r3);
   35  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
   40  __ AddP(r2, r2, Operand(num_extra_args + 1));
   43  __ SmiTag(r2);
   44  __ Push(r2, r3, r5);
   45  __ SmiUntag(r2);
   47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  in GenerateLoadInternalArrayFunction():
   55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadArrayFunction():
   61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  [all …]
/external/v8/src/builtins/ppc/ |
builtins-ppc.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(r4);
   35  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
   40  __ addi(r3, r3, Operand(num_extra_args + 1));
   43  __ SmiTag(r3);
   44  __ Push(r3, r4, r6);
   45  __ SmiUntag(r3);
   47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  in GenerateLoadInternalArrayFunction():
   55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadArrayFunction():
   61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  [all …]
/external/v8/src/builtins/ia32/ |
builtins-ia32.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   30  __ AssertFunction(edi);
   36  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
   41  __ add(eax, Immediate(num_extra_args + 1));
   44  __ PopReturnAddressTo(ecx);
   45  __ SmiTag(eax);
   46  __ Push(eax);
   47  __ SmiUntag(eax);
   48  __ Push(edi);
   49  __ Push(edx);
  [all …]
/external/v8/src/arm/ |
code-stubs-arm.cc
   25  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   28  __ lsl(r5, r0, Operand(kPointerSizeLog2));
   29  __ str(r1, MemOperand(sp, r5));
   30  __ Push(r1);
   31  __ Push(r2);
   32  __ add(r0, r0, Operand(3));
   33  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   74  __ push(descriptor.GetRegisterParameter(i));
   76  __ CallExternalReference(miss, param_count);
   79  __ Ret();
  [all …]
codegen-arm.cc
   19  #define __ masm.   (macro)
  in CreateMemCopyUint8Function():
   45  __ pld(MemOperand(src, 0));
   47  __ cmp(chars, Operand(8));
   48  __ b(lt, &size_less_than_8);
   49  __ cmp(chars, Operand(32));
   50  __ b(lt, &less_32);
   52  __ pld(MemOperand(src, 32));
   54  __ cmp(chars, Operand(64));
   55  __ b(lt, &less_64);
   56  __ pld(MemOperand(src, 64));
  [all …]
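The ARM copy routine leans on `pld` (preload-data) hints: it prefetches the source ahead of the loads and branches to size-specialized tails, prefetching further ahead as the copy grows. A sketch of the dispatch prologue using the labels from the match:

```cpp
#define __ masm.

// Prefetch the source and branch to a size-appropriate copy path.
void EmitCopyDispatch(MacroAssembler& masm, Register chars, Register src,
                      Label* size_less_than_8, Label* less_32) {
  __ pld(MemOperand(src, 0));   // hint: first source cache line incoming
  __ cmp(chars, Operand(8));
  __ b(lt, size_less_than_8);   // tiny copies go byte by byte
  __ cmp(chars, Operand(32));
  __ b(lt, less_32);            // small copies skip the main loop
  __ pld(MemOperand(src, 32));  // larger copies: stream further ahead
}

#undef __
```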
/external/v8/src/builtins/arm/ |
builtins-arm.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(r1);
   35  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
   40  __ add(r0, r0, Operand(num_extra_args + 1));
   43  __ SmiTag(r0);
   44  __ Push(r0, r1, r3);
   45  __ SmiUntag(r0);
   47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  in GenerateLoadInternalArrayFunction():
   55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadArrayFunction():
   61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  [all …]
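Every `Generate_Adaptor` port in this listing follows one recipe: load the function's context, bump the argument count for the extra arguments, tag it as a Smi so the GC can walk the frame across the call, push it with the function and new.target, untag, and jump to the C++ builtin. The ARM shape, reassembled from the matched lines (the register-role comments are inferences, and the tail of the `JumpToExternalReference` call is truncated in the match):

```cpp
#define __ ACCESS_MASM(masm)

void Generate_Adaptor(MacroAssembler* masm, Address address,
                      int num_extra_args) {
  __ AssertFunction(r1);  // r1: target JSFunction (checked in debug builds)
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));  // its context
  __ add(r0, r0, Operand(num_extra_args + 1));  // r0: argc, including extras
  __ SmiTag(r0);        // tag argc: the frame must hold only Smis and objects
  __ Push(r0, r1, r3);  // save argc, function, new.target (r3)
  __ SmiUntag(r0);      // raw argc again for the callee
  __ JumpToExternalReference(ExternalReference(address, masm->isolate()));
  // (the matched call site continues with further arguments)
}

#undef __
```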
/external/v8/src/arm64/ |
code-stubs-arm64.cc
   25  #define __ ACCESS_MASM(masm)   (macro)
  in Generate():
   28  __ Mov(x5, Operand(x0, LSL, kPointerSizeLog2));
   29  __ Str(x1, MemOperand(jssp, x5));
   30  __ Push(x1);
   31  __ Push(x2);
   32  __ Add(x0, x0, Operand(3));
   33  __ TailCallRuntime(Runtime::kNewArray);
  in GenerateLightweightMiss():
   67  __ CallExternalReference(miss, param_count);
   70  __ Ret();
  in Generate():
   90  __ Push(scratch1, scratch2);
  [all …]
/external/v8/src/builtins/x64/ |
builtins-x64.cc
   15  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(rdi);
   38  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
   43  __ addp(rax, Immediate(num_extra_args + 1));
   47  __ PopReturnAddressTo(kScratchRegister);
   48  __ Integer32ToSmi(rax, rax);
   49  __ Push(rax);
   50  __ SmiToInteger32(rax, rax);
   51  __ Push(rdi);
   52  __ Push(rdx);
  [all …]
/external/v8/src/builtins/mips/ |
builtins-mips.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(a1);
   35  __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
   40  __ Addu(a0, a0, num_extra_args + 1);
   43  __ SmiTag(a0);
   44  __ Push(a0, a1, a3);
   45  __ SmiUntag(a0);
   47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  in GenerateLoadInternalArrayFunction():
   55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadArrayFunction():
   61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  [all …]
/external/v8/src/builtins/mips64/ |
builtins-mips64.cc
   16  #define __ ACCESS_MASM(masm)   (macro)
  in Generate_Adaptor():
   29  __ AssertFunction(a1);
   35  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
   40  __ Daddu(a0, a0, num_extra_args + 1);
   43  __ SmiTag(a0);
   44  __ Push(a0, a1, a3);
   45  __ SmiUntag(a0);
   47  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  in GenerateLoadInternalArrayFunction():
   55  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadArrayFunction():
   61  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  [all …]
/external/v8/src/builtins/arm64/ |
builtins-arm64.cc
   17  #define __ ACCESS_MASM(masm)   (macro)
  in GenerateLoadArrayFunction():
   22  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
  in GenerateLoadInternalArrayFunction():
   29  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
  in Generate_Adaptor():
   43  __ AssertFunction(x1);
   49  __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
   54  __ Add(x0, x0, num_extra_args + 1);
   57  __ SmiTag(x0);
   58  __ Push(x0, x1, x3);
   59  __ SmiUntag(x0);
   61  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
  [all …]