/art/compiler/utils/
  assembler_thumb_test.cc
    1287  TEST_F(Thumb2AssemblerTest, AddConstant) {
    1289  __ AddConstant(R0, R1, 0);       // MOV.
    1290  __ AddConstant(R0, R1, 1);       // 16-bit ADDS, encoding T1.
    1291  __ AddConstant(R0, R1, 7);       // 16-bit ADDS, encoding T1.
    1292  __ AddConstant(R0, R1, 8);       // 32-bit ADD, encoding T3.
    1293  __ AddConstant(R0, R1, 255);     // 32-bit ADD, encoding T3.
    1294  __ AddConstant(R0, R1, 256);     // 32-bit ADD, encoding T3.
    1295  __ AddConstant(R0, R1, 257);     // 32-bit ADD, encoding T4.
    1296  __ AddConstant(R0, R1, 0xfff);   // 32-bit ADD, encoding T4.
    1297  __ AddConstant(R0, R1, 0x1000);  // 32-bit ADD, encoding T3.
    [all …]
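The comments in this test spell out the immediate ranges behind Thumb-2's add-with-constant encoding choices: zero degenerates to MOV, 0-7 fits the 16-bit ADDS (T1) imm3 field, T3 takes any "modified immediate" (an 8-bit pattern that ThumbExpandImm can reproduce), and T4 covers the remaining plain 12-bit immediates. The standalone sketch below reproduces that selection for the test's rd != rn, low-register case; it is illustrative only, not ART's Thumb2Assembler code, and IsThumb2ModifiedImmediate / SelectAddImmediateEncoding are hypothetical helper names.

    #include <cstdint>

    // True if |value| is a Thumb-2 "modified immediate" (encodable via
    // ThumbExpandImm), the form accepted by 32-bit ADD encoding T3.
    bool IsThumb2ModifiedImmediate(uint32_t value) {
      uint32_t b = value & 0xff;
      // Replicated byte patterns: 0x000000ab, 0x00ab00ab, 0xab00ab00, 0xabababab.
      if (value == b ||
          value == (b | (b << 16)) ||
          value == ((b << 8) | (b << 24)) ||
          value == (b | (b << 8) | (b << 16) | (b << 24))) {
        return true;
      }
      // An 8-bit value with its top bit set, rotated right by 8..31.
      for (int rot = 8; rot <= 31; ++rot) {
        uint32_t unrotated = (value << rot) | (value >> (32 - rot));  // rotate left to undo the ROR
        if ((unrotated & ~0xffu) == 0 && (unrotated & 0x80u) != 0) {
          return true;
        }
      }
      return false;
    }

    // Smallest encoding for "ADDS rd, rn, #imm" with rd != rn, both low registers,
    // mirroring the comments in the test above.
    const char* SelectAddImmediateEncoding(uint32_t imm) {
      if (imm == 0) return "MOV rd, rn";
      if (imm <= 7) return "16-bit ADDS, encoding T1";
      if (IsThumb2ModifiedImmediate(imm)) return "32-bit ADD, encoding T3";
      if (imm <= 0xfff) return "32-bit ADD, encoding T4";
      return "materialize imm in a scratch register, then ADD (register)";
    }

Called with the constants from the test (0, 1, 7, 8, 255, 256, 257, 0xfff, 0x1000), this returns exactly the encodings named in the comments.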
/art/compiler/utils/arm/ |
  jni_macro_assembler_arm.cc
    180  __ AddConstant(SP, -adjust);                                                          in IncreaseFrameSize()
    346  __ AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value(), AL);               in StoreStackOffsetToThread()
    495  __ AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);   in CreateHandleScopeEntry()
    497  __ AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);   in CreateHandleScopeEntry()
    514  __ AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);   in CreateHandleScopeEntry()
    516  __ AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);   in CreateHandleScopeEntry()
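The AL/NE condition operands above hint at how the JNI stubs compute handle-scope entry addresses: unconditionally (AL) when the reference is known non-null, and predicated on NE after a null check when it may be null, so a null reference yields a null entry pointer without a branch. A toy sketch of that shape follows; ToyAsm and every name in it are hypothetical stand-ins, not the real ArmJNIMacroAssembler API.

    #include <cstdio>

    enum Condition { EQ, NE, AL };

    // Toy "assembler" that just prints what would be emitted (illustration only).
    struct ToyAsm {
      static const char* Suffix(Condition c) { return c == EQ ? "eq" : c == NE ? "ne" : ""; }
      void CompareImmediate(const char* rn, int imm) { std::printf("cmp %s, #%d\n", rn, imm); }
      void LoadImmediate(const char* rd, int imm, Condition c) {
        std::printf("mov%s %s, #%d\n", Suffix(c), rd, imm);
      }
      void AddConstant(const char* rd, const char* rn, int imm, Condition c) {
        std::printf("add%s %s, %s, #%d\n", Suffix(c), rd, rn, imm);
      }
    };

    // Sketch of the "maybe-null" handle-scope-entry pattern suggested by the NE/AL
    // pairs above: the entry address SP + offset is only materialized on the
    // non-null path, using ARM conditional execution instead of a branch.
    void CreateHandleScopeEntrySketch(ToyAsm& a, const char* out, const char* in,
                                      int handle_scope_offset, bool null_allowed) {
      if (null_allowed) {
        a.CompareImmediate(in, 0);                          // is the reference null?
        a.LoadImmediate(out, 0, EQ);                        // yes: entry pointer stays null
        a.AddConstant(out, "sp", handle_scope_offset, NE);  // no: out = SP + offset
      } else {
        a.AddConstant(out, "sp", handle_scope_offset, AL);  // always valid: out = SP + offset
      }
    }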
  assembler_arm_vixl.cc
    404  void ArmVIXLAssembler::AddConstant(vixl32::Register rd, int32_t value) {   (definition in art::arm::ArmVIXLAssembler)
    409  void ArmVIXLAssembler::AddConstant(vixl32::Register rd,                    (definition in art::arm::ArmVIXLAssembler)
  assembler_thumb2.cc
    3629  void Thumb2Assembler::AddConstant(Register rd, Register rn, int32_t value,   (definition in art::arm::Thumb2Assembler)
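This three-operand helper is the workhorse behind the test at the top of this listing. A common shape for such a helper (a sketch of the general technique, not a transcription of Thumb2Assembler::AddConstant) is to try an ADD of the immediate, fall back to a SUB of its negation when that encodes more cheaply, and otherwise materialize the constant in a scratch register. Reusing IsThumb2ModifiedImmediate() from the sketch above:

    #include <cstdint>

    // Defined in the earlier sketch.
    bool IsThumb2ModifiedImmediate(uint32_t value);

    // Plan for "rd = rn + value" on Thumb-2 (illustrative helper name).
    const char* AddConstantPlan(int32_t value) {
      uint32_t add_imm = static_cast<uint32_t>(value);
      uint32_t sub_imm = -static_cast<uint32_t>(value);  // well-defined unsigned negation
      if (IsThumb2ModifiedImmediate(add_imm) || add_imm <= 0xfffu) {
        return "ADD rd, rn, #value";
      }
      if (IsThumb2ModifiedImmediate(sub_imm) || sub_imm <= 0xfffu) {
        return "SUB rd, rn, #-value";
      }
      return "MOVW/MOVT scratch, #value; ADD rd, rn, scratch";
    }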
/art/compiler/utils/arm64/ |
  jni_macro_assembler_arm64.cc
    73  void Arm64JNIMacroAssembler::AddConstant(XRegister rd, int32_t value, Condition cond) {   (definition in art::arm64::Arm64JNIMacroAssembler)
    77  void Arm64JNIMacroAssembler::AddConstant(XRegister rd,                                     (definition in art::arm64::Arm64JNIMacroAssembler)
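Unlike 32-bit ARM, AArch64 cannot predicate an ADD on a condition code, so a conditional AddConstant has to be synthesized, typically by branching over the add (or by selecting between old and new values with CSEL). The toy emitter below shows the branch-over variant; every name in it is hypothetical and it is not the vixl/ART API.

    #include <cstdio>

    // Toy label/branch emitter, for illustration only.
    struct ToyA64 {
      int next_label = 0;
      int NewLabel() { return next_label++; }
      void B(const char* cond, int label) { std::printf("b.%s L%d\n", cond, label); }
      void AddImm(const char* rd, const char* rn, int imm) {
        std::printf("add %s, %s, #%d\n", rd, rn, imm);
      }
      void Bind(int label) { std::printf("L%d:\n", label); }
    };

    // "rd = rd + imm, but only if the condition holds": branch over the add on the
    // inverted condition, e.g. pass "ne" here to implement an EQ-conditional add.
    void AddConstantIfSketch(ToyA64& a, const char* rd, int imm, const char* inverted_cond) {
      int done = a.NewLabel();
      a.B(inverted_cond, done);  // skip the add when the condition does not hold
      a.AddImm(rd, rd, imm);
      a.Bind(done);
    }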
/art/compiler/optimizing/ |
  intrinsics_arm.cc
    66    __ AddConstant(base, array, element_size * constant + data_offset);   in GenSystemArrayCopyBaseAddress()
    69    __ AddConstant(base, data_offset);                                    in GenSystemArrayCopyBaseAddress()
    88    __ AddConstant(end, base, element_size * constant);                   in GenSystemArrayCopyEndAddress()
    295   __ AddConstant(out, 32);                                              in GenNumberOfLeadingZeros()
    343   __ AddConstant(out, 32);                                              in GenNumberOfTrailingZeros()
    2732  __ AddConstant(out, in, -info.low);                                   in VisitIntegerValueOf()
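The SystemArrayCopy helpers above are plain address arithmetic wrapped in AddConstant calls: the base of the copied range is array + data_offset + element_size * index, and the end is base + element_size * length. Likewise, VisitIntegerValueOf's AddConstant(out, in, -info.low) rebases a value into an index of the boxed-Integer cache covering [info.low, info.high]. A small C++ restatement (illustrative names; the real code emits ARM instructions rather than computing in C++):

    #include <cstdint>

    // base = array + data_offset + element_size * index
    uintptr_t ArrayElementAddress(uintptr_t array, uint32_t data_offset,
                                  uint32_t element_size, uint32_t index) {
      return array + data_offset + element_size * index;
    }

    // end = base + element_size * length
    uintptr_t ArrayCopyEndAddress(uintptr_t base, uint32_t element_size, uint32_t length) {
      return base + element_size * length;
    }

    // index into the boxed-Integer cache that covers [cache_low, cache_high]
    int32_t IntegerCacheIndex(int32_t value, int32_t cache_low) {
      return value - cache_low;
    }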
  code_generator_arm.cc
    1205  __ AddConstant(index_reg, index_reg, offset_);                                        in EmitNativeCode()
    2073  __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));   in GenerateFrameEntry()
    2095  __ AddConstant(SP, -adjust);                                                          in GenerateFrameEntry()
    2113  __ AddConstant(SP, adjust);                                                           in GenerateFrameExit()
    3821  __ AddConstant(out.AsRegister<Register>(),                                            in VisitAdd()
    3901  __ AddConstant(out.AsRegister<Register>(),                                            in VisitSub()
    6272  __ AddConstant(out.AsRegister<Register>(),                                            in VisitIntermediateAddress()
    7766  __ AddConstant(out_high, first_high, value_high);                                     in GenerateAddLongConst()
    7978  __ AddConstant(root_reg, obj, offset);                                                in GenerateGcRootFieldLoad()
    8608  __ AddConstant(key_reg, value_reg, -lower_bound);                                     in VisitPackedSwitch()
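Several of these uses are again simple rebasing: GenerateFrameEntry forms SP minus the reserved stack-overflow region so a probe load can touch that page, and VisitPackedSwitch subtracts the switch's lower bound so a single unsigned comparison both bounds-checks the key and indexes the jump table. A sketch of the packed-switch semantics (illustrative C++, not the emitted ARM code):

    #include <cstddef>
    #include <cstdint>

    // What "AddConstant(key_reg, value_reg, -lower_bound)" sets up: bias the key
    // to zero, then one unsigned compare covers both "too small" and "too large".
    int32_t PackedSwitchTarget(int32_t value, int32_t lower_bound,
                               const int32_t* targets, size_t num_entries,
                               int32_t default_target) {
      uint32_t key = static_cast<uint32_t>(value) - static_cast<uint32_t>(lower_bound);
      if (key >= num_entries) {
        return default_target;  // out of range on either side -> default block
      }
      return targets[key];      // otherwise dispatch through the table
    }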