Lines Matching refs:GetVIXLAssembler

67 MacroAssembler* IntrinsicCodeGeneratorARM64::GetVIXLAssembler() { in GetVIXLAssembler() function in art::arm64::IntrinsicCodeGeneratorARM64
68 return codegen_->GetVIXLAssembler(); in GetVIXLAssembler()
75 #define __ codegen->GetVIXLAssembler()->
125 vixl::EmissionCheckScope guard(codegen->GetVIXLAssembler(), kInvokeCodeMarginSizeInBytes); in EmitNativeCode()
276 MoveFPToInt(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitDoubleDoubleToRawLongBits()
279 MoveIntToFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitDoubleLongBitsToDouble()
290 MoveFPToInt(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitFloatFloatToRawIntBits()
293 MoveIntToFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitFloatIntBitsToFloat()
330 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerReverseBytes()
338 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongReverseBytes()
346 GenReverseBytes(invoke->GetLocations(), Primitive::kPrimShort, GetVIXLAssembler()); in VisitShortReverseBytes()
374 GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerNumberOfLeadingZeros()
382 GenNumberOfLeadingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongNumberOfLeadingZeros()
402 GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerNumberOfTrailingZeros()
410 GenNumberOfTrailingZeros(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongNumberOfTrailingZeros()
429 GenReverse(invoke->GetLocations(), Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerReverse()
437 GenReverse(invoke->GetLocations(), Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongReverse()
462 GenBitCount(invoke, Primitive::kPrimLong, GetVIXLAssembler()); in VisitLongBitCount()
470 GenBitCount(invoke, Primitive::kPrimInt, GetVIXLAssembler()); in VisitIntegerBitCount()
496 MathAbsFP(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitMathAbsDouble()
504 MathAbsFP(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitMathAbsFloat()
533 GenAbsInteger(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitMathAbsInt()
541 GenAbsInteger(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitMathAbsLong()
576 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ true, GetVIXLAssembler()); in VisitMathMinDoubleDouble()
584 GenMinMaxFP(invoke->GetLocations(), /* is_min */ true, /* is_double */ false, GetVIXLAssembler()); in VisitMathMinFloatFloat()
592 GenMinMaxFP(invoke->GetLocations(), /* is_min */ false, /* is_double */ true, GetVIXLAssembler()); in VisitMathMaxDoubleDouble()
601 invoke->GetLocations(), /* is_min */ false, /* is_double */ false, GetVIXLAssembler()); in VisitMathMaxFloatFloat()
625 GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ false, GetVIXLAssembler()); in VisitMathMinIntInt()
633 GenMinMax(invoke->GetLocations(), /* is_min */ true, /* is_long */ true, GetVIXLAssembler()); in VisitMathMinLongLong()
641 GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ false, GetVIXLAssembler()); in VisitMathMaxIntInt()
649 GenMinMax(invoke->GetLocations(), /* is_min */ false, /* is_long */ true, GetVIXLAssembler()); in VisitMathMaxLongLong()
658 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathSqrt()
668 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathCeil()
678 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathFloor()
688 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathRint()
743 GenMathRound(invoke, /* is_double */ true, GetVIXLAssembler()); in VisitMathRoundDouble()
751 GenMathRound(invoke, /* is_double */ false, GetVIXLAssembler()); in VisitMathRoundFloat()
759 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekByte()
769 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekIntNative()
779 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekLongNative()
789 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekShortNative()
807 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeByte()
817 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeIntNative()
827 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeLongNative()
837 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeShortNative()
1001 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenUnsafePut()
1131 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenCas()
1264 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringCompareTo()
1505 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringEquals()
1721 invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ true); in VisitStringIndexOf()
1739 invoke, GetVIXLAssembler(), codegen_, GetAllocator(), /* start_at_zero */ false); in VisitStringIndexOfAfter()
1755 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringNewStringFromBytes()
1801 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringNewStringFromString()
2003 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringGetCharsNoCheck()
2245 MacroAssembler* masm = GetVIXLAssembler(); in VisitSystemArrayCopyChar()
2412 MacroAssembler* masm = GetVIXLAssembler(); in VisitSystemArrayCopy()
2892 GenIsInfinite(invoke->GetLocations(), /* is64bit */ false, GetVIXLAssembler()); in VisitFloatIsInfinite()
2900 GenIsInfinite(invoke->GetLocations(), /* is64bit */ true, GetVIXLAssembler()); in VisitDoubleIsInfinite()
2918 MacroAssembler* masm = GetVIXLAssembler(); in VisitReferenceGetReferent()
2957 vixl::EmissionCheckScope guard(codegen_->GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes); in VisitReferenceGetReferent()
2978 MacroAssembler* masm = GetVIXLAssembler(); in VisitIntegerValueOf()
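
The matches above appear to come from ART's arm64 intrinsics code generator and all follow one pattern: IntrinsicCodeGeneratorARM64::GetVIXLAssembler() (lines 67-68) simply forwards to the code generator's VIXL MacroAssembler, and every emitter either caches that pointer in a local masm or goes through the "__" shorthand defined at line 75. The sketch below is a minimal, hypothetical illustration of that pattern, not ART code: CodegenStub and EmitAbsInt are stand-ins I introduce here, the header path assumes VIXL's current aarch64 layout, and the listing's own vixl::EmissionCheckScope suggests an older VIXL whose namespace and headers differ.

    // Hypothetical sketch of the codegen->GetVIXLAssembler() / "__" pattern.
    // Assumes the current VIXL aarch64 header layout and namespaces.
    #include "aarch64/macro-assembler-aarch64.h"

    using vixl::aarch64::MacroAssembler;
    using vixl::aarch64::Register;
    using vixl::aarch64::lt;
    using vixl::aarch64::w0;
    using vixl::aarch64::w1;

    // Stand-in for CodeGeneratorARM64: the only service the intrinsic
    // emitters need from it here is access to the shared VIXL assembler.
    struct CodegenStub {
      explicit CodegenStub(MacroAssembler* masm) : masm_(masm) {}
      MacroAssembler* GetVIXLAssembler() { return masm_; }
      MacroAssembler* masm_;
    };

    // Mirrors the shorthand at line 75 of the listing.
    #define __ codegen->GetVIXLAssembler()->

    // Roughly what a GenAbsInteger-style helper emits for the 32-bit case:
    // out = (in < 0) ? -in : in, as a compare plus conditional negate.
    static void EmitAbsInt(CodegenStub* codegen,
                           const Register& out,
                           const Register& in) {
      __ Cmp(in, 0);
      __ Cneg(out, in, lt);
    }

    #undef __

    int main() {
      MacroAssembler masm;
      CodegenStub codegen(&masm);
      EmitAbsInt(&codegen, w0, w1);  // emit |w1| into w0
      masm.FinalizeCode();
      return 0;
    }

Each "__ Mnemonic(...)" line expands to a call on the code generator's MacroAssembler, which is why nearly every intrinsic visitor in the listing touches GetVIXLAssembler either directly or through that macro.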