
Searched refs:mask (Results 1 – 25 of 39) sorted by relevance

/art/runtime/arch/arm64/
memcmp16_arm64.S
45 #define mask x13 macro
79 mov mask, #~0
80 lsl mask, mask, limit
81 bic data1, data1, mask
82 bic data2, data2, mask
94 mov mask, #0xFFFF
99 and data1, data1, mask
100 and data2, data2, mask
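
The memcmp16 tail handling above builds a mask covering the bits beyond the compare limit and clears them with bic before the final comparison. A minimal C++ sketch of the same idiom (names and the return convention are illustrative, not ART's):

  #include <cstdint>

  // Compare only the low `valid_bits` of two 64-bit words, ignoring the rest.
  // Assumes 0 < valid_bits < 64 (a shift by 64 or more would be undefined).
  inline int CompareLowBits(uint64_t data1, uint64_t data2, unsigned valid_bits) {
    uint64_t mask = ~UINT64_C(0) << valid_bits;  // mov mask, #~0 ; lsl mask, mask, limit
    data1 &= ~mask;                              // bic data1, data1, mask
    data2 &= ~mask;                              // bic data2, data2, mask
    if (data1 == data2) return 0;
    return data1 < data2 ? -1 : 1;
  }
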
/art/compiler/dex/quick/
resource_mask.cc
145 const ResourceMask* ResourceMaskCache::GetMask(const ResourceMask& mask) { in GetMask() argument
153 if ((mask.masks_[0] >> 32) == 0u && (mask.masks_[1] & ~kAllowedSpecialBits.masks_[1]) == 0u) { in GetMask()
155 uint32_t low_regs = static_cast<uint32_t>(mask.masks_[0]); in GetMask()
157 if (low_regs_without_lowest == 0u && IsPowerOfTwo(mask.masks_[1])) { in GetMask()
159 size_t index = (mask.masks_[1] != 0u) ? CLZ(mask.masks_[1]) : 0u; in GetMask()
163 } else if (IsPowerOfTwo(low_regs_without_lowest) && mask.masks_[1] == 0u) { in GetMask()
169 } else if (mask.Equals(kEncodeAll)) { in GetMask()
173 DCHECK(res->Equals(mask)) in GetMask()
174 << "(" << std::hex << std::setw(16) << mask.masks_[0] in GetMask()
175 << ", "<< std::hex << std::setw(16) << mask.masks_[1] in GetMask()
[all …]
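
The cache lookup in GetMask above leans on two standard bit tricks: clearing the lowest set bit and testing for a power of two. A standalone sketch of both (these are the generic forms, not ART's IsPowerOfTwo/CLZ helpers):

  #include <cstdint>

  // x & (x - 1) drops the lowest set bit ("low_regs_without_lowest").
  constexpr uint64_t ClearLowestSetBit(uint64_t x) { return x & (x - 1u); }

  // Exactly one bit set <=> non-zero, and clearing the lowest bit yields zero.
  constexpr bool IsSingleBit(uint64_t x) { return x != 0u && (x & (x - 1u)) == 0u; }
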
mir_to_lir-inl.h
145 inline void Mir2Lir::SetupRegMask(ResourceMask* mask, int reg) { in SetupRegMask() argument
149 *mask = mask->Union(reginfo_map_[reg]->DefUseMask()); in SetupRegMask()
155 inline void Mir2Lir::ClearRegMask(ResourceMask* mask, int reg) { in ClearRegMask() argument
159 *mask = mask->ClearBits(reginfo_map_[reg]->DefUseMask()); in ClearRegMask()
codegen_util.cc
146 ResourceMask mask; in SetMemRefType() local
152 mask = **mask_ptr; in SetMemRefType()
154 mask.ClearBits(kEncodeMem); in SetMemRefType()
159 mask.SetBit(ResourceMask::kLiteral); in SetMemRefType()
162 mask.SetBit(ResourceMask::kDalvikReg); in SetMemRefType()
165 mask.SetBit(ResourceMask::kHeapRef); in SetMemRefType()
170 mask.SetBit(ResourceMask::kMustNotAlias); in SetMemRefType()
175 *mask_ptr = mask_cache_.GetMask(mask); in SetMemRefType()
resource_mask.h
161 const ResourceMask* GetMask(const ResourceMask& mask);
local_optimizations.cc
25 #define LOAD_STORE_CHECK_REG_DEP(mask, check) (mask.Intersects(*check->u.m.def_mask)) argument
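
The ResourceMask operations seen throughout this directory (SetBit, ClearBits, HasBit, Union, Intersects) are those of a small fixed-width bitset. A single-word sketch of that shape, purely for orientation (ART's real ResourceMask spans two 64-bit words and defines named bits such as kDalvikReg and kHeapRef):

  #include <cstdint>

  struct SmallResourceMask {
    uint64_t bits = 0u;                // assumes bit indices < 64

    void SetBit(unsigned b)        { bits |=  (UINT64_C(1) << b); }
    void ClearBit(unsigned b)      { bits &= ~(UINT64_C(1) << b); }
    bool HasBit(unsigned b) const  { return ((bits >> b) & 1u) != 0u; }
    SmallResourceMask Union(const SmallResourceMask& o) const { return {bits | o.bits}; }
    bool Intersects(const SmallResourceMask& o) const { return (bits & o.bits) != 0u; }
  };
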
/art/tools/dexfuzz/src/dexfuzz/rawdex/
DexRandomAccessFile.java
300 long mask = alignment - 1; in alignForwards() local
301 if ((offset & mask) != 0) { in alignForwards()
302 int extra = alignment - (int) (offset & mask); in alignForwards()
313 long mask = alignment - 1; in alignBackwards() local
314 if ((offset & mask) != 0) { in alignBackwards()
315 offset &= (~mask); in alignBackwards()
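
alignForwards/alignBackwards above use the usual power-of-two alignment mask (alignment - 1). The same idiom in C++, as a sketch (assumes alignment is a power of two; function names are mine, not the Java ones):

  #include <cstdint>

  inline uint64_t AlignBackwards(uint64_t offset, uint64_t alignment) {
    uint64_t mask = alignment - 1;     // e.g. 0x7 for 8-byte alignment
    return offset & ~mask;             // clear the low bits
  }

  inline uint64_t AlignForwards(uint64_t offset, uint64_t alignment) {
    uint64_t mask = alignment - 1;
    return (offset + mask) & ~mask;    // round up, then clear the low bits
  }

The Java code adds the padding explicitly only when (offset & mask) != 0; the closed form above is equivalent, barring overflow.
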
/art/runtime/gc/accounting/
space_bitmap-inl.h
38 const uintptr_t mask = OffsetToMask(offset); in AtomicTestAndSet() local
45 if ((old_word & mask) != 0) { in AtomicTestAndSet()
49 } while (!atomic_entry->CompareExchangeWeakSequentiallyConsistent(old_word, old_word | mask)); in AtomicTestAndSet()
165 const uintptr_t mask = OffsetToMask(offset); in Modify() local
170 *address = old_word | mask; in Modify()
172 *address = old_word & ~mask; in Modify()
175 return (old_word & mask) != 0; in Modify()
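
AtomicTestAndSet above is a standard CAS loop on one bitmap word: bail out if the bit is already set, otherwise try to publish word | mask. A sketch with std::atomic (ART uses its own Atomic wrapper, but the shape is the same):

  #include <atomic>
  #include <cstdint>

  // Returns true if the bit was already set, false if this call set it.
  inline bool AtomicTestAndSetBit(std::atomic<uintptr_t>* word, uintptr_t mask) {
    uintptr_t old_word = word->load(std::memory_order_relaxed);
    do {
      if ((old_word & mask) != 0u) {
        return true;                                   // someone else set it first
      }
      // compare_exchange_weak reloads old_word on failure, so the check repeats.
    } while (!word->compare_exchange_weak(old_word, old_word | mask,
                                          std::memory_order_seq_cst));
    return false;
  }
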
/art/runtime/gc/allocator/
rosalloc-inl.h
137 const uint32_t mask = 1U << ffz; in AllocSlot() local
140 DCHECK_EQ(*alloc_bitmap_ptr & mask, 0U); in AllocSlot()
141 *alloc_bitmap_ptr |= mask; in AllocSlot()
142 DCHECK_NE(*alloc_bitmap_ptr & mask, 0U); in AllocSlot()
rosalloc.cc
900 const uint32_t mask = 1U << vec_off; in FreeSlot() local
901 DCHECK_NE(*vec & mask, 0U); in FreeSlot()
902 *vec &= ~mask; in FreeSlot()
903 DCHECK_EQ(*vec & mask, 0U); in FreeSlot()
1031 const uint32_t mask = 1U << vec_off; in MarkFreeBitMapShared() local
1032 DCHECK_EQ(*vec & mask, 0U); in MarkFreeBitMapShared()
1033 *vec |= mask; in MarkFreeBitMapShared()
1034 DCHECK_NE(*vec & mask, 0U); in MarkFreeBitMapShared()
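
The rosalloc slot bookkeeping above boils down to single-bit set/clear/test on 32-bit bitmap words, with "ffz" (find first zero) locating a free slot. A compact sketch (FindFirstZero uses the GCC/Clang __builtin_ctz; ART has its own helpers):

  #include <cstdint>

  inline void MarkSlotUsed(uint32_t* vec, unsigned off) { *vec |=  (1U << off); }
  inline void MarkSlotFree(uint32_t* vec, unsigned off) { *vec &= ~(1U << off); }
  inline bool IsSlotUsed(uint32_t vec, unsigned off)    { return ((vec >> off) & 1U) != 0U; }

  // First zero bit = first set bit of the complement; assumes vec != 0xFFFFFFFF.
  inline unsigned FindFirstZero(uint32_t vec) {
    return static_cast<unsigned>(__builtin_ctz(~vec));
  }
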
/art/compiler/dex/quick/mips/
target_mips.cc
465 void MipsMir2Lir::DumpResourceMask(LIR *mips_lir, const ResourceMask& mask, const char *prefix) { in DumpResourceMask() argument
469 if (mask.Equals(kEncodeAll)) { in DumpResourceMask()
476 if (mask.HasBit(i)) { in DumpResourceMask()
482 if (mask.HasBit(ResourceMask::kCCode)) { in DumpResourceMask()
485 if (mask.HasBit(ResourceMask::kFPStatus)) { in DumpResourceMask()
489 if (mips_lir && (mask.HasBit(ResourceMask::kDalvikReg))) { in DumpResourceMask()
494 if (mask.HasBit(ResourceMask::kLiteral)) { in DumpResourceMask()
498 if (mask.HasBit(ResourceMask::kHeapRef)) { in DumpResourceMask()
501 if (mask.HasBit(ResourceMask::kMustNotAlias)) { in DumpResourceMask()
848 uint32_t mask = core_spill_mask_; in SpillCoreRegs() local
[all …]
README.mips
31 o Expand the def/use mask (which, unfortunately, is a significant change)
/art/compiler/dex/quick/arm64/
target_arm64.cc
313 unsigned mask = (unsigned)(width - 1); in DecodeLogicalImmediate() local
314 DCHECK_NE((imm_s & mask), mask); in DecodeLogicalImmediate()
315 uint64_t bits = bit_mask((imm_s & mask) + 1); in DecodeLogicalImmediate()
316 return RepeatBitsAcrossReg(is_wide, RotateRight(bits, imm_r & mask, width), width); in DecodeLogicalImmediate()
546 void Arm64Mir2Lir::DumpResourceMask(LIR* arm_lir, const ResourceMask& mask, const char* prefix) { in DumpResourceMask() argument
550 if (mask.Equals(kEncodeAll)) { in DumpResourceMask()
557 if (mask.HasBit(i)) { in DumpResourceMask()
563 if (mask.HasBit(ResourceMask::kCCode)) { in DumpResourceMask()
566 if (mask.HasBit(ResourceMask::kFPStatus)) { in DumpResourceMask()
571 if (arm_lir && (mask.HasBit(ResourceMask::kDalvikReg))) { in DumpResourceMask()
[all …]
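
DecodeLogicalImmediate above reduces bit-field values modulo the element width with mask = width - 1, and rotates within that width. A sketch of both pieces, assuming width is a power of two no larger than 64 (RotateRightInWidth is a generic rotate, not ART's helper):

  #include <cstdint>

  inline unsigned ModPow2(unsigned x, unsigned width) {
    return x & (width - 1);            // x % width, for power-of-two width
  }

  inline uint64_t RotateRightInWidth(uint64_t bits, unsigned amount, unsigned width) {
    amount &= (width - 1);
    uint64_t value_mask = (width == 64) ? ~UINT64_C(0) : ((UINT64_C(1) << width) - 1);
    bits &= value_mask;
    if (amount == 0) return bits;
    return ((bits >> amount) | (bits << (width - amount))) & value_mask;
  }
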
/art/compiler/dex/quick/x86/
target_x86.cc
422 void X86Mir2Lir::DumpResourceMask(LIR *x86LIR, const ResourceMask& mask, const char *prefix) { in DumpResourceMask() argument
426 if (mask.Equals(kEncodeAll)) { in DumpResourceMask()
433 if (mask.HasBit(i)) { in DumpResourceMask()
439 if (mask.HasBit(ResourceMask::kCCode)) { in DumpResourceMask()
443 if (x86LIR && (mask.HasBit(ResourceMask::kDalvikReg))) { in DumpResourceMask()
448 if (mask.HasBit(ResourceMask::kLiteral)) { in DumpResourceMask()
452 if (mask.HasBit(ResourceMask::kHeapRef)) { in DumpResourceMask()
455 if (mask.HasBit(ResourceMask::kMustNotAlias)) { in DumpResourceMask()
740 uint32_t mask = core_spill_mask_ & ~(1 << rs_rRET.GetRegNum()); in SpillCoreRegs() local
745 for (int reg = 0; mask != 0u; mask >>= 1, reg++) { in SpillCoreRegs()
[all …]
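
SpillCoreRegs above (in both the MIPS and x86 back ends) walks core_spill_mask_ by shifting it right one bit per register. The same iteration pattern in isolation (the callback is a placeholder):

  #include <cstdint>

  template <typename Fn>
  void ForEachSetBit(uint32_t mask, Fn&& visit) {
    for (int reg = 0; mask != 0u; mask >>= 1, ++reg) {
      if ((mask & 1u) != 0u) {
        visit(reg);                    // reg is the bit index of a set bit
      }
    }
  }

Calling ForEachSetBit(spill_mask, fn) visits exactly the registers whose bits are set, in increasing register number.
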
/art/compiler/dex/quick/arm/
target_arm.cc
515 void ArmMir2Lir::DumpResourceMask(LIR* arm_lir, const ResourceMask& mask, const char* prefix) { in DumpResourceMask() argument
519 if (mask.Equals(kEncodeAll)) { in DumpResourceMask()
526 if (mask.HasBit(i)) { in DumpResourceMask()
532 if (mask.HasBit(ResourceMask::kCCode)) { in DumpResourceMask()
535 if (mask.HasBit(ResourceMask::kFPStatus)) { in DumpResourceMask()
540 if (arm_lir && (mask.HasBit(ResourceMask::kDalvikReg))) { in DumpResourceMask()
545 if (mask.HasBit(ResourceMask::kLiteral)) { in DumpResourceMask()
549 if (mask.HasBit(ResourceMask::kHeapRef)) { in DumpResourceMask()
552 if (mask.HasBit(ResourceMask::kMustNotAlias)) { in DumpResourceMask()
int_arm.cc
51 int mask; in OpIT() local
75 mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) | in OpIT()
77 return NewLIR2(kThumb2It, code, mask); in OpIT()
81 int mask; in UpdateIT() local
105 mask = (mask3 << 3) | (mask2 << 2) | (mask1 << 1) | in UpdateIT()
107 it->operands[1] = mask; in UpdateIT()
/art/compiler/optimizing/
intrinsics_x86_64.cc
353 CpuRegister mask = locations->GetTemp(0).AsRegister<CpuRegister>(); in GenAbsInteger() local
357 __ movq(mask, out); in GenAbsInteger()
358 __ sarq(mask, Immediate(63)); in GenAbsInteger()
360 __ addq(out, mask); in GenAbsInteger()
361 __ xorq(out, mask); in GenAbsInteger()
364 __ movl(mask, out); in GenAbsInteger()
365 __ sarl(mask, Immediate(31)); in GenAbsInteger()
367 __ addl(out, mask); in GenAbsInteger()
368 __ xorl(out, mask); in GenAbsInteger()
1506 static void SwapBits(CpuRegister reg, CpuRegister temp, int32_t shift, int32_t mask, in SwapBits() argument
[all …]
intrinsics_arm.cc
268 Register mask = locations->GetTemp(0).AsRegister<Register>(); in GenAbsInteger() local
278 __ Asr(mask, in_reg_hi, 31); in GenAbsInteger()
279 __ adds(out_reg_lo, in_reg_lo, ShifterOperand(mask)); in GenAbsInteger()
280 __ adc(out_reg_hi, in_reg_hi, ShifterOperand(mask)); in GenAbsInteger()
281 __ eor(out_reg_lo, mask, ShifterOperand(out_reg_lo)); in GenAbsInteger()
282 __ eor(out_reg_hi, mask, ShifterOperand(out_reg_hi)); in GenAbsInteger()
287 __ Asr(mask, in_reg, 31); in GenAbsInteger()
288 __ add(out_reg, in_reg, ShifterOperand(mask)); in GenAbsInteger()
289 __ eor(out_reg, mask, ShifterOperand(out_reg)); in GenAbsInteger()
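
Both GenAbsInteger sequences above (sar/add/xor on x86-64, Asr/adds/adc/eor on ARM) use the branchless absolute-value-via-sign-mask trick. A C++ sketch of the 32-bit case (the unsigned arithmetic avoids signed-overflow UB, and the result wraps for INT32_MIN exactly as the machine code does; x >> 31 is an arithmetic shift on the compilers ART targets):

  #include <cstdint>

  inline int32_t AbsViaSignMask(int32_t x) {
    uint32_t mask = static_cast<uint32_t>(x >> 31);    // 0 if x >= 0, 0xFFFFFFFF if x < 0
    uint32_t ux = static_cast<uint32_t>(x);
    return static_cast<int32_t>((ux + mask) ^ mask);   // x if x >= 0, -x otherwise
  }
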
code_generator.h
186 uint32_t mask = 0; in ComputeRegisterMask() local
188 mask |= (1 << registers[i]); in ComputeRegisterMask()
190 return mask; in ComputeRegisterMask()
/art/runtime/
stack_map.cc
157 void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) { in SetRegisterMask() argument
161 mask); in SetRegisterMask()
art_method.h
148 uint32_t mask = kAccFastNative | kAccNative; in IsFastNative() local
149 return (GetAccessFlags() & mask) == mask; in IsFastNative()
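
IsFastNative above checks that every bit in the mask is set ((flags & mask) == mask), which differs from the looser "any bit set" test. A two-line sketch of the distinction:

  #include <cstdint>

  inline bool HasAllFlags(uint32_t flags, uint32_t mask) { return (flags & mask) == mask; }
  inline bool HasAnyFlag(uint32_t flags, uint32_t mask)  { return (flags & mask) != 0u; }
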
stack.cc
439 uint64_t mask = 0xffffffff; in SetRegisterIfAccessible() local
441 mask = mask << 32; in SetRegisterIfAccessible()
445 full_new_value = static_cast<uintptr_t>((old_reg_val_as_wide & mask) | new_vreg_portion); in SetRegisterIfAccessible()
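
SetRegisterIfAccessible above splices a 32-bit vreg value into one half of a 64-bit machine register by masking off the half being replaced and OR-ing in the new portion. A generic sketch of the mask-and-merge (which half ART keeps versus replaces depends on context not shown in the snippet):

  #include <cstdint>

  // Replace the low or high 32 bits of old_reg with new_half, keeping the other half.
  inline uint64_t SpliceHalf(uint64_t old_reg, uint32_t new_half, bool replace_high) {
    uint64_t keep_mask = UINT64_C(0xffffffff);
    uint64_t new_portion = static_cast<uint64_t>(new_half);
    if (replace_high) {
      new_portion <<= 32;              // new bits go into the high half
    } else {
      keep_mask <<= 32;                // keep the high half of the old value
    }
    return (old_reg & keep_mask) | new_portion;
  }
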
mem_map.cc
118 constexpr uintptr_t mask = mask_ones & ~(kPageSize - 1); in CreateStartPos() local
121 return (input & mask) + LOW_MEM_START; in CreateStartPos()
/art/disassembler/
disassembler_mips.cc
30 uint32_t mask; member
36 return (instruction & mask) == value; in Matches()
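
The disassembler's Matches above is the classic fixed-bits opcode test: mask selects the significant instruction bits, value gives their required pattern. As a self-contained sketch:

  #include <cstdint>

  struct OpcodePattern {
    uint32_t mask;     // which instruction bits are significant
    uint32_t value;    // required values of those bits
    const char* name;  // mnemonic (illustrative field)

    bool Matches(uint32_t instruction) const {
      return (instruction & mask) == value;
    }
  };
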
/art/compiler/dex/
type_inference.h
341 bool MergeBits(Type src_type, uint32_t mask) { in MergeBits()
342 uint32_t new_bits = raw_bits_ | (src_type.raw_bits_ & mask); in MergeBits()
