Searched refs:MaskedBits (Results 1 – 6 of 6) sorted by relevance
7 ; ((%x << MaskedBits) a>> MaskedBits) != %x
10 ; Where KeptBits = bitwidth(%x) - MaskedBits
7 ; ((%x << MaskedBits) a>> MaskedBits) == %x
10 ; Where KeptBits = bitwidth(%x) - MaskedBits
304 Value *MaskedBits = B.CreateAnd(Bits, BitMask); in createMaskedBitTest() local
305 return B.CreateICmpNE(MaskedBits, ConstantInt::get(BitsType, 0)); in createMaskedBitTest()
528 Value *MaskedBits = B.CreateAnd(Bits, BitMask); in createMaskedBitTest() local
529 return B.CreateICmpNE(MaskedBits, ConstantInt::get(BitsType, 0)); in createMaskedBitTest()
1924 const unsigned MaskedBits = XVT.getSizeInBits() - KeptBits; in optimizeSetCCOfSignedTruncationCheck() local
1925 assert(MaskedBits > 0 && MaskedBits < XVT.getSizeInBits() && "unreachable"); in optimizeSetCCOfSignedTruncationCheck()
1929 SDValue ShiftAmt = DAG.getConstant(MaskedBits, DL, XVT); in optimizeSetCCOfSignedTruncationCheck()
2986 const APInt &MaskedBits = *C0; in foldICmpWithTruncSignExtendedVal() local
2987 assert(MaskedBits != 0 && "shift by zero should be folded away already."); in foldICmpWithTruncSignExtendedVal()
3011 assert(BitWidth.ugt(MaskedBits) && "shifts should leave some bits untouched"); in foldICmpWithTruncSignExtendedVal()
3014 const APInt KeptBits = BitWidth - MaskedBits; in foldICmpWithTruncSignExtendedVal()