
Searched refs:IsBitSet (Results 1 – 25 of 25) sorted by relevance

/art/compiler/dex/
type_inference.h
155 return IsBitSet(kFlagNonNull); in NonNull()
159 return IsBitSet(kFlagWide); in Wide()
163 return IsBitSet(kFlagNarrow); in Narrow()
167 return IsBitSet(kFlagFp); in Fp()
171 return IsBitSet(kFlagCore); in Core()
175 return IsBitSet(kFlagRef); in Ref()
179 return IsBitSet(kFlagLowWord); in LowWord()
183 return IsBitSet(kFlagHighWord); in HighWord()
333 bool IsBitSet(uint32_t flag) const { in IsBitSet() function
gvn_dead_code_elimination.cc
104 : vreg_high_words_.IsBitSet(v_reg); in AddMIRWithDef()
115 : !vreg_high_words_.IsBitSet(v_reg + 1); in AddMIRWithDef()
131 DCHECK(!vreg_high_words_.IsBitSet(data->vreg_def)); in RemoveLastMIRData()
138 DCHECK(vreg_high_words_.IsBitSet(data->vreg_def + 1)); in RemoveLastMIRData()
225 DCHECK(!vreg_high_words_.IsBitSet(v_reg)); // Keep marked as low word. in UpdateInitialVRegValue()
241 DCHECK(!vreg_high_words_.IsBitSet(v_reg)); // Keep marked as low word. in UpdateInitialVRegValue()
253 DCHECK(!vreg_high_words_.IsBitSet(v_reg + 1)); // Keep marked as low word. in UpdateInitialVRegValue()
320 DCHECK_EQ(vreg_high_words_.IsBitSet(v_reg), v_reg == old_data->vreg_def + 1); in ReplaceChange()
338 DCHECK_EQ(vreg_high_words_.IsBitSet(v_reg), v_reg == data->vreg_def + 1); in RemoveChange()
892 if (data->has_def && vregs_to_kill_->IsBitSet(data->vreg_def)) { in FindChangesToKill()
[all …]
mir_graph.cc
600 bool in_try_block = try_block_addr->IsBitSet(cur_offset); in ProcessCanThrow()
1875 !current_loop->IsBitSet(candidate->id))) { in SelectTopologicalSortOrderFallBack()
1893 !current_loop->IsBitSet(child_bb->id))) { in SelectTopologicalSortOrderFallBack()
1896 if (!candidate_reachable.IsBitSet(child_bb->id)) { in SelectTopologicalSortOrderFallBack()
1927 if (!pred_bb->visited && !reachable->IsBitSet(pred_bb->id)) { in ComputeUnvisitedReachableFrom()
1997 if (reachable == nullptr || reachable->IsBitSet(candidate_id)) { in ComputeTopologicalSortOrder()
2030 !loop_head_reachable_from[loop_head_stack.back()]->IsBitSet(candidate->id))) { in ComputeTopologicalSortOrder()
2038 !pred_bb->dominators->IsBitSet(candidate->id)) { in ComputeTopologicalSortOrder()
2080 loop_head_reachable_from[loop_head_stack.back()]->IsBitSet(successor->id)) { in ComputeTopologicalSortOrder()
2083 DCHECK(!loop_exit_blocks.IsBitSet(successor->id)); in ComputeTopologicalSortOrder()
mir_optimization.cc
105 if (!is_constant_v_->IsBitSet(mir->ssa_rep->uses[i])) break; in DoConstantPropagation()
984 if (!copied_first || !vregs_to_check->IsBitSet(pred_bb->last_mir_insn->dalvikInsn.vA)) { in EliminateNullChecks()
1027 if (!vregs_to_check->IsBitSet(src_vreg)) { in EliminateNullChecks()
1079 if (vregs_to_check->IsBitSet(mir->dalvikInsn.vB)) { in EliminateNullChecks()
1303 if (!classes_to_check->IsBitSet(check_dex_cache_index)) { in EliminateClassInitChecks()
1314 if (!classes_to_check->IsBitSet(check_clinit_index)) { in EliminateClassInitChecks()
1464 if (temp_.smi.processed_indexes->IsBitSet(method_index)) { in ComputeInlineIFieldLoweringInfo()
mir_dataflow.cc
914 if (!def_v->IsBitSet(dalvik_reg_id)) { in HandleLiveInUse()
1196 if (temp_.ssa.phi_node_blocks[dalvik_reg]->IsBitSet(bb_id)) { in DoSSAConversion()
1197 if (!bb->data_flow_info->live_in_v->IsBitSet(dalvik_reg)) { in DoSSAConversion()
type_inference.cc
234 if (data.def_phi_blocks_->IsBitSet(bb_id)) { in AddPseudoPhis()
289 if (entry.second.def_phi_blocks_->IsBitSet(bb->id)) { in ProcessPseudoPhis()
378 if (!bb->data_flow_info->live_in_v->IsBitSet(v_reg)) { in IsSRegLiveAtStart()
mir_graph.h
784 return is_constant_v_->IsBitSet(s_reg); in IsConst()
local_value_numbering.cc
531 bool live = live_in_v->IsBitSet(gvn_->GetMirGraph()->SRegToVReg(entry.first)); in CopyLiveSregValues()
554 bool live_and_same = live_in_v->IsBitSet(gvn_->GetMirGraph()->SRegToVReg(entry.first)); in IntersectSregValueMaps()
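
The type_inference.h hits above come from a second, unrelated IsBitSet: a private helper declared at type_inference.h:333 that the type-flag getters such as NonNull() and Wide() wrap. A minimal sketch of that wrapper pattern, assuming mask-style kFlag* constants and a hypothetical flags_ field (the real flag encoding and field name are not visible in the hits):

    #include <cstdint>

    class TypeFlags {  // hypothetical stand-in for the class defined in type_inference.h
     public:
      static constexpr uint32_t kFlagNonNull = 1u << 0;  // assumed mask-style flag values
      static constexpr uint32_t kFlagWide    = 1u << 1;

      explicit TypeFlags(uint32_t flags) : flags_(flags) {}

      // Getters follow the shape of the hits at type_inference.h:155-183.
      bool NonNull() const { return IsBitSet(kFlagNonNull); }
      bool Wide() const { return IsBitSet(kFlagWide); }

     private:
      // Mirrors the helper at type_inference.h:333: true iff the given
      // flag bit is set in the stored flag word.
      bool IsBitSet(uint32_t flag) const { return (flags_ & flag) != 0; }

      uint32_t flags_;  // hypothetical field name
    };
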
/art/runtime/base/
bit_vector_test.cc
37 EXPECT_FALSE(bv.IsBitSet(i)); in TEST()
50 EXPECT_TRUE(bv.IsBitSet(0)); in TEST()
52 EXPECT_FALSE(bv.IsBitSet(i)); in TEST()
54 EXPECT_TRUE(bv.IsBitSet(kBits - 1)); in TEST()
166 EXPECT_TRUE(first.IsBitSet(64)); in TEST()
bit_vector.h
155 bool IsBitSet(uint32_t idx) const { in IsBitSet() function
158 return (idx < (storage_size_ * kWordBits)) && IsBitSet(storage_, idx); in IsBitSet()
232 static bool IsBitSet(const uint32_t* storage, uint32_t idx) { in IsBitSet() function
bit_vector.cc
347 buffer << IsBitSet(i); in DumpHelper()
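
bit_vector.h declares the BitVector::IsBitSet that the remaining hits call: a bounds-checked member (lines 155-158) that delegates to a static overload taking a raw word array (line 232). A compilable sketch of that two-overload shape, using free functions, a 32-bit word width, and the conventional word/bit arithmetic as assumptions rather than ART's actual code:

    #include <cstdint>
    #include <cstdio>

    static constexpr uint32_t kWordBits = 32;  // assumed word width for uint32_t storage

    // Static form (cf. bit_vector.h:232): test bit `idx` in a raw word
    // array, with no bounds check.
    static bool IsBitSet(const uint32_t* storage, uint32_t idx) {
      return (storage[idx / kWordBits] & (1u << (idx % kWordBits))) != 0;
    }

    // Member-style form (cf. bit_vector.h:155-158): out-of-range indices
    // are simply reported as unset before delegating to the static helper.
    static bool IsBitSetChecked(const uint32_t* storage, uint32_t storage_size,
                                uint32_t idx) {
      return (idx < storage_size * kWordBits) && IsBitSet(storage, idx);
    }

    int main() {
      uint32_t storage[2] = {0, 0};                       // room for 64 bits
      storage[33 / kWordBits] |= 1u << (33 % kWordBits);  // set bit 33
      std::printf("%d %d\n",
                  IsBitSetChecked(storage, 2, 33),        // 1: bit 33 is set
                  IsBitSetChecked(storage, 2, 70));       // 0: past the end
      return 0;
    }
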
/art/compiler/optimizing/
dead_code_elimination.cc
25 if (visited->IsBitSet(block_id)) { in MarkReachableBlocks()
78 if (live_blocks.IsBitSet(id)) { in RemoveDeadBlocks()
79 if (affected_loops.IsBitSet(id)) { in RemoveDeadBlocks()
stack_map_stream.cc
155 if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) { in ComputeDexRegisterMapSize()
266 if (entry.live_dex_registers_mask->IsBitSet(dex_register_number)) { in FillIn()
342 if (a.live_dex_registers_mask->IsBitSet(i) != b.live_dex_registers_mask->IsBitSet(i)) { in HaveTheSameDexMaps()
345 if (a.live_dex_registers_mask->IsBitSet(i)) { in HaveTheSameDexMaps()
gvn.cc
83 if (!buckets_owned_.IsBitSet(index)) { in Add()
173 DCHECK(!buckets_owned_.IsBitSet(index)); in CloneBucket()
213 if (!buckets_owned_.IsBitSet(i)) { in DeleteAllImpureWhich()
232 DCHECK(buckets_owned_.IsBitSet(i) || node == nullptr); in DeleteAllImpureWhich()
licm.cc
102 DCHECK(visited.IsBitSet(inner->GetBlockId())); in Run()
nodes.cc
55 if (!visited.IsBitSet(i)) { in RemoveInstructionsAsUsersFromDeadBlocks()
67 if (!visited.IsBitSet(i)) { in RemoveDeadBlocks()
84 if (visited->IsBitSet(id)) return; in VisitBlockForBackEdges()
90 if (visiting->IsBitSet(successor->GetBlockId())) { in VisitBlockForBackEdges()
154 if (visited.IsBitSet(second->GetBlockId())) { in FindCommonDominator()
358 if (blocks_.IsBitSet(block->GetBlockId())) { in PopulateRecursive()
428 return blocks_.IsBitSet(block.GetBlockId()); in Contains()
432 return other.blocks_.IsBitSet(header_->GetBlockId()); in IsIn()
liveness_test.cc
41 buffer << vector->IsBitSet(i); in DumpBitVector()
graph_checker.cc
137 if (seen_ids_.IsBitSet(instruction->GetId())) { in VisitInstruction()
361 if (!loop_blocks.IsBitSet(back_edge_id)) { in CheckLoop()
find_loops_test.cc
134 ASSERT_TRUE(blocks.IsBitSet(blocks_in_loop[i])); in TestBlock()
stack_map_test.cc
27 if (region.LoadBit(i) != bit_vector.IsBitSet(i)) { in SameBits()
nodes.h
4091 if (!blocks_in_loop_.IsBitSet(index_)) { in HBlocksInLoopIterator()
4101 if (blocks_in_loop_.IsBitSet(index_)) { in Advance()
4124 if (!blocks_in_loop_.IsBitSet(blocks_.Get(index_)->GetBlockId())) { in HBlocksInLoopReversePostOrderIterator()
4134 if (blocks_in_loop_.IsBitSet(blocks_.Get(index_)->GetBlockId())) { in Advance()
register_allocator.cc
494 if (liveness_of_spill_slot->IsBitSet(j)) { in ValidateIntervals()
511 if (liveness_of_register->IsBitSet(j)) { in ValidateIntervals()
/art/runtime/verifier/
reg_type_test.cc
483 EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_0.GetId())); in TEST_F()
484 EXPECT_TRUE(unresolved_parts.IsBitSet(ref_type_1.GetId())); in TEST_F()
/art/runtime/
stack_map.h
585 region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i)); in SetLiveBitMask()
715 region.StoreBit(i, sp_map.IsBitSet(i)); in SetStackMask()
oat_file.cc
725 if (!BitVector::IsBitSet(bitmap_, method_index)) { in GetOatMethodOffsets()