Home
last modified time | relevance | path

Searched refs: RoundUp (Results 1 – 25 of 104) sorted by relevance

Pages: 1 2 3 4 5

/art/runtime/
image.cc:67 CHECK_EQ(image_begin, RoundUp(image_begin, kPageSize)); in ImageHeader()
68 CHECK_EQ(oat_file_begin, RoundUp(oat_file_begin, kPageSize)); in ImageHeader()
69 CHECK_EQ(oat_data_begin, RoundUp(oat_data_begin, kPageSize)); in ImageHeader()
105 return image_reservation_size_ == RoundUp(image_size_, kPageSize); in IsAppImage()
161 static const size_t kStartPos = RoundUp(sizeof(ImageHeader), kObjectAlignment); in VisitObjects()
165 pos += RoundUp(object->SizeOf(), kObjectAlignment); in VisitObjects()
nterp_helpers.cc:137 return RoundUp(frame_size, kStackAlignment); in NterpGetFrameSize()
161 RoundUp(out_regs * kVRegSize, kPointerSize) + // out arguments and pointer alignment in NterpGetReferenceArray()
171 RoundUp(out_regs * kVRegSize, kPointerSize); // out arguments and pointer alignment in NterpGetDexPC()
/art/runtime/arch/arm/
jni_frame_arm.h:71 return RoundUp(size, kAapcsStackAlignment); in GetCriticalNativeStubFrameSize()
81 return RoundUp(size, kAapcsStackAlignment); in GetCriticalNativeDirectCallFrameSize()
quick_entrypoints_cc_arm.cc:50 fpr_double_index = std::max(fpr_double_index, RoundUp(fpr_index, 2)); in quick_invoke_reg_setup()
/art/runtime/arch/x86/
jni_frame_x86.h:67 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeStubFrameSize()
77 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/runtime/gc/space/
malloc_space.cc:105 *growth_limit = RoundUp(*growth_limit, kPageSize); in CreateMemMap()
106 *capacity = RoundUp(*capacity, kPageSize); in CreateMemMap()
143 growth_limit = RoundUp(growth_limit, kPageSize); in SetGrowthLimit()
186 SetEnd(reinterpret_cast<uint8_t*>(RoundUp(reinterpret_cast<uintptr_t>(End()), kPageSize))); in CreateZygoteSpace()
191 size_t size = RoundUp(Size(), kPageSize); in CreateZygoteSpace()
203 SetGrowthLimit(RoundUp(size, kPageSize)); in CreateZygoteSpace()
bump_pointer_space.cc:28 capacity = RoundUp(capacity, kPageSize); in Create()
102 return reinterpret_cast<mirror::Object*>(RoundUp(position, kAlignment)); in GetNextObject()
150 bytes = RoundUp(bytes, kAlignment); in AllocBlock()
238 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
bump_pointer_space-inl.h:31 num_bytes = RoundUp(num_bytes, kAlignment); in Alloc()
48 num_bytes = RoundUp(num_bytes, kAlignment); in AllocThreadUnsafe()
region_space.cc:275 const size_t bytes_allocated = RoundUp(BytesAllocated(), kRegionSize); in ShouldBeEvacuated()
305 size_t obj_alloc_size = RoundUp(obj_size, space::RegionSpace::kRegionSize); in ZeroLiveBytesForLargeObject()
385 num_expected_large_tails = RoundUp(r->BytesAllocated(), kRegionSize) / kRegionSize - 1; in SetFromSpace()
663 size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment); in CheckLiveBytesAgainstRegionBitmap()
968 << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize)); in Dump()
995 prev_object_end = RoundUp(object_end, kAlignment); in GetLongestConsecutiveFreeBytes()
1008 *usable_size = RoundUp(num_bytes, kAlignment); in AllocationSizeNonvirtual()
1011 *usable_size = RoundUp(num_bytes, kRegionSize); in AllocationSizeNonvirtual()
/art/runtime/arch/x86_64/
jni_frame_x86_64.h:84 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeStubFrameSize()
94 return RoundUp(size, kNativeStackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/runtime/arch/arm64/
jni_frame_arm64.h:78 return RoundUp(size, kAapcs64StackAlignment); in GetCriticalNativeStubFrameSize()
88 return RoundUp(size, kAapcs64StackAlignment); in GetCriticalNativeDirectCallFrameSize()
/art/odrefresh/
odr_fs_utils_test.cc:142 uint64_t expected_bytes_used = RoundUp(kFirstFileBytes, sb.st_blocks * kBytesPerBlock) + in TEST_F()
143 RoundUp(kSecondFileBytes, sb.st_blocks * kBytesPerBlock); in TEST_F()
151 expected_bytes_used += RoundUp(i, sb.st_blocks * kBytesPerBlock); in TEST_F()
/art/libartbase/base/metrics/
metrics.h:260 static_assert(RoundUp(sizeof(*this), sizeof(uint64_t))
261 == RoundUp(sizeof(intptr_t) + sizeof(value_t), sizeof(uint64_t)));
293 static_assert(RoundUp(sizeof(*this), sizeof(uint64_t))
294 == RoundUp(sizeof(intptr_t) + sizeof(value_t) + sizeof(count_t),
345 static_assert(RoundUp(sizeof(*this), sizeof(uint64_t))
346 == RoundUp(sizeof(intptr_t) + sizeof(value_t) * num_buckets_, sizeof(uint64_t)));
400 static_assert(RoundUp(sizeof(*this), sizeof(uint64_t)) ==
401 RoundUp(sizeof(intptr_t) + sizeof(T), sizeof(uint64_t)));
/art/compiler/utils/
swap_space.cc:110 size = RoundUp(size, 8U); in Alloc()
149 size_t next_part = std::max(RoundUp(min_size, kPageSize), RoundUp(kMininumMapSize, kPageSize)); in NewFileChunk()
177 size = RoundUp(size, 8U); in Free()
/art/runtime/gc/accounting/
bitmap.cc:48 const size_t bitmap_size = RoundUp( in AllocateMemMap()
49 RoundUp(num_bits, kBitsPerBitmapWord) / kBitsPerBitmapWord * sizeof(uintptr_t), kPageSize); in AllocateMemMap()
/art/dex2oat/linker/arm/
relative_patcher_thumb2_test.cc:728 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); in TestBakerFieldWide()
794 thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); in TestBakerFieldWide()
826 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArmAlignment); in TestBakerFieldNarrow()
895 thunk_offset += RoundUp(expected_thunk.size(), kArmAlignment); in TestBakerFieldNarrow()
950 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
971 - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArmAlignment) in TEST_F()
972 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArmAlignment) in TEST_F()
1018 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
1025 const uint32_t bne = BneWWithOffset(kLiteralOffset1, RoundUp(raw_code1.size(), kArmAlignment)); in TEST_F()
1048 RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArmAlignment); in TEST_F()
[all …]
/art/compiler/jni/quick/arm/
calling_convention_arm.cc:231 gpr_index_ = RoundUp(gpr_index_, 2u) + 2u; in Next()
249 return RoundUp(gpr_index_, 2u) + 1u < kHFCoreArgumentRegistersCount; in IsCurrentParamInRegister()
272 CHECK_EQ(RoundUp(gpr_index_, 2u), 2u); in CurrentParamRegister()
429 return RoundUp(total_size, kStackAlignment); in FrameSize()
450 size_t out_args_size = RoundUp(size, kAapcsStackAlignment); in OutFrameSize()
/art/runtime/mirror/
array.h:81 size_t data_offset = RoundUp(OFFSETOF_MEMBER(Array, first_element_), component_size); in DataOffset()
82 DCHECK_EQ(RoundUp(data_offset, component_size), data_offset) in DataOffset()
89 constexpr size_t data_offset = RoundUp(kFirstElementOffset, kComponentSize); in DataOffset()
90 static_assert(RoundUp(data_offset, kComponentSize) == data_offset, "RoundUp fail"); in DataOffset()
/art/libartbase/base/
bit_vector.h:138 return RoundUp(bits, kWordBits) / kWordBits; in BitsToWords()
315 : num_columns_(RoundUp(num_columns, BitVector::kWordBits)), num_rows_(num_rows) {} in BaseBitVectorArray()
366 return rows * RoundUp(cols, BitVector::kWordBits); in RequiredBitVectorSize()
370 return cols != 0 ? bv.GetBitSizeOf() / RoundUp(cols, BitVector::kWordBits) : 0; in MaxRowsFor()
scoped_arena_allocator.cc:96 size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8); in AllocWithMemoryTool()
150 arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8); in Reset()
arena_allocator.h:272 bytes = RoundUp(bytes, kAlignment);
312 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
318 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
/art/dex2oat/linker/arm64/
relative_patcher_arm64_test.cc:1068 uint32_t thunk_offset = GetMethodOffset(method_idx) + RoundUp(kMethodCodeSize, kArm64Alignment); in TestBakerField()
1121 thunk_offset += RoundUp(expected_thunk.size(), kArm64Alignment); in TestBakerField()
1158 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1173 1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1174 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1218 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1225 const uint32_t cbnz_offset = RoundUp(raw_code1.size(), kArm64Alignment) - kLiteralOffset1; in TEST_F()
1247 1 * MB - RoundUp(raw_code1.size() + sizeof(OatQuickMethodHeader), kArm64Alignment); in TEST_F()
1262 1 * MB - RoundUp(thunk_size + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
1263 - RoundUp(kNopCode.size() + sizeof(OatQuickMethodHeader), kArm64Alignment) in TEST_F()
[all …]
/art/runtime/gc/allocator/
rosalloc.h:550 return RoundUp(size, kThreadLocalBracketQuantumSize); in RoundToBracketSize()
552 return RoundUp(size, kBracketQuantumSize); in RoundToBracketSize()
564 return RoundUp(size, kThreadLocalBracketQuantumSize) / kThreadLocalBracketQuantumSize - 1; in SizeToIndex()
566 return (RoundUp(size, kBracketQuantumSize) - kMaxThreadLocalBracketSize) / kBracketQuantumSize in SizeToIndex()
581 bracket_size = RoundUp(size, kThreadLocalBracketQuantumSize); in SizeToIndexAndBracketSize()
584 bracket_size = RoundUp(size, kBracketQuantumSize); in SizeToIndexAndBracketSize()
875 return RoundUp(bytes, kPageSize); in UsableSize()
dlmalloc.cc:72 start = reinterpret_cast<void*>(art::RoundUp(reinterpret_cast<uintptr_t>(start), art::kPageSize)); in DlmallocMadviseCallback()
/art/compiler/jni/quick/x86/
calling_convention_x86.cc:231 return RoundUp(total_size, kStackAlignment); in FrameSize()
264 size_t out_args_size = RoundUp(size, kNativeStackAlignment); in OutFrameSize()

Pages: 1 2 3 4 5