Searched refs:ptr (Results 1 – 25 of 99) sorted by relevance

/art/runtime/
leb128.h  32 const uint8_t* ptr = *data; in DecodeUnsignedLeb128() local
33 int result = *(ptr++); in DecodeUnsignedLeb128()
35 int cur = *(ptr++); in DecodeUnsignedLeb128()
38 cur = *(ptr++); in DecodeUnsignedLeb128()
41 cur = *(ptr++); in DecodeUnsignedLeb128()
46 cur = *(ptr++); in DecodeUnsignedLeb128()
52 *data = ptr; in DecodeUnsignedLeb128()
59 const uint8_t* ptr = *data; in DecodeUnsignedLeb128Checked() local
60 if (ptr >= end) { in DecodeUnsignedLeb128Checked()
63 int result = *(ptr++); in DecodeUnsignedLeb128Checked()
[all …]
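
Illustrative sketch for the leb128.h hits above: unsigned LEB128 stores 7 payload bits per byte, with the high bit marking a continuation byte. ART's decoder unrolls the byte handling (lines 32-52 above); the loop form below is a minimal equivalent, and DecodeUnsignedLeb128Sketch is a stand-in name rather than ART's function.

#include <cstdint>

// Decode one unsigned LEB128 value and advance the caller's cursor past it.
static uint32_t DecodeUnsignedLeb128Sketch(const uint8_t** data) {
  const uint8_t* ptr = *data;
  uint32_t result = 0;
  int shift = 0;
  uint8_t cur;
  do {
    cur = *(ptr++);
    result |= static_cast<uint32_t>(cur & 0x7f) << shift;  // low 7 bits are payload
    shift += 7;
  } while ((cur & 0x80) != 0 && shift < 35);  // high bit set = more bytes; cap at 5 bytes
  *data = ptr;
  return result;
}

The Checked variant seen at lines 59-63 follows the same shape but verifies ptr against an end pointer before each read.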
method_info.h  33 explicit MethodInfo(const uint8_t* ptr) { in MethodInfo() argument
34 if (ptr != nullptr) { in MethodInfo()
35 num_method_indices_ = DecodeUnsignedLeb128(&ptr); in MethodInfo()
36 region_ = MemoryRegion(const_cast<uint8_t*>(ptr), in MethodInfo()
42 MethodInfo(uint8_t* ptr, size_t num_method_indices) : num_method_indices_(num_method_indices) { in MethodInfo() argument
43 DCHECK(ptr != nullptr); in MethodInfo()
44 ptr = EncodeUnsignedLeb128(ptr, num_method_indices_); in MethodInfo()
45 region_ = MemoryRegion(ptr, num_method_indices_ * sizeof(MethodIndexType)); in MethodInfo()
50 uint8_t* ptr = temp; in ComputeSize() local
51 ptr = EncodeUnsignedLeb128(ptr, num_method_indices); in ComputeSize()
[all …]
obj_ptr.h  58 ALWAYS_INLINE ObjPtr(Type* ptr) // NOLINT in ObjPtr() argument
60 : reference_(Encode(static_cast<MirrorType*>(ptr))) { in ObjPtr()
78 ALWAYS_INLINE ObjPtr& operator=(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) { in REQUIRES_SHARED()
79 Assign(ptr); in REQUIRES_SHARED()
83 ALWAYS_INLINE void Assign(MirrorType* ptr) REQUIRES_SHARED(Locks::mutator_lock_) { in Assign() argument
84 reference_ = Encode(ptr); in Assign()
105 ALWAYS_INLINE bool operator==(const ObjPtr& ptr) const REQUIRES_SHARED(Locks::mutator_lock_) {
106 return Ptr() == ptr.Ptr();
110 ALWAYS_INLINE bool operator==(const PointerType* ptr) const
112 return Ptr() == ptr;
[all …]
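
For the obj_ptr.h hits, a minimal sketch of a pointer wrapper with the same surface shown above (implicit construction from a raw pointer, Assign, and the two operator== overloads). ART's real ObjPtr additionally packs a validity cookie into the stored word and checks it under the mutator lock; that machinery is omitted, and PtrWrapperSketch is a made-up name.

#include <cstdint>

template <typename T>
class PtrWrapperSketch {
 public:
  PtrWrapperSketch(T* ptr) : reference_(Encode(ptr)) {}  // implicit by design, as in ObjPtr

  PtrWrapperSketch& operator=(T* ptr) {
    Assign(ptr);
    return *this;
  }

  void Assign(T* ptr) { reference_ = Encode(ptr); }

  T* Ptr() const { return reinterpret_cast<T*>(reference_); }

  bool operator==(const PtrWrapperSketch& other) const { return Ptr() == other.Ptr(); }
  bool operator==(const T* ptr) const { return Ptr() == ptr; }

 private:
  // ObjPtr's Encode() would fold a validity cookie into the word; the sketch stores raw bits.
  static uintptr_t Encode(T* ptr) { return reinterpret_cast<uintptr_t>(ptr); }

  uintptr_t reference_;
};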
linear_alloc.cc  26 void* LinearAlloc::Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) { in Realloc() argument
28 return allocator_.Realloc(ptr, old_size, new_size); in Realloc()
51 bool LinearAlloc::Contains(void* ptr) const { in Contains()
53 return allocator_.Contains(ptr); in Contains()
56 bool LinearAlloc::ContainsUnsafe(void* ptr) const { in ContainsUnsafe()
57 return allocator_.Contains(ptr); in ContainsUnsafe()
utf_test.cc  61 const char* ptr = start; in TEST_F() local
65 pair = GetUtf16FromUtf8(&ptr); in TEST_F()
68 EXPECT_ARRAY_POSITION(1, ptr, start); in TEST_F()
71 pair = GetUtf16FromUtf8(&ptr); in TEST_F()
74 EXPECT_ARRAY_POSITION(3, ptr, start); in TEST_F()
77 pair = GetUtf16FromUtf8(&ptr); in TEST_F()
80 EXPECT_ARRAY_POSITION(6, ptr, start); in TEST_F()
83 pair = GetUtf16FromUtf8(&ptr); in TEST_F()
86 EXPECT_ARRAY_POSITION(10, ptr, start); in TEST_F()
89 pair = GetUtf16FromUtf8(&ptr); in TEST_F()
[all …]
imtable.h  46 uint8_t* ptr = AddressOfElement(index, pointer_size); in Get() local
48 uint32_t value = *reinterpret_cast<uint32_t*>(ptr); in Get()
51 uint64_t value = *reinterpret_cast<uint64_t*>(ptr); in Get()
58 uint8_t* ptr = AddressOfElement(index, pointer_size); in Set() local
62 *reinterpret_cast<uint32_t*>(ptr) = static_cast<uint32_t>(value); in Set()
64 *reinterpret_cast<uint64_t*>(ptr) = reinterpret_cast<uint64_t>(method); in Set()
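
A hedged sketch of the imtable.h Get/Set pattern above: the table stores method pointers as raw 32-bit or 64-bit words, selected by the runtime's pointer size. TableSketch and its members are illustrative stand-ins, not ART's ImTable API.

#include <cstddef>
#include <cstdint>

enum class PointerSize { k32 = 4, k64 = 8 };

struct TableSketch {
  uint8_t* data;  // base of the flat table

  uint8_t* AddressOfElement(size_t index, PointerSize size) const {
    return data + index * static_cast<size_t>(size);
  }

  void* Get(size_t index, PointerSize size) const {
    uint8_t* ptr = AddressOfElement(index, size);
    if (size == PointerSize::k32) {
      uint32_t value = *reinterpret_cast<uint32_t*>(ptr);
      return reinterpret_cast<void*>(static_cast<uintptr_t>(value));
    }
    uint64_t value = *reinterpret_cast<uint64_t*>(ptr);
    return reinterpret_cast<void*>(static_cast<uintptr_t>(value));
  }

  void Set(size_t index, void* method, PointerSize size) {
    uint8_t* ptr = AddressOfElement(index, size);
    if (size == PointerSize::k32) {
      *reinterpret_cast<uint32_t*>(ptr) = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(method));
    } else {
      *reinterpret_cast<uint64_t*>(ptr) = reinterpret_cast<uint64_t>(method);
    }
  }
};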
obj_ptr-inl.h  42 inline uintptr_t ObjPtr<MirrorType>::Encode(MirrorType* ptr) { in Encode() argument
43 uintptr_t ref = reinterpret_cast<uintptr_t>(ptr); in Encode()
57 inline std::ostream& operator<<(std::ostream& os, ObjPtr<MirrorType> ptr) {
59 return os << ptr.PtrUnchecked();
linear_alloc.h  35 void* Realloc(Thread* self, void* ptr, size_t old_size, size_t new_size) REQUIRES(!lock_);
49 bool Contains(void* ptr) const REQUIRES(!lock_);
53 bool ContainsUnsafe(void* ptr) const NO_THREAD_SAFETY_ANALYSIS;
oat.cc  409 const char* ptr = reinterpret_cast<const char*>(&key_value_store_); in GetStoreValueByKey() local
410 const char* end = ptr + key_value_store_size_; in GetStoreValueByKey()
412 while (ptr < end) { in GetStoreValueByKey()
414 const char* str_end = ParseString(ptr, end); in GetStoreValueByKey()
416 if (strcmp(key, ptr) == 0) { in GetStoreValueByKey()
423 ptr = ParseString(str_end + 1, end) + 1; in GetStoreValueByKey()
435 const char* ptr = reinterpret_cast<const char*>(&key_value_store_); in GetStoreKeyValuePairByIndex() local
436 const char* end = ptr + key_value_store_size_; in GetStoreKeyValuePairByIndex()
439 while (ptr < end && counter >= 0) { in GetStoreKeyValuePairByIndex()
441 const char* str_end = ParseString(ptr, end); in GetStoreKeyValuePairByIndex()
[all …]
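
Sketch of the key/value walk in oat.cc's GetStoreValueByKey above: the store is a flat byte region of alternating NUL-terminated key and value strings, scanned linearly until the key matches. ParseStringSketch is a stand-in for ART's ParseString helper (assumed behavior: advance to the next NUL within bounds).

#include <cstddef>
#include <cstring>

static const char* ParseStringSketch(const char* start, const char* end) {
  while (start < end && *start != 0) {
    start++;
  }
  return start;  // points at the NUL terminator, or at end if unterminated
}

static const char* GetValueByKeySketch(const char* store, size_t store_size, const char* key) {
  const char* ptr = store;
  const char* end = store + store_size;
  while (ptr < end) {
    const char* str_end = ParseStringSketch(ptr, end);  // end of the current key
    if (str_end >= end) {
      return nullptr;                                   // malformed store
    }
    if (strcmp(key, ptr) == 0) {
      return str_end + 1;                               // the value follows the key's NUL
    }
    ptr = ParseStringSketch(str_end + 1, end) + 1;      // skip the value, move to the next key
  }
  return nullptr;
}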
mem_map.cc  141 bool MemMap::ContainedWithinExistingMap(uint8_t* ptr, size_t size, std::string* error_msg) { in ContainedWithinExistingMap() argument
142 uintptr_t begin = reinterpret_cast<uintptr_t>(ptr); in ContainedWithinExistingMap()
264 static inline void* TryMemMapLow4GB(void* ptr, in TryMemMapLow4GB() argument
270 void* actual = mmap(ptr, page_aligned_byte_count, prot, flags, fd, offset); in TryMemMapLow4GB()
840 for (uintptr_t ptr = next_mem_pos_; ptr < 4 * GB; ptr += kPageSize) { in MapInternal() local
843 auto it = gMaps->upper_bound(reinterpret_cast<void*>(ptr)); in MapInternal()
848 ptr = std::max(ptr, reinterpret_cast<uintptr_t>(before_it->second->BaseEnd())); in MapInternal()
849 CHECK_ALIGNED(ptr, kPageSize); in MapInternal()
853 size_t delta = reinterpret_cast<uintptr_t>(it->first) - ptr; in MapInternal()
859 ptr = reinterpret_cast<uintptr_t>(it->second->BaseEnd()); in MapInternal()
[all …]
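
A simplified sketch of the low-address scan in mem_map.cc's MapInternal above: walk a map of existing reservations keyed by base address and place a new mapping in the first page-aligned hole below 4 GB that is large enough. The container and constants are stand-ins for ART's gMaps bookkeeping, and a 64-bit host is assumed.

#include <cstddef>
#include <cstdint>
#include <iterator>
#include <map>

static constexpr uintptr_t kPageSketch = 4096;
static constexpr uintptr_t kLowLimit = UINT64_C(4) * 1024 * 1024 * 1024;  // 4 GB

// Base address -> size of an existing reservation (mirrors the role of gMaps).
using ReservationMap = std::map<uintptr_t, size_t>;

// Returns a page-aligned address with `bytes` of free space below 4 GB, or 0 if none exists.
static uintptr_t FindLowGapSketch(const ReservationMap& maps, size_t bytes, uintptr_t start) {
  for (uintptr_t ptr = start; ptr + bytes <= kLowLimit; ) {
    auto it = maps.upper_bound(ptr);  // first reservation starting above ptr
    if (it != maps.begin()) {
      auto before = std::prev(it);
      uintptr_t before_end = before->first + before->second;
      if (before_end > ptr) {  // ptr falls inside an existing reservation: skip past it
        ptr = (before_end + kPageSketch - 1) & ~(kPageSketch - 1);
        continue;
      }
    }
    uintptr_t gap_end = (it == maps.end()) ? kLowLimit : it->first;
    if (gap_end - ptr >= bytes) {
      return ptr;  // the hole between ptr and the next reservation is large enough
    }
    ptr = it->first + it->second;  // hole too small: jump past the blocking reservation
    ptr = (ptr + kPageSketch - 1) & ~(kPageSketch - 1);
  }
  return 0;
}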
stack_map.h  762 const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this); in Encode() local
763 dest->insert(dest->end(), ptr, ptr + sizeof(*this)); in Encode()
767 void Decode(const uint8_t** ptr) { in Decode() argument
768 *this = *reinterpret_cast<const StackMapEncoding*>(*ptr); in Decode()
769 *ptr += sizeof(*this); in Decode()
942 const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this); in Encode() local
943 dest->insert(dest->end(), ptr, ptr + sizeof(*this)); in Encode()
947 void Decode(const uint8_t** ptr) { in Decode() argument
948 *this = *reinterpret_cast<const InlineInfoEncoding*>(*ptr); in Decode()
949 *ptr += sizeof(*this); in Decode()
[all …]
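
Sketch of the Encode/Decode pattern in stack_map.h above: a trivially copyable encoding header is appended to a byte vector verbatim and later read back by copying the bytes and advancing the cursor. The fields below are placeholders, not ART's real encoding layout; memcpy is used on the read side to sidestep alignment concerns.

#include <cstdint>
#include <cstring>
#include <vector>

struct EncodingSketch {
  uint32_t number_of_stack_maps;   // illustrative fields only
  uint32_t stack_map_size_in_bits;

  void Encode(std::vector<uint8_t>* dest) const {
    const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
    dest->insert(dest->end(), ptr, ptr + sizeof(*this));  // append the raw bytes
  }

  void Decode(const uint8_t** ptr) {
    std::memcpy(this, *ptr, sizeof(*this));  // copy the bytes back into the struct
    *ptr += sizeof(*this);                   // advance the cursor, as the ART code does
  }
};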
/art/runtime/base/
scoped_arena_allocator.h  63 static ArenaFreeTag& ArenaTagForAllocation(void* ptr) { in ArenaTagForAllocation() argument
65 return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1); in ArenaTagForAllocation()
95 uint8_t* ptr = top_ptr_; in Alloc() local
96 if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) { in Alloc()
97 ptr = AllocateFromNextArena(rounded_bytes); in Alloc()
100 top_ptr_ = ptr + rounded_bytes; in Alloc()
102 ptr += kAlignment; in Alloc()
103 ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed; in Alloc()
105 return ptr; in Alloc()
169 static void operator delete(void* ptr ATTRIBUTE_UNUSED) {} in delete()
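
A hedged sketch of the bump-pointer Alloc shown above for scoped_arena_allocator.h: round the request to the arena's alignment, hand out bytes from the current arena, and fall back to a fresh arena when the remainder is too small. The per-allocation tag byte that ART's debug build reserves (the kAlignment skip plus ArenaTagForAllocation) is omitted, and the arena refill below is simplified to heap allocation.

#include <cstddef>
#include <cstdint>
#include <memory>
#include <vector>

class BumpArenaSketch {
 public:
  void* Alloc(size_t bytes) {
    const size_t rounded = (bytes + kAlignment - 1) & ~(kAlignment - 1);
    uint8_t* ptr = top_ptr_;
    if (static_cast<size_t>(top_end_ - ptr) < rounded) {
      ptr = AllocateFromNextArena(rounded);  // current arena too small: grab a new one
    }
    top_ptr_ = ptr + rounded;                // bump the cursor past this allocation
    return ptr;
  }

 private:
  static constexpr size_t kAlignment = 8;
  static constexpr size_t kDefaultArenaSize = 128 * 1024;

  uint8_t* AllocateFromNextArena(size_t rounded_bytes) {
    const size_t size = rounded_bytes > kDefaultArenaSize ? rounded_bytes : kDefaultArenaSize;
    arenas_.push_back(std::make_unique<uint8_t[]>(size));  // all arenas are released together
    uint8_t* arena = arenas_.back().get();
    top_end_ = arena + size;
    return arena;
  }

  uint8_t* top_ptr_ = nullptr;  // next free byte in the current arena
  uint8_t* top_end_ = nullptr;  // one past the end of the current arena
  std::vector<std::unique_ptr<uint8_t[]>> arenas_;
};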
arena_allocator.h  174 void MakeDefined(void* ptr, size_t size) { in MakeDefined() argument
176 DoMakeDefined(ptr, size); in MakeDefined()
179 void MakeUndefined(void* ptr, size_t size) { in MakeUndefined() argument
181 DoMakeUndefined(ptr, size); in MakeUndefined()
184 void MakeInaccessible(void* ptr, size_t size) { in MakeInaccessible() argument
186 DoMakeInaccessible(ptr, size); in MakeInaccessible()
191 void DoMakeDefined(void* ptr, size_t size);
192 void DoMakeUndefined(void* ptr, size_t size);
193 void DoMakeInaccessible(void* ptr, size_t size);
226 bool Contains(const void* ptr) const { in Contains() argument
[all …]
scoped_arena_allocator.cc  98 uint8_t* ptr = top_ptr_; in AllocWithMemoryTool() local
99 if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) { in AllocWithMemoryTool()
100 ptr = AllocateFromNextArena(rounded_bytes); in AllocWithMemoryTool()
101 CHECK(ptr != nullptr) << "Failed to allocate memory"; in AllocWithMemoryTool()
102 MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr); in AllocWithMemoryTool()
105 top_ptr_ = ptr + rounded_bytes; in AllocWithMemoryTool()
106 MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes); in AllocWithMemoryTool()
107 return ptr; in AllocWithMemoryTool()
scoped_arena_containers.h  207 ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const { in ProtectMemory() argument
211 memset(ptr, kMagicFill, size); in ProtectMemory()
212 MEMORY_TOOL_MAKE_NOACCESS(ptr, size); in ProtectMemory()
214 CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed) in ProtectMemory()
215 << "Freeing invalid object " << ptr; in ProtectMemory()
216 ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree; in ProtectMemory()
218 memset(ptr, kMagicFill, size); in ProtectMemory()
223 void operator()(T* ptr) const { in operator()
224 if (ptr != nullptr) { in operator()
225 ptr->~T(); in operator()
[all …]
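
Sketch of the arena-aware deleter pattern in scoped_arena_containers.h above: arena-backed objects are destroyed in place and their bytes poisoned so stale use is visible, but the memory itself is reclaimed only when the whole arena is released. The fill byte below is an arbitrary choice for the sketch.

#include <cstring>

template <typename T>
struct ArenaDeleteSketch {
  static constexpr unsigned char kMagicFill = 0xCE;  // arbitrary poison byte

  void operator()(T* ptr) const {
    if (ptr != nullptr) {
      ptr->~T();                             // run the destructor in place
      memset(ptr, kMagicFill, sizeof(T));    // poison the bytes; the arena frees them later
    }
  }
};

A unique_ptr parameterized with a deleter like this gives arena-backed objects normal RAII semantics without per-object frees.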
hash_set.h  211 HashSet(const uint8_t* ptr, bool make_copy_of_data, size_t* read_count) noexcept { in HashSet() argument
214 offset = ReadFromBytes(ptr, offset, &temp); in HashSet()
216 offset = ReadFromBytes(ptr, offset, &temp); in HashSet()
219 offset = ReadFromBytes(ptr, offset, &temp); in HashSet()
221 offset = ReadFromBytes(ptr, offset, &min_load_factor_); in HashSet()
222 offset = ReadFromBytes(ptr, offset, &max_load_factor_); in HashSet()
225 data_ = const_cast<T*>(reinterpret_cast<const T*>(ptr + offset)); in HashSet()
232 offset = ReadFromBytes(ptr, offset, &data_[i]); in HashSet()
241 size_t WriteToMemory(uint8_t* ptr) const { in WriteToMemory() argument
243 offset = WriteToBytes(ptr, offset, static_cast<uint64_t>(num_elements_)); in WriteToMemory()
[all …]
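
Sketch of the WriteToBytes/ReadFromBytes offset pattern used by hash_set.h's serialization above: each field is copied to or from a flat byte buffer at a running offset, and the helper returns the offset advanced past the value. The helper names and the header struct below are illustrative stand-ins.

#include <cstddef>
#include <cstdint>
#include <cstring>

template <typename T>
static size_t WriteToBytesSketch(uint8_t* ptr, size_t offset, const T& value) {
  if (ptr != nullptr) {                        // a null ptr means "measure only"
    std::memcpy(ptr + offset, &value, sizeof(T));
  }
  return offset + sizeof(T);
}

template <typename T>
static size_t ReadFromBytesSketch(const uint8_t* ptr, size_t offset, T* out) {
  std::memcpy(out, ptr + offset, sizeof(T));
  return offset + sizeof(T);
}

// Usage: serialize a small header the way HashSet::WriteToMemory chains its offsets.
struct HeaderSketch { uint64_t num_elements; double min_load_factor; double max_load_factor; };

static size_t WriteHeaderSketch(uint8_t* ptr, const HeaderSketch& h) {
  size_t offset = 0;
  offset = WriteToBytesSketch(ptr, offset, h.num_elements);
  offset = WriteToBytesSketch(ptr, offset, h.min_load_factor);
  offset = WriteToBytesSketch(ptr, offset, h.max_load_factor);
  return offset;  // total bytes written (or needed, when ptr is null)
}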
variant_map.h  235 auto* ptr = Get(key); in GetOrDefault() local
236 return (ptr == nullptr) ? key.CreateDefaultValue() : *ptr; in GetOrDefault()
259 TValue* ptr = Get(key); in ReleaseOrDefault() local
260 if (ptr != nullptr) { in ReleaseOrDefault()
261 return std::move(*ptr); in ReleaseOrDefault()
289 TValue* ptr = Get(key); in SetIfMissing() local
290 if (ptr == nullptr) { in SetIfMissing()
arena_allocator.cc  166 void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) { in DoMakeDefined() argument
167 MEMORY_TOOL_MAKE_DEFINED(ptr, size); in DoMakeDefined()
170 void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) { in DoMakeUndefined() argument
171 MEMORY_TOOL_MAKE_UNDEFINED(ptr, size); in DoMakeUndefined()
174 void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) { in DoMakeInaccessible() argument
175 MEMORY_TOOL_MAKE_NOACCESS(ptr, size); in DoMakeInaccessible()
480 bool ArenaAllocator::Contains(const void* ptr) const { in Contains()
481 if (ptr >= begin_ && ptr < end_) { in Contains()
485 if (cur_arena->Contains(ptr)) { in Contains()
/art/compiler/utils/
swap_space.cc  74 if (munmap(chunk.ptr, chunk.size) != 0) { in ~SwapSpace()
76 << static_cast<const void*>(chunk.ptr) << " size=" << chunk.size; in ~SwapSpace()
127 it->free_by_start_entry->ptr += size; in Alloc()
148 return old_chunk.ptr; in Alloc()
154 SpaceChunk remainder = { new_chunk.ptr + size, new_chunk.size - size }; in Alloc()
157 return new_chunk.ptr; in Alloc()
168 uint8_t* ptr = reinterpret_cast<uint8_t*>( in NewFileChunk() local
170 if (ptr == MAP_FAILED) { in NewFileChunk()
179 SpaceChunk new_chunk = {ptr, next_part}; in NewFileChunk()
189 void SwapSpace::Free(void* ptr, size_t size) { in Free() argument
[all …]
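
A reduced sketch of NewFileChunk in swap_space.cc above: grow the backing file and map its new tail as a shared, writable region that the allocator can then carve into chunks. ART additionally rounds the chunk size up to a quantum and keeps free-list bookkeeping; error handling here is reduced to returning nullptr.

#include <cstddef>
#include <cstdint>
#include <sys/mman.h>
#include <unistd.h>

// Extend the swap file by `chunk_size` bytes and map the new tail read/write.
static uint8_t* MapNewFileChunkSketch(int fd, size_t current_size, size_t chunk_size) {
  if (ftruncate(fd, static_cast<off_t>(current_size + chunk_size)) != 0) {
    return nullptr;  // could not grow the backing file
  }
  void* ptr = mmap(nullptr, chunk_size, PROT_READ | PROT_WRITE,
                   MAP_SHARED, fd, static_cast<off_t>(current_size));
  return (ptr == MAP_FAILED) ? nullptr : reinterpret_cast<uint8_t*>(ptr);
}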
swap_space.h  39 void Free(void* ptr, size_t size) REQUIRES(!lock_);
50 mutable uint8_t* ptr; member
54 return reinterpret_cast<uintptr_t>(ptr); in Start()
57 return reinterpret_cast<uintptr_t>(ptr) + size; in End()
64 return reinterpret_cast<uintptr_t>(a.ptr) < reinterpret_cast<uintptr_t>(b.ptr); in operator()
/art/runtime/mirror/
object_reference-inl.h  28 void ObjectReference<kPoisonReferences, MirrorType>::Assign(ObjPtr<MirrorType> ptr) { in Assign() argument
29 Assign(ptr.Ptr()); in Assign()
33 HeapReference<MirrorType> HeapReference<MirrorType>::FromObjPtr(ObjPtr<MirrorType> ptr) { in FromObjPtr() argument
34 return HeapReference<MirrorType>(ptr.Ptr()); in FromObjPtr()
/art/test/ti-agent/
scoped_local_ref.h  38 void reset(T ptr = nullptr) {
39 if (ptr != mLocalRef) {
43 mLocalRef = ptr;
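
Sketch of the reset() idiom in scoped_local_ref.h above: replacing the held JNI local reference deletes the previous one first, and resetting to the same value is a no-op. The class below is a trimmed stand-in for the test agent's ScopedLocalRef.

#include <jni.h>

template <typename T>
class ScopedLocalRefSketch {
 public:
  ScopedLocalRefSketch(JNIEnv* env, T ref) : env_(env), ref_(ref) {}
  ~ScopedLocalRefSketch() { reset(); }  // release whatever is still held

  void reset(T ptr = nullptr) {
    if (ptr != ref_) {                  // resetting to the same reference is a no-op
      if (ref_ != nullptr) {
        env_->DeleteLocalRef(ref_);     // drop the previously held local reference
      }
      ref_ = ptr;
    }
  }

 private:
  JNIEnv* env_;
  T ref_;
};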
/art/runtime/arch/arm/
fault_handler_arm.cc  77 uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc); in GetMethodAndReturnPcAndSp() local
78 VLOG(signals) << "pc: " << std::hex << static_cast<void*>(ptr); in GetMethodAndReturnPcAndSp()
80 if (ptr == nullptr) { in GetMethodAndReturnPcAndSp()
86 uint32_t instr_size = GetInstructionSize(ptr); in GetMethodAndReturnPcAndSp()
104 uint8_t* ptr = reinterpret_cast<uint8_t*>(sc->arm_pc); in Action() local
105 uint32_t instr_size = GetInstructionSize(ptr); in Action()
/art/compiler/debug/dwarf/
writer.h  117 void PushData(const uint8_t* ptr, size_t num_bytes) { in PushData() argument
118 data_->insert(data_->end(), ptr, ptr + num_bytes); in PushData()
121 void PushData(const char* ptr, size_t num_bytes) { in PushData() argument
122 data_->insert(data_->end(), ptr, ptr + num_bytes); in PushData()
/art/runtime/gc/space/
dlmalloc_space.cc  167 size_t DlMallocSpace::Free(Thread* self, mirror::Object* ptr) { in Free() argument
170 CHECK(ptr != nullptr); in Free()
171 CHECK(Contains(ptr)) << "Free (" << ptr << ") not in bounds of heap " << *this; in Free()
173 const size_t bytes_freed = AllocationSizeNonvirtual(ptr, nullptr); in Free()
175 RegisterRecentFree(ptr); in Free()
177 mspace_free(mspace_, ptr); in Free()
187 mirror::Object* ptr = ptrs[i]; in FreeList() local
193 bytes_freed += AllocationSizeNonvirtual(ptr, nullptr); in FreeList()
