/system/core/debuggerd/libdebuggerd/include/libdebuggerd/types.h
    45 uintptr_t abort_msg_address = 0;
    46 uintptr_t fdsan_table_address = 0;
    47 uintptr_t gwp_asan_state = 0;
    48 uintptr_t gwp_asan_metadata = 0;
    49 uintptr_t scudo_stack_depot = 0;
    50 uintptr_t scudo_region_info = 0;
    51 uintptr_t scudo_ring_buffer = 0;
    54 uintptr_t untagged_fault_address = 0;
    55 uintptr_t maybe_tagged_fault_address = 0;

/system/core/debuggerd/libdebuggerd/include/libdebuggerd/gwp_asan.h
    53 uintptr_t GetFaultAddress() const;
    91 uintptr_t crash_address_ = 0u;
/system/memory/libmemunreachable/HeapWalker.h
    32 uintptr_t begin;
    33 uintptr_t end;
    77 bool Allocation(uintptr_t begin, uintptr_t end);
    78 void Mapping(uintptr_t begin, uintptr_t end);
    79 void Root(uintptr_t begin, uintptr_t end);
    80 void Root(const allocator::vector<uintptr_t>& vals);
    100 bool WordContainsAllocationPtr(uintptr_t ptr, Range* range, AllocationInfo** info);
    112 allocator::vector<uintptr_t> root_vals_;
    116 volatile uintptr_t walking_ptr_;
    124 uintptr_t begin = (range.begin + (sizeof(uintptr_t) - 1)) & ~(sizeof(uintptr_t) - 1);  in ForEachPtrInRange()
    [all …]

/system/memory/libmemunreachable/HeapWalker.cpp
    37 static inline uintptr_t UntagAddress(uintptr_t addr) {  in UntagAddress()
    39 constexpr uintptr_t mask = (static_cast<uintptr_t>(1) << 56) - 1;  in UntagAddress()
    45 bool HeapWalker::Allocation(uintptr_t begin, uintptr_t end) {  in Allocation()
    78 static uintptr_t ReadWordAtAddressUnsafe(uintptr_t word_ptr)  in ReadWordAtAddressUnsafe()
    83 return *reinterpret_cast<uintptr_t*>(word_ptr);  in ReadWordAtAddressUnsafe()
    86 bool HeapWalker::WordContainsAllocationPtr(uintptr_t word_ptr, Range* range, AllocationInfo** info)…  in WordContainsAllocationPtr()
    91 uintptr_t value = ReadWordAtAddressUnsafe(word_ptr);  in WordContainsAllocationPtr()
    122 void HeapWalker::Mapping(uintptr_t begin, uintptr_t end) {  in Mapping()
    127 void HeapWalker::Root(uintptr_t begin, uintptr_t end) {  in Root()
    131 void HeapWalker::Root(const allocator::vector<uintptr_t>& vals) {  in Root()
    [all …]
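Two idioms recur in HeapWalker: untagging candidate pointers (keeping only the low 56 address bits so ARM top-byte tags don't defeat the comparison) and rounding a scan range up to word alignment before reading it as uintptr_t values. A minimal sketch of both, with illustrative names rather than the library's:

    #include <cstddef>
    #include <cstdint>

    // Strip an ARM TBI/MTE tag: keep the low 56 address bits, as in
    // HeapWalker's UntagAddress(); a no-op on other architectures.
    static inline uintptr_t untag(uintptr_t addr) {
    #if defined(__aarch64__)
      constexpr uintptr_t kAddressMask = (static_cast<uintptr_t>(1) << 56) - 1;
      return addr & kAddressMask;
    #else
      return addr;
    #endif
    }

    // Round a range start up to the next word boundary so every read in the
    // scan loop is a whole, aligned uintptr_t (the line 124 idiom above).
    static inline uintptr_t align_up_to_word(uintptr_t addr) {
      return (addr + sizeof(uintptr_t) - 1) & ~(sizeof(uintptr_t) - 1);
    }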
/system/memory/libmemunreachable/Binder.cpp
    30 ssize_t __attribute__((weak)) getBinderKernelReferences(size_t, uintptr_t*);
    31 ssize_t __attribute__((weak)) getHWBinderKernelReferences(size_t, uintptr_t*);
    37 static bool BinderReferencesToVector(allocator::vector<uintptr_t>& refs,  in BinderReferencesToVector()
    38 std::function<ssize_t(size_t, uintptr_t*)> fn) {  in BinderReferencesToVector()
    60 bool BinderReferences(allocator::vector<uintptr_t>& refs) {  in BinderReferences()
    63 allocator::vector<uintptr_t> binder_refs{refs.get_allocator()};  in BinderReferences()
    70 allocator::vector<uintptr_t> hwbinder_refs{refs.get_allocator()};  in BinderReferences()

/system/memory/libmemunreachable/ProcessMappings.h
    27 uintptr_t begin;
    28 uintptr_t end;
    35 Mapping(uintptr_t begin, uintptr_t end, bool read, bool write, bool execute, const char* name)  in Mapping()

/system/memory/libmemunreachable/bionic.h
    29 extern int malloc_iterate(uintptr_t base, size_t size,
    30 void (*callback)(uintptr_t base, size_t size, void* arg), void* arg);
    31 extern ssize_t malloc_backtrace(void* pointer, uintptr_t* frames, size_t frame_count);
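bionic.h only declares the allocator hooks above, so the shape of the malloc_iterate() callback is easiest to see in use. A hedged sketch that tallies allocations in one heap region: the HeapStats struct and TallyAllocations() helper are invented here for illustration, and the real caller walks the heap regions of a ptrace-paused process rather than its own.

    #include <cstddef>
    #include <cstdint>

    // Declaration as it appears in libmemunreachable's bionic.h.
    extern "C" int malloc_iterate(uintptr_t base, size_t size,
                                  void (*callback)(uintptr_t base, size_t size, void* arg),
                                  void* arg);

    struct HeapStats {
      size_t count = 0;
      size_t bytes = 0;
    };

    // Count live allocations inside [region_base, region_base + region_size).
    static void TallyAllocations(uintptr_t region_base, size_t region_size, HeapStats* stats) {
      malloc_iterate(region_base, region_size,
                     [](uintptr_t /*alloc_base*/, size_t alloc_size, void* arg) {
                       auto* s = static_cast<HeapStats*>(arg);
                       s->count++;
                       s->bytes += alloc_size;
                     },
                     stats);
    }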
/system/memory/libmemunreachable/ThreadCapture.cpp
    180 void* sig_ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(signal));  in PtraceDetach()
    215 uintptr_t regs[max_num_regs];  in PtraceThreadInfo()
    225 unsigned int num_regs = iovec.iov_len / sizeof(uintptr_t);  in PtraceThreadInfo()
    230 offsetof(struct pt_regs, rsp) / sizeof(uintptr_t)  in PtraceThreadInfo()
    232 offsetof(struct pt_regs, esp) / sizeof(uintptr_t)  in PtraceThreadInfo()
    234 offsetof(struct pt_regs, ARM_sp) / sizeof(uintptr_t)  in PtraceThreadInfo()
    236 offsetof(struct user_pt_regs, sp) / sizeof(uintptr_t)  in PtraceThreadInfo()
    238 offsetof(struct pt_regs, regs[29]) / sizeof(uintptr_t)  in PtraceThreadInfo()
    246 thread_info.stack = std::pair<uintptr_t, uintptr_t>(regs[sp], 0);  in PtraceThreadInfo()
    327 … ThreadInfo t{0, allocator::vector<uintptr_t>(allocator_), std::pair<uintptr_t, uintptr_t>(0, 0)};  in CapturedThreadInfo()

/system/memory/libmemunreachable/ThreadCapture.h
    28 allocator::vector<uintptr_t> regs;
    29 std::pair<uintptr_t, uintptr_t> stack;
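PtraceThreadInfo() pulls the stack pointer out of whatever register block PTRACE_GETREGSET returns, indexing it with the per-architecture offsetof() expressions shown above. A sketch of the same read for arm64 only; on other architectures the field is rsp, esp, or ARM_sp as those offsets indicate, and this helper is illustrative rather than the library's.

    #include <cstdint>
    #include <elf.h>          // NT_PRSTATUS
    #include <sys/ptrace.h>
    #include <sys/types.h>
    #include <sys/uio.h>
    #if defined(__aarch64__)
    #include <asm/ptrace.h>   // struct user_pt_regs
    #endif

    // Read a stopped thread's general-purpose registers and return its stack pointer.
    static bool ReadStackPointer(pid_t tid, uintptr_t* sp) {
    #if defined(__aarch64__)
      user_pt_regs regs;
      iovec iov = {&regs, sizeof(regs)};
      if (ptrace(PTRACE_GETREGSET, tid, reinterpret_cast<void*>(NT_PRSTATUS), &iov) != 0) {
        return false;
      }
      *sp = static_cast<uintptr_t>(regs.sp);
      return true;
    #else
      (void)tid;
      (void)sp;
      return false;
    #endif
    }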
/system/memory/libmemunreachable/tests/LeakFolding_test.cpp
    42 #define buffer_begin(buffer) reinterpret_cast<uintptr_t>(&(buffer)[0])
    43 #define buffer_end(buffer) (reinterpret_cast<uintptr_t>(&(buffer)[0]) + sizeof(buffer))
    64 EXPECT_EQ(sizeof(uintptr_t), leaked_bytes);  in TEST_F()
    89 EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);  in TEST_F()
    118 EXPECT_EQ(2 * sizeof(uintptr_t), leaked_bytes);  in TEST_F()
    121 EXPECT_EQ(sizeof(uintptr_t), leaked[0].referenced_size);  in TEST_F()
    149 EXPECT_EQ(3 * sizeof(uintptr_t), leaked_bytes);  in TEST_F()
    152 EXPECT_EQ(2 * sizeof(uintptr_t), leaked[0].referenced_size);  in TEST_F()
    180 EXPECT_EQ(5 * sizeof(uintptr_t), leaked_bytes);  in TEST_F()
    184 EXPECT_EQ(3 * sizeof(uintptr_t), leaked[0].referenced_size);  in TEST_F()
    [all …]
/system/core/debuggerd/protocol.h
    91 uintptr_t abort_msg_address;
    95 uintptr_t fdsan_table_address;
    96 uintptr_t gwp_asan_state;
    97 uintptr_t gwp_asan_metadata;
    98 uintptr_t scudo_stack_depot;
    99 uintptr_t scudo_region_info;
    100 uintptr_t scudo_ring_buffer;
/system/extras/memory_replay/Pointers.h
    32 void Add(uintptr_t key_pointer, void* pointer);
    34 void* Remove(uintptr_t key_pointer);
    41 pointer_data* FindEmpty(uintptr_t key_pointer);
    42 pointer_data* Find(uintptr_t key_pointer);
    43 size_t GetHash(uintptr_t key_pointer);

/system/extras/memory_replay/Pointers.cpp
    52 void Pointers::Add(uintptr_t key_pointer, void* pointer) {  in Add()
    61 void* Pointers::Remove(uintptr_t key_pointer) {  in Remove()
    72 atomic_store(&data->key_pointer, uintptr_t(0));  in Remove()
    77 Pointers::pointer_data* Pointers::Find(uintptr_t key_pointer) {  in Find()
    90 Pointers::pointer_data* Pointers::FindEmpty(uintptr_t key_pointer) {  in FindEmpty()
    93 uintptr_t empty = 0;  in FindEmpty()
    95 uintptr_t(1))) {  in FindEmpty()
    105 size_t Pointers::GetHash(uintptr_t key_pointer) {  in GetHash()
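Pointers maps the pointer values recorded in a memory trace to the pointers returned when the trace is replayed, and lines 93–95 hint at how FindEmpty() claims a slot: a key of 0 means empty, and a compare-exchange installs a placeholder before the real key is published. A hypothetical slot-claiming loop in that spirit; the layout and names here are not the real ones.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct PointerSlot {
      std::atomic<uintptr_t> key{0};  // 0 = empty, 1 = reserved while being filled in
      void* value = nullptr;
    };

    // Linearly probe for a free slot and claim it by swapping the empty marker (0)
    // for a reservation marker (1), then publish the real key.
    static PointerSlot* ClaimSlot(PointerSlot* slots, size_t count, uintptr_t key_pointer) {
      for (size_t i = 0; i < count; i++) {
        uintptr_t empty = 0;
        if (slots[i].key.compare_exchange_strong(empty, uintptr_t(1))) {
          slots[i].key.store(key_pointer);
          return &slots[i];
        }
      }
      return nullptr;  // table full
    }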
/system/core/trusty/coverage/coverage.cpp
    46 static inline uintptr_t RoundPageUp(uintptr_t val) {  in RoundPageUp()
    193 volatile uintptr_t* begin = nullptr;  in ResetPCs()
    194 volatile uintptr_t* end = nullptr;  in ResetPCs()
    197 for (volatile uintptr_t* x = begin; x < end; x++) {  in ResetPCs()
    244 void CoverageRecord::GetRawPCs(volatile uintptr_t** begin, volatile uintptr_t** end) {  in GetRawPCs()
    254 *begin = (volatile uintptr_t*)((volatile uint8_t*)shm_ + region->first);  in GetRawPCs()
    255 *end = (volatile uintptr_t*)((volatile uint8_t*)shm_ + region->second);  in GetRawPCs()
    282 if (sizeof(uintptr_t) == 8) {  in SaveSancovFile()
    284 } else if (sizeof(uintptr_t) == 4) {  in SaveSancovFile()
    289 volatile uintptr_t* begin = nullptr;  in SaveSancovFile()
    [all …]
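RoundPageUp() at line 46 is the standard power-of-two rounding trick applied to a uintptr_t. A self-contained version, assuming the page size is a power of two (it is on the platforms involved):

    #include <cstdint>
    #include <unistd.h>

    // Round an address or size up to the next page boundary.
    static inline uintptr_t RoundPageUp(uintptr_t val) {
      const uintptr_t page = static_cast<uintptr_t>(getpagesize());
      return (val + page - 1) & ~(page - 1);
    }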
/system/unwinding/libunwindstack/tests/MemoryMteTest.cpp
    32 static uintptr_t CreateTagMapping() {  in CreateTagMapping()
    34 uintptr_t mapping =  in CreateTagMapping()
    35 reinterpret_cast<uintptr_t>(mmap(nullptr, getpagesize(), PROT_READ | PROT_WRITE | PROT_MTE,  in CreateTagMapping()
    59 uintptr_t mapping = CreateTagMapping();  in TEST()
    90 uintptr_t mapping = CreateTagMapping();  in TEST()
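CreateTagMapping() maps a page with PROT_MTE so the test can place memory tags on it, then hands the address around as a uintptr_t. A hedged reconstruction of that setup: the mmap flags and error handling here are assumptions, and the function only does anything where the kernel headers define PROT_MTE (arm64).

    #include <cstdint>
    #include <sys/mman.h>
    #include <unistd.h>

    // Map one page that accepts MTE memory tags; returns 0 on failure or when
    // PROT_MTE is unavailable on this target.
    static uintptr_t CreateTagMapping() {
    #if defined(PROT_MTE)
      void* mapping = mmap(nullptr, getpagesize(), PROT_READ | PROT_WRITE | PROT_MTE,
                           MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (mapping == MAP_FAILED) {
        return 0;
      }
      return reinterpret_cast<uintptr_t>(mapping);
    #else
      return 0;
    #endif
    }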
/system/core/libutils/include/utils/Flattenable.h
    46 uintptr_t b = uintptr_t(buffer);  in align()
    47 buffer = reinterpret_cast<void*>((uintptr_t(buffer) + (N-1)) & ~(N-1));  in align()
    48 return size_t(uintptr_t(buffer) - b);  in align()
    55 buffer = reinterpret_cast<void*>((uintptr_t(buffer) + (N-1)) & ~(N-1));  in align()
    56 size_t delta = size_t(uintptr_t(buffer) - uintptr_t(b));  in align()
    62 buffer = reinterpret_cast<void*>( uintptr_t(buffer) + offset );  in advance()
    67 buffer = reinterpret_cast<void const*>( uintptr_t(buffer) + offset );  in advance()
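The align() helpers in Flattenable.h round a serialization cursor up to an N-byte boundary and report how many padding bytes were skipped. A minimal sketch of the arithmetic only:

    #include <cstddef>
    #include <cstdint>

    // Advance a serialization cursor to the next N-byte boundary (N a power of two)
    // and return how many padding bytes were skipped.
    template <size_t N>
    static size_t align(void*& buffer) {
      static_assert((N & (N - 1)) == 0, "N must be a power of two");
      uintptr_t b = reinterpret_cast<uintptr_t>(buffer);
      uintptr_t aligned = (b + (N - 1)) & ~static_cast<uintptr_t>(N - 1);
      buffer = reinterpret_cast<void*>(aligned);
      return static_cast<size_t>(aligned - b);
    }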
/system/core/debuggerd/libdebuggerd/gwp_asan.cpp
    33 static bool retrieve_gwp_asan_state(unwindstack::Memory* process_memory, uintptr_t state_addr,  in retrieve_gwp_asan_state()
    45 uintptr_t metadata_addr) {  in retrieve_gwp_asan_metadata()
    89 uintptr_t internal_crash_address = __gwp_asan_get_internal_crash_address(&state_);  in GwpAsanCrashData()
    142 std::unique_ptr<uintptr_t[]> frames(new uintptr_t[kMaxTraceLength]);  in AddCauseProtos()
    183 uintptr_t alloc_address = __gwp_asan_get_allocation_address(responsible_allocation_);  in DumpCause()
    186 uintptr_t diff;  in DumpCause()
    229 std::unique_ptr<uintptr_t[]> frames(new uintptr_t[kMaxTraceLength]);  in DumpDeallocationTrace()
    256 std::unique_ptr<uintptr_t[]> frames(new uintptr_t[kMaxTraceLength]);  in DumpAllocationTrace()
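retrieve_gwp_asan_state() and retrieve_gwp_asan_metadata() copy the crashed process's GWP-ASan bookkeeping out of its address space before the __gwp_asan_* queries run on it. A sketch of the state read using libunwindstack's Memory::ReadFully(); the real helpers may validate more than this, and the function name below is illustrative.

    #include <cstdint>

    #include <unwindstack/Memory.h>
    #include "gwp_asan/common.h"  // gwp_asan::AllocatorState

    // Copy the remote AllocatorState into local memory so it can be inspected.
    static bool ReadGwpAsanState(unwindstack::Memory* process_memory, uintptr_t state_addr,
                                 gwp_asan::AllocatorState* state) {
      return process_memory->ReadFully(state_addr, state, sizeof(*state));
    }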
/system/core/debuggerd/libdebuggerd/utility.cpp
    152 if (bytes % sizeof(uintptr_t) != 0) {  in dump_memory()
    154 ALOGE("Bytes read %zu, is not a multiple of %zu", bytes, sizeof(uintptr_t));  in dump_memory()
    155 bytes &= ~(sizeof(uintptr_t) - 1);  in dump_memory()
    180 if (bytes2 > 0 && bytes % sizeof(uintptr_t) != 0) {  in dump_memory()
    182 ALOGE("Bytes after second read %zu, is not a multiple of %zu", bytes, sizeof(uintptr_t));  in dump_memory()
    183 bytes &= ~(sizeof(uintptr_t) - 1);  in dump_memory()
    207 uintptr_t data[MEMORY_BYTES_TO_DUMP / sizeof(uintptr_t)];  in dump_memory()
    223 uintptr_t* data_ptr = data;  in dump_memory()
    232 for (size_t i = 0; i < MEMORY_BYTES_PER_LINE / sizeof(uintptr_t); i++) {  in dump_memory()
    237 for (size_t val = 0; val < sizeof(uintptr_t); val++, ptr++) {  in dump_memory()
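dump_memory() formats whole words, so when a read comes back with a byte count that is not a multiple of sizeof(uintptr_t) it logs the mismatch and rounds the count down, as lines 152–155 show. The truncation itself is a single mask:

    #include <cstddef>
    #include <cstdint>

    // Drop any trailing partial word from a byte count so only complete
    // uintptr_t values get formatted.
    static inline size_t TruncateToWordMultiple(size_t bytes) {
      return bytes & ~(sizeof(uintptr_t) - 1);
    }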
/system/chre/util/include/chre/util/memory_pool_impl.h
    54 uintptr_t elementAddress = reinterpret_cast<uintptr_t>(element);  in deallocate()
    55 uintptr_t baseAddress = reinterpret_cast<uintptr_t>(&blocks()[0].mElement);  in deallocate()
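deallocate() converts the element pointer and the pool's base into uintptr_t values so it can recover which slot the element occupies by plain arithmetic. A sketch of that computation; slot_stride stands in for the pool's real per-slot size, which is determined by its block type rather than passed as an argument.

    #include <cstddef>
    #include <cstdint>

    // Recover a pool slot index from the element's address, the address of the
    // first element, and the per-slot stride.
    static size_t SlotIndex(uintptr_t element_address, uintptr_t base_address, size_t slot_stride) {
      return (element_address - base_address) / slot_stride;
    }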
/system/chre/host/msm/daemon/generated/chre_slpi_skel.c
    214 if((_ALIGN_SIZE((uintptr_t)me->stackEnd, al) + size) < (uintptr_t)me->stack + me->nSize) {  in _allocator_alloc()
    215 *ppbuf = (uint8_t*)_ALIGN_SIZE((uintptr_t)me->stackEnd, al);  in _allocator_alloc()
    216 me->stackEnd = (uint8_t*)_ALIGN_SIZE((uintptr_t)me->stackEnd, al) + size;  in _allocator_alloc()
    293 const uintptr_t p1;\
    294 const uintptr_t p2;\
    390 #define SLIM_IFPTR32(is32,is64) (sizeof(uintptr_t) == 4 ? (is32) : (is64))
    434 static const Type types[1] = {{0x1,{{(const uintptr_t)0,(const uintptr_t)1}}, 2,0x1}};
    435 …uintptr_t)&(types[0]),(const uintptr_t)0x0}}, 9,SLIM_IFPTR32(0x4,0x8),3,0},{0x4,{{(const uintptr_t…

/system/chre/host/msm/daemon/generated/chre_slpi_stub.c
    214 if((_ALIGN_SIZE((uintptr_t)me->stackEnd, al) + size) < (uintptr_t)me->stack + me->nSize) {  in _allocator_alloc()
    215 *ppbuf = (uint8_t*)_ALIGN_SIZE((uintptr_t)me->stackEnd, al);  in _allocator_alloc()
    216 me->stackEnd = (uint8_t*)_ALIGN_SIZE((uintptr_t)me->stackEnd, al) + size;  in _allocator_alloc()
    293 const uintptr_t p1;\
    294 const uintptr_t p2;\
    390 #define SLIM_IFPTR32(is32,is64) (sizeof(uintptr_t) == 4 ? (is32) : (is64))
    434 static const Type types[1] = {{0x1,{{(const uintptr_t)0,(const uintptr_t)1}}, 2,0x1}};
    435 …uintptr_t)&(types[0]),(const uintptr_t)0x0}}, 9,SLIM_IFPTR32(0x4,0x8),3,0},{0x4,{{(const uintptr_t…
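The generated skeleton and stub pick 32- or 64-bit layout constants with SLIM_IFPTR32, which just compares sizeof(uintptr_t) against 4 at compile time. The same macro with a toy use:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    #define SLIM_IFPTR32(is32, is64) (sizeof(uintptr_t) == 4 ? (is32) : (is64))

    int main() {
      // A slot that holds a pointer needs 4 bytes on 32-bit targets, 8 on 64-bit.
      size_t pointer_slot_size = SLIM_IFPTR32(0x4, 0x8);
      printf("pointer slot: %zu bytes\n", pointer_slot_size);
      return 0;
    }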
/system/bt/osi/include/osi.h
    43 #define PTR_TO_UINT(p) ((unsigned int)((uintptr_t)(p)))
    44 #define UINT_TO_PTR(u) ((void*)((uintptr_t)(u)))
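PTR_TO_UINT and UINT_TO_PTR route the cast through uintptr_t so converting between pointers and integers is well defined; note that PTR_TO_UINT still narrows to unsigned int, so the round trip is only lossless for values that fit in 32 bits. A typical use is smuggling a small ID through a void* callback argument:

    #include <cstdint>

    #define PTR_TO_UINT(p) ((unsigned int)((uintptr_t)(p)))
    #define UINT_TO_PTR(u) ((void*)((uintptr_t)(u)))

    // Pack an integer ID into a void* for a callback, then unpack it again.
    static void* EncodeId(unsigned int id) { return UINT_TO_PTR(id); }
    static unsigned int DecodeId(void* arg) { return PTR_TO_UINT(arg); }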
/system/logging/logwrapper/logwrapper.cpp
    90 uintptr_t fault_address = (uintptr_t)status;  in main()
/system/chre/platform/shared/nanoapp_loader.cc
    370 uintptr_t initArray = reinterpret_cast<uintptr_t>(  in callInitArray()
    372 uintptr_t offset = 0;  in callInitArray()
    375 uintptr_t initFunction = reinterpret_cast<uintptr_t>(*funcPtr);  in callInitArray()
    393 uintptr_t NanoappLoader::roundDownToAlign(uintptr_t virtualAddr) {  in roundDownToAlign()
    689 uintptr_t adjustedFirstLoadSegAddr = roundDownToAlign(first->p_vaddr);  in createMappings()
    691 reinterpret_cast<uintptr_t>(mMapping) - adjustedFirstLoadSegAddr;  in createMappings()
    817 *addr += reinterpret_cast<uintptr_t>(mMapping);  in fixRelocations()
    827 *addr = reinterpret_cast<uintptr_t>(mMapping + sym->st_value);  in fixRelocations()
    917 uintptr_t finiArray = reinterpret_cast<uintptr_t>(  in callTerminatorArray()
    919 uintptr_t offset = 0;  in callTerminatorArray()
    [all …]
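createMappings() computes a load bias (where the image actually landed minus the rounded-down address its first PT_LOAD segment asked for), and fixRelocations() patches words in the image by adding the mapping address back in. A generic sketch of that relationship; the member and method names are illustrative, not the loader's.

    #include <cstdint>

    struct LoadedImage {
      uintptr_t mapping_start;     // where the image was actually mapped
      uintptr_t first_load_vaddr;  // p_vaddr of the first PT_LOAD segment, rounded down to its alignment

      // Runtime address = link-time address + load bias.
      uintptr_t load_bias() const { return mapping_start - first_load_vaddr; }

      // A relative relocation adjusts a stored address by the load bias.
      void apply_relative_reloc(uintptr_t* where) const { *where += load_bias(); }
    };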
/system/extras/kexec_tools/kexecload.c
    137 segment[0].mem = (void*)((uintptr_t)start_address + KEXEC_ARM_ZIMAGE_OFFSET);  in main()
    142 segment[1].mem = (void*)((uintptr_t)start_address + KEXEC_ARM_ATAGS_OFFSET);  in main()
    145 rv = kexec_load(((uintptr_t)start_address + KEXEC_ARM_ZIMAGE_OFFSET), 2, (void*)segment,  in main()