1 /******************************************************************************/
2 #ifdef JEMALLOC_H_TYPES
3 
/*
 * Size and alignment of memory chunks that are allocated by the OS's virtual
 * memory system.  LG_CHUNK_DEFAULT is the lg (base-2 log) of the default
 * chunk size (2^18 == 256 KiB); the run-time value is presumably taken from
 * opt_lg_chunk -- confirm in chunk.c.
 */
#define	LG_CHUNK_DEFAULT	18

/* Return the chunk address for allocation address a (rounds a down to its chunk base). */
#define	CHUNK_ADDR2BASE(a)						\
	((void *)((uintptr_t)(a) & ~chunksize_mask))

/* Return the chunk offset of address a (byte offset of a within its chunk). */
#define	CHUNK_ADDR2OFFSET(a)						\
	((size_t)((uintptr_t)(a) & chunksize_mask))

/*
 * Return the smallest chunk multiple that is >= s.
 * NOTE(review): the addition wraps to 0 for s > SIZE_MAX - chunksize_mask;
 * callers are assumed to pass sizes well below that bound -- confirm.
 */
#define	CHUNK_CEILING(s)						\
	(((s) + chunksize_mask) & ~chunksize_mask)
21 
22 #endif /* JEMALLOC_H_TYPES */
23 /******************************************************************************/
24 #ifdef JEMALLOC_H_STRUCTS
25 
26 #endif /* JEMALLOC_H_STRUCTS */
27 /******************************************************************************/
28 #ifdef JEMALLOC_H_EXTERNS
29 
/* Run-time options: lg of the chunk size, and dss ("sbrk") precedence
 * setting (presumably parsed from the malloc conf -- see chunk_dss.h). */
extern size_t		opt_lg_chunk;
extern const char	*opt_dss;

/* Radix tree mapping chunk base addresses to their extent nodes. */
extern rtree_t		chunks_rtree;

/* Derived from opt_lg_chunk at boot (see chunk_boot). */
extern size_t		chunksize;
extern size_t		chunksize_mask; /* (chunksize - 1). */
extern size_t		chunk_npages;

/*
 * Chunk management API.  NOTE(review): per the prevailing jemalloc
 * convention, the bool-returning functions below presumably return true on
 * error -- confirm against chunk.c.
 */

/* Register/deregister a chunk's extent node in chunks_rtree. */
bool	chunk_register(const void *chunk, const extent_node_t *node);
void	chunk_deregister(const void *chunk, const extent_node_t *node);

/* Chunk allocation: base (bootstrap), per-arena cache, default hook, and a
 * wrapper that dispatches through a caller-supplied chunk_alloc_t hook. */
void	*chunk_alloc_base(size_t size);
void	*chunk_alloc_cache(arena_t *arena, void *new_addr, size_t size,
    size_t alignment, bool *zero, bool dalloc_node);
void	*chunk_alloc_default(void *new_addr, size_t size, size_t alignment,
    bool *zero, unsigned arena_ind);
void	*chunk_alloc_wrapper(arena_t *arena, chunk_alloc_t *chunk_alloc,
    void *new_addr, size_t size, size_t alignment, bool *zero);

/* Record a (possibly cached) chunk in the given szad/ad extent trees. */
void	chunk_record(arena_t *arena, extent_tree_t *chunks_szad,
    extent_tree_t *chunks_ad, bool cache, void *chunk, size_t size,
    bool zeroed);

/* Chunk deallocation: cache, arena, default hook, and hook wrapper. */
void	chunk_dalloc_cache(arena_t *arena, void *chunk, size_t size);
void	chunk_dalloc_arena(arena_t *arena, void *chunk, size_t size,
    bool zeroed);
bool	chunk_dalloc_default(void *chunk, size_t size, unsigned arena_ind);
void	chunk_dalloc_wrapper(arena_t *arena, chunk_dalloc_t *chunk_dalloc,
    void *chunk, size_t size);

/* Purge (release to the OS) a byte range within a chunk. */
bool	chunk_purge_arena(arena_t *arena, void *chunk, size_t offset,
    size_t length);
bool	chunk_purge_default(void *chunk, size_t offset, size_t length,
    unsigned arena_ind);
bool	chunk_purge_wrapper(arena_t *arena, chunk_purge_t *chunk_purge,
    void *chunk, size_t offset, size_t length);

/* Module lifecycle: one-time boot, and fork synchronization hooks. */
bool	chunk_boot(void);
void	chunk_prefork(void);
void	chunk_postfork_parent(void);
void	chunk_postfork_child(void);
67 
68 #endif /* JEMALLOC_H_EXTERNS */
69 /******************************************************************************/
70 #ifdef JEMALLOC_H_INLINES
71 
72 #ifndef JEMALLOC_ENABLE_INLINE
73 extent_node_t	*chunk_lookup(const void *chunk, bool dependent);
74 #endif
75 
76 #if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_CHUNK_C_))
77 JEMALLOC_INLINE extent_node_t *
chunk_lookup(const void * ptr,bool dependent)78 chunk_lookup(const void *ptr, bool dependent)
79 {
80 
81 	return (rtree_get(&chunks_rtree, (uintptr_t)ptr, dependent));
82 }
83 #endif
84 
85 #endif /* JEMALLOC_H_INLINES */
86 /******************************************************************************/
87 
88 #include "jemalloc/internal/chunk_dss.h"
89 #include "jemalloc/internal/chunk_mmap.h"
90