1 #include "test/jemalloc_test.h"
2 
3 #ifdef JEMALLOC_PROF
4 const char *malloc_conf =
5     "prof:true,prof_active:false,lg_prof_sample:0";
6 #endif
7 
/*
 * Replacement for prof_dump_open() that redirects all heap profile dumps to
 * /dev/null, so tests can trigger dumps without littering the filesystem.
 * Both parameters are accepted to match the intercepted signature but are
 * intentionally unused.
 */
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd = open("/dev/null", O_WRONLY);

	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}
18 
/* Activate/deactivate profiling via the "prof.active" mallctl. */
static void
set_prof_active(bool active)
{
	bool state = active;

	assert_d_eq(mallctl("prof.active", NULL, NULL, (void *)&state,
	    sizeof(state)), 0, "Unexpected mallctl failure");
}
26 
/* Read the current sample rate via the "prof.lg_sample" mallctl. */
static size_t
get_lg_prof_sample(void)
{
	size_t result;
	size_t result_sz = sizeof(result);

	assert_d_eq(mallctl("prof.lg_sample", (void *)&result, &result_sz,
	    NULL, 0), 0,
	    "Unexpected mallctl failure while reading profiling sample rate");

	return (result);
}
38 
/*
 * Reset profile data via "prof.reset" while simultaneously installing a new
 * sample rate, then verify that the new rate took effect.
 */
static void
do_prof_reset(size_t lg_prof_sample)
{
	size_t new_rate = lg_prof_sample;

	assert_d_eq(mallctl("prof.reset", NULL, NULL,
	    (void *)&new_rate, sizeof(size_t)), 0,
	    "Unexpected mallctl failure while resetting profile data");
	assert_zu_eq(new_rate, get_lg_prof_sample(),
	    "Expected profile sample rate change");
}
48 
TEST_BEGIN(test_prof_reset_basic)49 TEST_BEGIN(test_prof_reset_basic)
50 {
51 	size_t lg_prof_sample_orig, lg_prof_sample, lg_prof_sample_next;
52 	size_t sz;
53 	unsigned i;
54 
55 	test_skip_if(!config_prof);
56 
57 	sz = sizeof(size_t);
58 	assert_d_eq(mallctl("opt.lg_prof_sample", (void *)&lg_prof_sample_orig,
59 	    &sz, NULL, 0), 0,
60 	    "Unexpected mallctl failure while reading profiling sample rate");
61 	assert_zu_eq(lg_prof_sample_orig, 0,
62 	    "Unexpected profiling sample rate");
63 	lg_prof_sample = get_lg_prof_sample();
64 	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
65 	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
66 	    "\"prof.lg_sample\"");
67 
68 	/* Test simple resets. */
69 	for (i = 0; i < 2; i++) {
70 		assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
71 		    "Unexpected mallctl failure while resetting profile data");
72 		lg_prof_sample = get_lg_prof_sample();
73 		assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
74 		    "Unexpected profile sample rate change");
75 	}
76 
77 	/* Test resets with prof.lg_sample changes. */
78 	lg_prof_sample_next = 1;
79 	for (i = 0; i < 2; i++) {
80 		do_prof_reset(lg_prof_sample_next);
81 		lg_prof_sample = get_lg_prof_sample();
82 		assert_zu_eq(lg_prof_sample, lg_prof_sample_next,
83 		    "Expected profile sample rate change");
84 		lg_prof_sample_next = lg_prof_sample_orig;
85 	}
86 
87 	/* Make sure the test code restored prof.lg_sample. */
88 	lg_prof_sample = get_lg_prof_sample();
89 	assert_zu_eq(lg_prof_sample_orig, lg_prof_sample,
90 	    "Unexpected disagreement between \"opt.lg_prof_sample\" and "
91 	    "\"prof.lg_sample\"");
92 }
93 TEST_END
94 
95 bool prof_dump_header_intercepted = false;
96 prof_cnt_t cnt_all_copy = {0, 0, 0, 0};
97 static bool
prof_dump_header_intercept(tsdn_t * tsdn,bool propagate_err,const prof_cnt_t * cnt_all)98 prof_dump_header_intercept(tsdn_t *tsdn, bool propagate_err,
99     const prof_cnt_t *cnt_all)
100 {
101 
102 	prof_dump_header_intercepted = true;
103 	memcpy(&cnt_all_copy, cnt_all, sizeof(prof_cnt_t));
104 
105 	return (false);
106 }
107 
TEST_BEGIN(test_prof_reset_cleanup)108 TEST_BEGIN(test_prof_reset_cleanup)
109 {
110 	void *p;
111 	prof_dump_header_t *prof_dump_header_orig;
112 
113 	test_skip_if(!config_prof);
114 
115 	set_prof_active(true);
116 
117 	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
118 	p = mallocx(1, 0);
119 	assert_ptr_not_null(p, "Unexpected mallocx() failure");
120 	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");
121 
122 	prof_dump_header_orig = prof_dump_header;
123 	prof_dump_header = prof_dump_header_intercept;
124 	assert_false(prof_dump_header_intercepted, "Unexpected intercept");
125 
126 	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
127 	    0, "Unexpected error while dumping heap profile");
128 	assert_true(prof_dump_header_intercepted, "Expected intercept");
129 	assert_u64_eq(cnt_all_copy.curobjs, 1, "Expected 1 allocation");
130 
131 	assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0), 0,
132 	    "Unexpected error while resetting heap profile data");
133 	assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
134 	    0, "Unexpected error while dumping heap profile");
135 	assert_u64_eq(cnt_all_copy.curobjs, 0, "Expected 0 allocations");
136 	assert_zu_eq(prof_bt_count(), 1, "Expected 1 backtrace");
137 
138 	prof_dump_header = prof_dump_header_orig;
139 
140 	dallocx(p, 0);
141 	assert_zu_eq(prof_bt_count(), 0, "Expected 0 backtraces");
142 
143 	set_prof_active(false);
144 }
145 TEST_END
146 
#define	NTHREADS		4		/* Concurrent worker threads. */
#define	NALLOCS_PER_THREAD	(1U << 13)	/* Allocations per thread. */
#define	OBJ_RING_BUF_COUNT	1531		/* Live objects kept per thread. */
#define	RESET_INTERVAL		(1U << 10)	/* Allocations between prof.reset calls. */
#define	DUMP_INTERVAL		3677		/* Allocations between prof.dump calls. */
/*
 * Worker thread: repeatedly allocate via btalloc() into a ring buffer while
 * periodically resetting and dumping the heap profile, then free everything.
 * varg points at this thread's index, used to diversify backtraces.
 */
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	void *objs[OBJ_RING_BUF_COUNT];
	unsigned i;

	memset(objs, 0, sizeof(objs));

	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		void **slot;

		/* Periodically reset/dump concurrently with allocation. */
		if (i % RESET_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.reset", NULL, NULL, NULL, 0),
			    0, "Unexpected error while resetting heap profile "
			    "data");
		}
		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

		/* Recycle this iteration's ring buffer slot, then refill it. */
		slot = &objs[i % OBJ_RING_BUF_COUNT];
		if (*slot != NULL) {
			dallocx(*slot, 0);
			*slot = NULL;
		}
		*slot = btalloc(1, thd_ind*NALLOCS_PER_THREAD + i);
		assert_ptr_not_null(*slot,
		    "Unexpected btalloc() failure");
	}

	/* Clean up any remaining objects. */
	for (i = 0; i < OBJ_RING_BUF_COUNT; i++) {
		if (objs[i] != NULL) {
			dallocx(objs[i], 0);
			objs[i] = NULL;
		}
	}

	return (NULL);
}
196 
TEST_BEGIN(test_prof_reset)197 TEST_BEGIN(test_prof_reset)
198 {
199 	size_t lg_prof_sample_orig;
200 	thd_t thds[NTHREADS];
201 	unsigned thd_args[NTHREADS];
202 	unsigned i;
203 	size_t bt_count, tdata_count;
204 
205 	test_skip_if(!config_prof);
206 
207 	bt_count = prof_bt_count();
208 	assert_zu_eq(bt_count, 0,
209 	    "Unexpected pre-existing tdata structures");
210 	tdata_count = prof_tdata_count();
211 
212 	lg_prof_sample_orig = get_lg_prof_sample();
213 	do_prof_reset(5);
214 
215 	set_prof_active(true);
216 
217 	for (i = 0; i < NTHREADS; i++) {
218 		thd_args[i] = i;
219 		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
220 	}
221 	for (i = 0; i < NTHREADS; i++)
222 		thd_join(thds[i], NULL);
223 
224 	assert_zu_eq(prof_bt_count(), bt_count,
225 	    "Unexpected bactrace count change");
226 	assert_zu_eq(prof_tdata_count(), tdata_count,
227 	    "Unexpected remaining tdata structures");
228 
229 	set_prof_active(false);
230 
231 	do_prof_reset(lg_prof_sample_orig);
232 }
233 TEST_END
234 #undef NTHREADS
235 #undef NALLOCS_PER_THREAD
236 #undef OBJ_RING_BUF_COUNT
237 #undef RESET_INTERVAL
238 #undef DUMP_INTERVAL
239 
240 /* Test sampling at the same allocation site across resets. */
241 #define	NITER 10
TEST_BEGIN(test_xallocx)242 TEST_BEGIN(test_xallocx)
243 {
244 	size_t lg_prof_sample_orig;
245 	unsigned i;
246 	void *ptrs[NITER];
247 
248 	test_skip_if(!config_prof);
249 
250 	lg_prof_sample_orig = get_lg_prof_sample();
251 	set_prof_active(true);
252 
253 	/* Reset profiling. */
254 	do_prof_reset(0);
255 
256 	for (i = 0; i < NITER; i++) {
257 		void *p;
258 		size_t sz, nsz;
259 
260 		/* Reset profiling. */
261 		do_prof_reset(0);
262 
263 		/* Allocate small object (which will be promoted). */
264 		p = ptrs[i] = mallocx(1, 0);
265 		assert_ptr_not_null(p, "Unexpected mallocx() failure");
266 
267 		/* Reset profiling. */
268 		do_prof_reset(0);
269 
270 		/* Perform successful xallocx(). */
271 		sz = sallocx(p, 0);
272 		assert_zu_eq(xallocx(p, sz, 0, 0), sz,
273 		    "Unexpected xallocx() failure");
274 
275 		/* Perform unsuccessful xallocx(). */
276 		nsz = nallocx(sz+1, 0);
277 		assert_zu_eq(xallocx(p, nsz, 0, 0), sz,
278 		    "Unexpected xallocx() success");
279 	}
280 
281 	for (i = 0; i < NITER; i++) {
282 		/* dallocx. */
283 		dallocx(ptrs[i], 0);
284 	}
285 
286 	set_prof_active(false);
287 	do_prof_reset(lg_prof_sample_orig);
288 }
289 TEST_END
290 #undef NITER
291 
292 int
main(void)293 main(void)
294 {
295 
296 	/* Intercept dumping prior to running any tests. */
297 	prof_dump_open = prof_dump_open_intercept;
298 
299 	return (test(
300 	    test_prof_reset_basic,
301 	    test_prof_reset_cleanup,
302 	    test_prof_reset,
303 	    test_xallocx));
304 }
305