prof_accum.c revision 898960247a8b2e6534738b7a3a244855f379faf9
#include "test/jemalloc_test.h"

#define	NTHREADS		4
#define	NALLOCS_PER_THREAD	50
#define	DUMP_INTERVAL		1
#define	BT_COUNT_CHECK_INTERVAL	5

#ifdef JEMALLOC_PROF
const char *malloc_conf =
    "prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0";
#endif

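/*
 * Intercept profile dump file creation and redirect output to /dev/null, so
 * that the frequent dumps triggered below do not litter the filesystem.
 */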
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}

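/*
 * alloc_0() and alloc_1() are generated below; they recurse into each other
 * according to the bits of their argument before finally calling mallocx(),
 * so each distinct argument value allocates from a distinct backtrace.  The
 * prototypes must precede the definitions because the generated functions are
 * mutually recursive.
 */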
#define	alloc_n_proto(n)						\
static void	*alloc_##n(unsigned bits);

#define	alloc_n_gen(n)							\
static void *								\
alloc_##n(unsigned bits)						\
{									\
	void *p;							\
									\
	if (bits == 0)							\
		p = mallocx(1, 0);					\
	else {								\
		switch (bits & 0x1U) {					\
		case 0:							\
			p = alloc_0(bits >> 1);				\
			break;						\
		case 1:							\
			p = alloc_1(bits >> 1);				\
			break;						\
		default: not_reached();					\
		}							\
	}								\
	/* Intentionally sabotage tail call optimization. */		\
	assert_ptr_not_null(p, "Unexpected mallocx() failure");		\
	return (p);							\
}
alloc_n_proto(0)
alloc_n_proto(1)
alloc_n_gen(0)
alloc_n_gen(1)

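/* Map each (thd_ind, iteration) pair to a unique backtrace. */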
static void *
alloc_from_permuted_backtrace(unsigned thd_ind, unsigned iteration)
{

	return (alloc_0(thd_ind*NALLOCS_PER_THREAD + iteration));
}

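/*
 * Repeatedly allocate and deallocate from unique backtraces, dumping the heap
 * profile along the way and verifying that the backtrace count only grows.
 */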
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	size_t bt_count_prev, bt_count;
	unsigned i_prev, i;

	i_prev = 0;
	bt_count_prev = 0;
	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		void *p = alloc_from_permuted_backtrace(thd_ind, i);
		dallocx(p, 0);
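		/* Dump a heap profile every DUMP_INTERVAL iterations. */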
		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

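		/*
		 * prof_accum is enabled, so backtraces are never discarded;
		 * the global count must therefore grow by at least one per
		 * unique backtrace allocated since the previous check.
		 */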
		if (i % BT_COUNT_CHECK_INTERVAL == 0 ||
		    i+1 == NALLOCS_PER_THREAD) {
			bt_count = prof_bt_count();
			assert_zu_le(bt_count_prev+(i-i_prev), bt_count,
			    "Expected larger backtrace count increase");
			i_prev = i;
			bt_count_prev = bt_count;
		}
	}

	return (NULL);
}

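/*
 * Profiling starts inactive (per malloc_conf above); activate it here, then
 * exercise it from NTHREADS concurrent threads.
 */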
TEST_BEGIN(test_idump)
{
	bool active;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;

	test_skip_if(!config_prof);

	active = true;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while activating profiling");

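	/* Divert all profile dumps to /dev/null. */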
	prof_dump_open = prof_dump_open_intercept;

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++)
		thd_join(thds[i], NULL);
}
TEST_END

int
main(void)
{

	return (test(
	    test_idump));
}