prof_accum.c revision 772163b4f3d8e9a12343e9215f6b070068507604
#include "test/jemalloc_test.h"

#define	NTHREADS		4
#define	NALLOCS_PER_THREAD	50
#define	DUMP_INTERVAL		1
#define	BT_COUNT_CHECK_INTERVAL	5

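/*
 * Configure profiling so that every allocation is sampled (lg_prof_sample:0)
 * and backtrace counts accumulate across dumps (prof_accum:true), but leave
 * profiling inactive until the test enables it via the "prof.active" mallctl.
 */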
#ifdef JEMALLOC_PROF
const char *malloc_conf =
    "prof:true,prof_accum:true,prof_active:false,lg_prof_sample:0";
#endif

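/*
 * Intercept profile dump file creation and redirect the output to /dev/null
 * so that repeated "prof.dump" requests do not litter the filesystem.
 */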
static int
prof_dump_open_intercept(bool propagate_err, const char *filename)
{
	int fd;

	fd = open("/dev/null", O_WRONLY);
	assert_d_ne(fd, -1, "Unexpected open() failure");

	return (fd);
}

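/*
 * Generate a pair of mutually recursive allocation functions, alloc_0() and
 * alloc_1().  The low bit of the bits argument selects which function handles
 * each level of recursion, so every distinct bit pattern yields an allocation
 * with a distinct backtrace.
 */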
#define	alloc_n_proto(n)						\
static void	*alloc_##n(unsigned bits);

#define	alloc_n_gen(n)							\
static void *								\
alloc_##n(unsigned bits)						\
{									\
									\
	if (bits == 0) {						\
		void *p = mallocx(1, 0);				\
		assert_ptr_not_null(p, "Unexpected mallocx() failure");	\
		return (p);						\
	}								\
									\
	switch (bits & 0x1U) {						\
	case 0: return (alloc_0(bits >> 1));				\
	case 1: return (alloc_1(bits >> 1));				\
	default: not_reached();						\
	}								\
}
alloc_n_proto(0)
alloc_n_proto(1)
alloc_n_gen(0)
alloc_n_gen(1)

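/*
 * Map each (thread, iteration) pair to a unique bit pattern, and therefore to
 * a unique backtrace for the resulting allocation.
 */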
static void *
alloc_from_permuted_backtrace(unsigned thd_ind, unsigned iteration)
{

	return (alloc_0(thd_ind*NALLOCS_PER_THREAD + iteration));
}

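/*
 * Worker thread: repeatedly allocate and free via permuted backtraces,
 * trigger heap profile dumps, and verify that prof_bt_count() grows by at
 * least the number of new backtraces created since the previous check.
 */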
static void *
thd_start(void *varg)
{
	unsigned thd_ind = *(unsigned *)varg;
	size_t bt_count_prev, bt_count;
	unsigned i_prev, i;

	i_prev = 0;
	bt_count_prev = 0;
	for (i = 0; i < NALLOCS_PER_THREAD; i++) {
		void *p = alloc_from_permuted_backtrace(thd_ind, i);
		dallocx(p, 0);
		if (i % DUMP_INTERVAL == 0) {
			assert_d_eq(mallctl("prof.dump", NULL, NULL, NULL, 0),
			    0, "Unexpected error while dumping heap profile");
		}

		if (i % BT_COUNT_CHECK_INTERVAL == 0 ||
		    i+1 == NALLOCS_PER_THREAD) {
			bt_count = prof_bt_count();
			assert_zu_le(bt_count_prev+(i-i_prev), bt_count,
			    "Expected larger backtrace count increase");
			i_prev = i;
			bt_count_prev = bt_count;
		}
	}

	return (NULL);
}

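/*
 * Activate profiling, route dump output to /dev/null, and run NTHREADS
 * threads that each allocate from NALLOCS_PER_THREAD distinct backtraces
 * while periodically dumping the heap profile.
 */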
TEST_BEGIN(test_idump)
{
	bool active;
	thd_t thds[NTHREADS];
	unsigned thd_args[NTHREADS];
	unsigned i;

	test_skip_if(!config_prof);

	active = true;
	assert_d_eq(mallctl("prof.active", NULL, NULL, &active, sizeof(active)),
	    0, "Unexpected mallctl failure while activating profiling");

	prof_dump_open = prof_dump_open_intercept;

	for (i = 0; i < NTHREADS; i++) {
		thd_args[i] = i;
		thd_create(&thds[i], thd_start, (void *)&thd_args[i]);
	}
	for (i = 0; i < NTHREADS; i++)
		thd_join(thds[i], NULL);
}
TEST_END

int
main(void)
{

	return (test(
	    test_idump));
}