/******************************************************************************/
#ifdef JEMALLOC_H_TYPES

typedef struct extent_node_s extent_node_t;

#endif /* JEMALLOC_H_TYPES */
/******************************************************************************/
#ifdef JEMALLOC_H_STRUCTS

/* Tree of extents.  Use accessor functions for en_* fields. */
struct extent_node_s {
	/* Arena from which this extent came, if any. */
	arena_t			*en_arena;

	/* Pointer to the extent that this tree node is responsible for. */
	void			*en_addr;

	/* Total region size. */
	size_t			en_size;

	/*
	 * Serial number (potentially non-unique).
	 *
	 * In principle serial numbers can wrap around on 32-bit systems if
	 * JEMALLOC_MUNMAP is defined, but as long as comparison functions fall
	 * back on address comparison for equal serial numbers, stable (if
	 * imperfect) ordering is maintained.
	 *
	 * Serial numbers may not be unique even in the absence of wrap-around,
	 * e.g. when splitting an extent and assigning the same serial number to
	 * both resulting adjacent extents.
	 */
	size_t			en_sn;

	/*
	 * The zeroed flag is used by chunk recycling code to track whether
	 * memory is zero-filled.
	 */
	bool			en_zeroed;

	/*
	 * True if physical memory is committed to the extent, whether
	 * explicitly or implicitly as on a system that overcommits and
	 * satisfies physical memory needs on demand via soft page faults.
	 */
	bool			en_committed;

	/*
	 * The achunk flag is used to validate that huge allocation lookups
	 * don't return arena chunks.
	 */
	bool			en_achunk;

	/* Profile counters, used for huge objects. */
	prof_tctx_t		*en_prof_tctx;

	/* Linkage for arena's runs_dirty and chunks_cache rings. */
	arena_runs_dirty_link_t	rd;
	qr(extent_node_t)	cc_link;

	union {
		/* Linkage for the size/sn/address-ordered tree. */
		rb_node(extent_node_t)	szsnad_link;

		/* Linkage for arena's achunks, huge, and node_cache lists. */
		ql_elm(extent_node_t)	ql_link;
	};

	/* Linkage for the address-ordered tree. */
	rb_node(extent_node_t)	ad_link;
};
typedef rb_tree(extent_node_t) extent_tree_t;
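
/*
 * Illustrative sketch only (the real comparators live in the extent
 * implementation, not in this header): the szsnad tree orders nodes by size,
 * then serial number, then address, falling through to the next key only on
 * equality, while the ad tree orders by address alone.  With a_size/a_sn/
 * a_addr standing for one node's en_* fields and b_* for the other's:
 *
 *	int cmp = (a_size > b_size) - (a_size < b_size);
 *	if (cmp == 0)
 *		cmp = (a_sn > b_sn) - (a_sn < b_sn);
 *	if (cmp == 0)
 *		cmp = ((uintptr_t)a_addr > (uintptr_t)b_addr) -
 *		    ((uintptr_t)a_addr < (uintptr_t)b_addr);
 */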

#endif /* JEMALLOC_H_STRUCTS */
/******************************************************************************/
#ifdef JEMALLOC_H_EXTERNS

rb_proto(, extent_tree_szsnad_, extent_tree_t, extent_node_t)

rb_proto(, extent_tree_ad_, extent_tree_t, extent_node_t)
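
/*
 * rb_proto() only declares the generated tree functions; the matching
 * rb_gen() expansions live in the extent implementation.  The names carry
 * the prefixes requested above, so callers see functions along these lines
 * (a hedged sketch -- the exact set depends on the rb.h in this tree):
 *
 *	extent_tree_szsnad_new(&tree);
 *	extent_tree_szsnad_insert(&tree, node);
 *	node = extent_tree_szsnad_search(&tree, &key);
 *	extent_tree_szsnad_remove(&tree, node);
 *
 * with an analogous extent_tree_ad_* family for the address-ordered tree.
 */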

#endif /* JEMALLOC_H_EXTERNS */
/******************************************************************************/
#ifdef JEMALLOC_H_INLINES

#ifndef JEMALLOC_ENABLE_INLINE
arena_t	*extent_node_arena_get(const extent_node_t *node);
void	*extent_node_addr_get(const extent_node_t *node);
size_t	extent_node_size_get(const extent_node_t *node);
size_t	extent_node_sn_get(const extent_node_t *node);
bool	extent_node_zeroed_get(const extent_node_t *node);
bool	extent_node_committed_get(const extent_node_t *node);
bool	extent_node_achunk_get(const extent_node_t *node);
prof_tctx_t	*extent_node_prof_tctx_get(const extent_node_t *node);
void	extent_node_arena_set(extent_node_t *node, arena_t *arena);
void	extent_node_addr_set(extent_node_t *node, void *addr);
void	extent_node_size_set(extent_node_t *node, size_t size);
void	extent_node_sn_set(extent_node_t *node, size_t sn);
void	extent_node_zeroed_set(extent_node_t *node, bool zeroed);
void	extent_node_committed_set(extent_node_t *node, bool committed);
void	extent_node_achunk_set(extent_node_t *node, bool achunk);
void	extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx);
void	extent_node_init(extent_node_t *node, arena_t *arena, void *addr,
    size_t size, size_t sn, bool zeroed, bool committed);
void	extent_node_dirty_linkage_init(extent_node_t *node);
void	extent_node_dirty_insert(extent_node_t *node,
    arena_runs_dirty_link_t *runs_dirty, extent_node_t *chunks_dirty);
void	extent_node_dirty_remove(extent_node_t *node);
#endif

#if (defined(JEMALLOC_ENABLE_INLINE) || defined(JEMALLOC_EXTENT_C_))
JEMALLOC_INLINE arena_t *
extent_node_arena_get(const extent_node_t *node)
{

	return (node->en_arena);
}

JEMALLOC_INLINE void *
extent_node_addr_get(const extent_node_t *node)
{

	return (node->en_addr);
}

JEMALLOC_INLINE size_t
extent_node_size_get(const extent_node_t *node)
{

	return (node->en_size);
}

JEMALLOC_INLINE size_t
extent_node_sn_get(const extent_node_t *node)
{

	return (node->en_sn);
}

JEMALLOC_INLINE bool
extent_node_zeroed_get(const extent_node_t *node)
{

	return (node->en_zeroed);
}

JEMALLOC_INLINE bool
extent_node_committed_get(const extent_node_t *node)
{

	assert(!node->en_achunk);
	return (node->en_committed);
}

JEMALLOC_INLINE bool
extent_node_achunk_get(const extent_node_t *node)
{

	return (node->en_achunk);
}

JEMALLOC_INLINE prof_tctx_t *
extent_node_prof_tctx_get(const extent_node_t *node)
{

	return (node->en_prof_tctx);
}

JEMALLOC_INLINE void
extent_node_arena_set(extent_node_t *node, arena_t *arena)
{

	node->en_arena = arena;
}

JEMALLOC_INLINE void
extent_node_addr_set(extent_node_t *node, void *addr)
{

	node->en_addr = addr;
}

JEMALLOC_INLINE void
extent_node_size_set(extent_node_t *node, size_t size)
{

	node->en_size = size;
}

JEMALLOC_INLINE void
extent_node_sn_set(extent_node_t *node, size_t sn)
{

	node->en_sn = sn;
}

JEMALLOC_INLINE void
extent_node_zeroed_set(extent_node_t *node, bool zeroed)
{

	node->en_zeroed = zeroed;
}

JEMALLOC_INLINE void
extent_node_committed_set(extent_node_t *node, bool committed)
{

	node->en_committed = committed;
}

JEMALLOC_INLINE void
extent_node_achunk_set(extent_node_t *node, bool achunk)
{

	node->en_achunk = achunk;
}

JEMALLOC_INLINE void
extent_node_prof_tctx_set(extent_node_t *node, prof_tctx_t *tctx)
{

	node->en_prof_tctx = tctx;
}

JEMALLOC_INLINE void
extent_node_init(extent_node_t *node, arena_t *arena, void *addr, size_t size,
    size_t sn, bool zeroed, bool committed)
{

	extent_node_arena_set(node, arena);
	extent_node_addr_set(node, addr);
	extent_node_size_set(node, size);
	extent_node_sn_set(node, sn);
	extent_node_zeroed_set(node, zeroed);
	extent_node_committed_set(node, committed);
	extent_node_achunk_set(node, false);
	if (config_prof)
		extent_node_prof_tctx_set(node, NULL);
}
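
/*
 * Usage sketch (illustrative only; the arena/chunk code is the authority):
 * a node describing a freshly mapped, committed, zero-filled extent could be
 * initialized and handed to a size/sn/address-ordered tree as:
 *
 *	extent_node_init(node, arena, addr, size, sn, true, true);
 *	extent_tree_szsnad_insert(&tree, node);
 */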

JEMALLOC_INLINE void
extent_node_dirty_linkage_init(extent_node_t *node)
{

	qr_new(&node->rd, rd_link);
	qr_new(node, cc_link);
}

JEMALLOC_INLINE void
extent_node_dirty_insert(extent_node_t *node,
    arena_runs_dirty_link_t *runs_dirty, extent_node_t *chunks_dirty)
{

	qr_meld(runs_dirty, &node->rd, rd_link);
	qr_meld(chunks_dirty, node, cc_link);
}

JEMALLOC_INLINE void
extent_node_dirty_remove(extent_node_t *node)
{

	qr_remove(&node->rd, rd_link);
	qr_remove(node, cc_link);
}
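
/*
 * Illustrative lifecycle for the dirty/cache ring linkage (the arena field
 * names below are assumptions based on the rd/cc_link comment above):
 *
 *	extent_node_dirty_linkage_init(node);
 *	extent_node_dirty_insert(node, &arena->runs_dirty, &arena->chunks_cache);
 *	...
 *	extent_node_dirty_remove(node);
 */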

#endif

#endif /* JEMALLOC_H_INLINES */
/******************************************************************************/