1 | #define JEMALLOC_CTL_C_ |
2 | #include "jemalloc/internal/jemalloc_internal.h" |
3 | |
4 | /******************************************************************************/ |
5 | /* Data. */ |
6 | |
7 | /* |
8 | * ctl_mtx protects the following: |
9 | * - ctl_stats.* |
10 | */ |
static malloc_mutex_t ctl_mtx;
static bool ctl_initialized;	/* Set once ctl_init() has completed. */
static uint64_t ctl_epoch;	/* Bumped by each ctl_refresh(). */
static ctl_stats_t ctl_stats;	/* Snapshot; arenas[] has narenas+1 elements (last is the summary). */
15 | |
16 | /******************************************************************************/ |
17 | /* Helpers for named and indexed nodes. */ |
18 | |
19 | JEMALLOC_INLINE_C const ctl_named_node_t * |
20 | ctl_named_node(const ctl_node_t *node) |
21 | { |
22 | |
23 | return ((node->named) ? (const ctl_named_node_t *)node : NULL); |
24 | } |
25 | |
26 | JEMALLOC_INLINE_C const ctl_named_node_t * |
27 | ctl_named_children(const ctl_named_node_t *node, size_t index) |
28 | { |
29 | const ctl_named_node_t *children = ctl_named_node(node->children); |
30 | |
31 | return (children ? &children[index] : NULL); |
32 | } |
33 | |
34 | JEMALLOC_INLINE_C const ctl_indexed_node_t * |
35 | ctl_indexed_node(const ctl_node_t *node) |
36 | { |
37 | |
38 | return (!node->named ? (const ctl_indexed_node_t *)node : NULL); |
39 | } |
40 | |
41 | /******************************************************************************/ |
42 | /* Function prototypes for non-inline static functions. */ |
43 | |
/*
 * CTL_PROTO(n): prototype for a leaf-node handler that services reads
 * (oldp/oldlenp) and writes (newp/newlen) for the node named n.
 */
#define CTL_PROTO(n) \
static int n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, \
    void *oldp, size_t *oldlenp, void *newp, size_t newlen);

/*
 * INDEX_PROTO(n): prototype for an indexed-node lookup function that maps
 * mib element i to the corresponding child node.
 */
#define INDEX_PROTO(n) \
static const ctl_named_node_t *n##_index(tsdn_t *tsdn, \
    const size_t *mib, size_t miblen, size_t i);
51 | |
/* Stats bookkeeping and tree-walk helpers; definitions below. */
static bool ctl_arena_init(ctl_arena_stats_t *astats);
static void ctl_arena_clear(ctl_arena_stats_t *astats);
static void ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats,
    arena_t *arena);
static void ctl_arena_stats_smerge(ctl_arena_stats_t *sstats,
    ctl_arena_stats_t *astats);
static void ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i);
static bool ctl_grow(tsdn_t *tsdn);
static void ctl_refresh(tsdn_t *tsdn);
static bool ctl_init(tsdn_t *tsdn);
static int ctl_lookup(tsdn_t *tsdn, const char *name,
    ctl_node_t const **nodesp, size_t *mibp, size_t *depthp);
64 | |
/* Top-level and thread.* nodes. */
CTL_PROTO(version)
CTL_PROTO(epoch)
CTL_PROTO(thread_tcache_enabled)
CTL_PROTO(thread_tcache_flush)
CTL_PROTO(thread_prof_name)
CTL_PROTO(thread_prof_active)
CTL_PROTO(thread_arena)
CTL_PROTO(thread_allocated)
CTL_PROTO(thread_allocatedp)
CTL_PROTO(thread_deallocated)
CTL_PROTO(thread_deallocatedp)
/* config.* nodes. */
CTL_PROTO(config_cache_oblivious)
CTL_PROTO(config_debug)
CTL_PROTO(config_fill)
CTL_PROTO(config_lazy_lock)
CTL_PROTO(config_malloc_conf)
CTL_PROTO(config_munmap)
CTL_PROTO(config_prof)
CTL_PROTO(config_prof_libgcc)
CTL_PROTO(config_prof_libunwind)
CTL_PROTO(config_stats)
CTL_PROTO(config_tcache)
CTL_PROTO(config_tls)
CTL_PROTO(config_utrace)
CTL_PROTO(config_valgrind)
CTL_PROTO(config_xmalloc)
/* opt.* nodes. */
CTL_PROTO(opt_abort)
CTL_PROTO(opt_dss)
CTL_PROTO(opt_lg_chunk)
CTL_PROTO(opt_narenas)
CTL_PROTO(opt_purge)
CTL_PROTO(opt_lg_dirty_mult)
CTL_PROTO(opt_decay_time)
CTL_PROTO(opt_stats_print)
CTL_PROTO(opt_junk)
CTL_PROTO(opt_zero)
CTL_PROTO(opt_quarantine)
CTL_PROTO(opt_redzone)
CTL_PROTO(opt_utrace)
CTL_PROTO(opt_xmalloc)
CTL_PROTO(opt_tcache)
CTL_PROTO(opt_lg_tcache_max)
CTL_PROTO(opt_prof)
CTL_PROTO(opt_prof_prefix)
CTL_PROTO(opt_prof_active)
CTL_PROTO(opt_prof_thread_active_init)
CTL_PROTO(opt_lg_prof_sample)
CTL_PROTO(opt_lg_prof_interval)
CTL_PROTO(opt_prof_gdump)
CTL_PROTO(opt_prof_final)
CTL_PROTO(opt_prof_leak)
CTL_PROTO(opt_prof_accum)
/* tcache.* nodes. */
CTL_PROTO(tcache_create)
CTL_PROTO(tcache_flush)
CTL_PROTO(tcache_destroy)
/* arena.<i>.* nodes. */
static void arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all);
CTL_PROTO(arena_i_purge)
CTL_PROTO(arena_i_decay)
CTL_PROTO(arena_i_reset)
CTL_PROTO(arena_i_dss)
CTL_PROTO(arena_i_lg_dirty_mult)
CTL_PROTO(arena_i_decay_time)
CTL_PROTO(arena_i_chunk_hooks)
INDEX_PROTO(arena_i)
/* arenas.* nodes (global size-class/arena metadata). */
CTL_PROTO(arenas_bin_i_size)
CTL_PROTO(arenas_bin_i_nregs)
CTL_PROTO(arenas_bin_i_run_size)
INDEX_PROTO(arenas_bin_i)
CTL_PROTO(arenas_lrun_i_size)
INDEX_PROTO(arenas_lrun_i)
CTL_PROTO(arenas_hchunk_i_size)
INDEX_PROTO(arenas_hchunk_i)
CTL_PROTO(arenas_narenas)
CTL_PROTO(arenas_initialized)
CTL_PROTO(arenas_lg_dirty_mult)
CTL_PROTO(arenas_decay_time)
CTL_PROTO(arenas_quantum)
CTL_PROTO(arenas_page)
CTL_PROTO(arenas_tcache_max)
CTL_PROTO(arenas_nbins)
CTL_PROTO(arenas_nhbins)
CTL_PROTO(arenas_nlruns)
CTL_PROTO(arenas_nhchunks)
CTL_PROTO(arenas_extend)
/* prof.* nodes. */
CTL_PROTO(prof_thread_active_init)
CTL_PROTO(prof_active)
CTL_PROTO(prof_dump)
CTL_PROTO(prof_gdump)
CTL_PROTO(prof_reset)
CTL_PROTO(prof_interval)
CTL_PROTO(lg_prof_sample)
/* stats.* nodes. */
CTL_PROTO(stats_arenas_i_small_allocated)
CTL_PROTO(stats_arenas_i_small_nmalloc)
CTL_PROTO(stats_arenas_i_small_ndalloc)
CTL_PROTO(stats_arenas_i_small_nrequests)
CTL_PROTO(stats_arenas_i_large_allocated)
CTL_PROTO(stats_arenas_i_large_nmalloc)
CTL_PROTO(stats_arenas_i_large_ndalloc)
CTL_PROTO(stats_arenas_i_large_nrequests)
CTL_PROTO(stats_arenas_i_huge_allocated)
CTL_PROTO(stats_arenas_i_huge_nmalloc)
CTL_PROTO(stats_arenas_i_huge_ndalloc)
CTL_PROTO(stats_arenas_i_huge_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_nmalloc)
CTL_PROTO(stats_arenas_i_bins_j_ndalloc)
CTL_PROTO(stats_arenas_i_bins_j_nrequests)
CTL_PROTO(stats_arenas_i_bins_j_curregs)
CTL_PROTO(stats_arenas_i_bins_j_nfills)
CTL_PROTO(stats_arenas_i_bins_j_nflushes)
CTL_PROTO(stats_arenas_i_bins_j_nruns)
CTL_PROTO(stats_arenas_i_bins_j_nreruns)
CTL_PROTO(stats_arenas_i_bins_j_curruns)
INDEX_PROTO(stats_arenas_i_bins_j)
CTL_PROTO(stats_arenas_i_lruns_j_nmalloc)
CTL_PROTO(stats_arenas_i_lruns_j_ndalloc)
CTL_PROTO(stats_arenas_i_lruns_j_nrequests)
CTL_PROTO(stats_arenas_i_lruns_j_curruns)
INDEX_PROTO(stats_arenas_i_lruns_j)
CTL_PROTO(stats_arenas_i_hchunks_j_nmalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_ndalloc)
CTL_PROTO(stats_arenas_i_hchunks_j_nrequests)
CTL_PROTO(stats_arenas_i_hchunks_j_curhchunks)
INDEX_PROTO(stats_arenas_i_hchunks_j)
CTL_PROTO(stats_arenas_i_nthreads)
CTL_PROTO(stats_arenas_i_dss)
CTL_PROTO(stats_arenas_i_lg_dirty_mult)
CTL_PROTO(stats_arenas_i_decay_time)
CTL_PROTO(stats_arenas_i_pactive)
CTL_PROTO(stats_arenas_i_pdirty)
CTL_PROTO(stats_arenas_i_mapped)
CTL_PROTO(stats_arenas_i_retained)
CTL_PROTO(stats_arenas_i_npurge)
CTL_PROTO(stats_arenas_i_nmadvise)
CTL_PROTO(stats_arenas_i_purged)
CTL_PROTO(stats_arenas_i_metadata_mapped)
CTL_PROTO(stats_arenas_i_metadata_allocated)
INDEX_PROTO(stats_arenas_i)
CTL_PROTO(stats_cactive)
CTL_PROTO(stats_allocated)
CTL_PROTO(stats_active)
CTL_PROTO(stats_metadata)
CTL_PROTO(stats_resident)
CTL_PROTO(stats_mapped)
CTL_PROTO(stats_retained)
209 | |
210 | /******************************************************************************/ |
211 | /* mallctl tree. */ |
212 | |
213 | /* Maximum tree depth. */ |
#define CTL_MAX_DEPTH 6

/* NAME(n): initializer prefix for a named node ({true} sets the named flag). */
#define NAME(n) {true}, n
/*
 * CHILD(t, c): initializer tail for an internal node whose children are the
 * statically defined array c##_node of named or indexed (t) nodes.
 */
#define CHILD(t, c) \
	sizeof(c##_node) / sizeof(ctl_##t##_node_t), \
	(ctl_node_t *)c##_node, \
	NULL
/* CTL(c): initializer tail for a terminal node serviced by c##_ctl(). */
#define CTL(c) 0, NULL, c##_ctl

/*
 * Only handles internal indexed nodes, since there are currently no external
 * ones.
 */
#define INDEX(i) {false}, i##_index
228 | |
/* thread.tcache.* leaves. */
static const ctl_named_node_t thread_tcache_node[] = {
	{NAME("enabled" ), CTL(thread_tcache_enabled)},
	{NAME("flush" ), CTL(thread_tcache_flush)}
};

/* thread.prof.* leaves. */
static const ctl_named_node_t thread_prof_node[] = {
	{NAME("name" ), CTL(thread_prof_name)},
	{NAME("active" ), CTL(thread_prof_active)}
};

/* thread.* subtree. */
static const ctl_named_node_t thread_node[] = {
	{NAME("arena" ), CTL(thread_arena)},
	{NAME("allocated" ), CTL(thread_allocated)},
	{NAME("allocatedp" ), CTL(thread_allocatedp)},
	{NAME("deallocated" ), CTL(thread_deallocated)},
	{NAME("deallocatedp" ), CTL(thread_deallocatedp)},
	{NAME("tcache" ), CHILD(named, thread_tcache)},
	{NAME("prof" ), CHILD(named, thread_prof)}
};

/* config.* leaves (compile-time feature flags). */
static const ctl_named_node_t config_node[] = {
	{NAME("cache_oblivious" ), CTL(config_cache_oblivious)},
	{NAME("debug" ), CTL(config_debug)},
	{NAME("fill" ), CTL(config_fill)},
	{NAME("lazy_lock" ), CTL(config_lazy_lock)},
	{NAME("malloc_conf" ), CTL(config_malloc_conf)},
	{NAME("munmap" ), CTL(config_munmap)},
	{NAME("prof" ), CTL(config_prof)},
	{NAME("prof_libgcc" ), CTL(config_prof_libgcc)},
	{NAME("prof_libunwind" ), CTL(config_prof_libunwind)},
	{NAME("stats" ), CTL(config_stats)},
	{NAME("tcache" ), CTL(config_tcache)},
	{NAME("tls" ), CTL(config_tls)},
	{NAME("utrace" ), CTL(config_utrace)},
	{NAME("valgrind" ), CTL(config_valgrind)},
	{NAME("xmalloc" ), CTL(config_xmalloc)}
};

/* opt.* leaves (run-time option values). */
static const ctl_named_node_t opt_node[] = {
	{NAME("abort" ), CTL(opt_abort)},
	{NAME("dss" ), CTL(opt_dss)},
	{NAME("lg_chunk" ), CTL(opt_lg_chunk)},
	{NAME("narenas" ), CTL(opt_narenas)},
	{NAME("purge" ), CTL(opt_purge)},
	{NAME("lg_dirty_mult" ), CTL(opt_lg_dirty_mult)},
	{NAME("decay_time" ), CTL(opt_decay_time)},
	{NAME("stats_print" ), CTL(opt_stats_print)},
	{NAME("junk" ), CTL(opt_junk)},
	{NAME("zero" ), CTL(opt_zero)},
	{NAME("quarantine" ), CTL(opt_quarantine)},
	{NAME("redzone" ), CTL(opt_redzone)},
	{NAME("utrace" ), CTL(opt_utrace)},
	{NAME("xmalloc" ), CTL(opt_xmalloc)},
	{NAME("tcache" ), CTL(opt_tcache)},
	{NAME("lg_tcache_max" ), CTL(opt_lg_tcache_max)},
	{NAME("prof" ), CTL(opt_prof)},
	{NAME("prof_prefix" ), CTL(opt_prof_prefix)},
	{NAME("prof_active" ), CTL(opt_prof_active)},
	{NAME("prof_thread_active_init" ), CTL(opt_prof_thread_active_init)},
	{NAME("lg_prof_sample" ), CTL(opt_lg_prof_sample)},
	{NAME("lg_prof_interval" ), CTL(opt_lg_prof_interval)},
	{NAME("prof_gdump" ), CTL(opt_prof_gdump)},
	{NAME("prof_final" ), CTL(opt_prof_final)},
	{NAME("prof_leak" ), CTL(opt_prof_leak)},
	{NAME("prof_accum" ), CTL(opt_prof_accum)}
};

/* tcache.* leaves. */
static const ctl_named_node_t tcache_node[] = {
	{NAME("create" ), CTL(tcache_create)},
	{NAME("flush" ), CTL(tcache_flush)},
	{NAME("destroy" ), CTL(tcache_destroy)}
};

/* arena.<i>.* leaves; reached through the indexed arena_node below. */
static const ctl_named_node_t arena_i_node[] = {
	{NAME("purge" ), CTL(arena_i_purge)},
	{NAME("decay" ), CTL(arena_i_decay)},
	{NAME("reset" ), CTL(arena_i_reset)},
	{NAME("dss" ), CTL(arena_i_dss)},
	{NAME("lg_dirty_mult" ), CTL(arena_i_lg_dirty_mult)},
	{NAME("decay_time" ), CTL(arena_i_decay_time)},
	{NAME("chunk_hooks" ), CTL(arena_i_chunk_hooks)}
};
static const ctl_named_node_t super_arena_i_node[] = {
	{NAME("" ), CHILD(named, arena_i)}
};
314 | |
/* arena.* indexed node: arena.<i> resolves via arena_i_index(). */
static const ctl_indexed_node_t arena_node[] = {
	{INDEX(arena_i)}
};

/* arenas.bin.<i>.* leaves. */
static const ctl_named_node_t arenas_bin_i_node[] = {
	{NAME("size" ), CTL(arenas_bin_i_size)},
	{NAME("nregs" ), CTL(arenas_bin_i_nregs)},
	{NAME("run_size" ), CTL(arenas_bin_i_run_size)}
};
static const ctl_named_node_t super_arenas_bin_i_node[] = {
	{NAME("" ), CHILD(named, arenas_bin_i)}
};

static const ctl_indexed_node_t arenas_bin_node[] = {
	{INDEX(arenas_bin_i)}
};

/* arenas.lrun.<i>.* leaves. */
static const ctl_named_node_t arenas_lrun_i_node[] = {
	{NAME("size" ), CTL(arenas_lrun_i_size)}
};
static const ctl_named_node_t super_arenas_lrun_i_node[] = {
	{NAME("" ), CHILD(named, arenas_lrun_i)}
};

static const ctl_indexed_node_t arenas_lrun_node[] = {
	{INDEX(arenas_lrun_i)}
};

/* arenas.hchunk.<i>.* leaves. */
static const ctl_named_node_t arenas_hchunk_i_node[] = {
	{NAME("size" ), CTL(arenas_hchunk_i_size)}
};
static const ctl_named_node_t super_arenas_hchunk_i_node[] = {
	{NAME("" ), CHILD(named, arenas_hchunk_i)}
};

static const ctl_indexed_node_t arenas_hchunk_node[] = {
	{INDEX(arenas_hchunk_i)}
};

/* arenas.* subtree. */
static const ctl_named_node_t arenas_node[] = {
	{NAME("narenas" ), CTL(arenas_narenas)},
	{NAME("initialized" ), CTL(arenas_initialized)},
	{NAME("lg_dirty_mult" ), CTL(arenas_lg_dirty_mult)},
	{NAME("decay_time" ), CTL(arenas_decay_time)},
	{NAME("quantum" ), CTL(arenas_quantum)},
	{NAME("page" ), CTL(arenas_page)},
	{NAME("tcache_max" ), CTL(arenas_tcache_max)},
	{NAME("nbins" ), CTL(arenas_nbins)},
	{NAME("nhbins" ), CTL(arenas_nhbins)},
	{NAME("bin" ), CHILD(indexed, arenas_bin)},
	{NAME("nlruns" ), CTL(arenas_nlruns)},
	{NAME("lrun" ), CHILD(indexed, arenas_lrun)},
	{NAME("nhchunks" ), CTL(arenas_nhchunks)},
	{NAME("hchunk" ), CHILD(indexed, arenas_hchunk)},
	{NAME("extend" ), CTL(arenas_extend)}
};

/* prof.* subtree. */
static const ctl_named_node_t prof_node[] = {
	{NAME("thread_active_init" ), CTL(prof_thread_active_init)},
	{NAME("active" ), CTL(prof_active)},
	{NAME("dump" ), CTL(prof_dump)},
	{NAME("gdump" ), CTL(prof_gdump)},
	{NAME("reset" ), CTL(prof_reset)},
	{NAME("interval" ), CTL(prof_interval)},
	{NAME("lg_sample" ), CTL(lg_prof_sample)}
};
381 | |
/* stats.arenas.<i>.metadata.* leaves. */
static const ctl_named_node_t stats_arenas_i_metadata_node[] = {
	{NAME("mapped" ), CTL(stats_arenas_i_metadata_mapped)},
	{NAME("allocated" ), CTL(stats_arenas_i_metadata_allocated)}
};

/* stats.arenas.<i>.small.* leaves. */
static const ctl_named_node_t stats_arenas_i_small_node[] = {
	{NAME("allocated" ), CTL(stats_arenas_i_small_allocated)},
	{NAME("nmalloc" ), CTL(stats_arenas_i_small_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_small_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_small_nrequests)}
};

/* stats.arenas.<i>.large.* leaves. */
static const ctl_named_node_t stats_arenas_i_large_node[] = {
	{NAME("allocated" ), CTL(stats_arenas_i_large_allocated)},
	{NAME("nmalloc" ), CTL(stats_arenas_i_large_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_large_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_large_nrequests)}
};

/* stats.arenas.<i>.huge.* leaves. */
static const ctl_named_node_t stats_arenas_i_huge_node[] = {
	{NAME("allocated" ), CTL(stats_arenas_i_huge_allocated)},
	{NAME("nmalloc" ), CTL(stats_arenas_i_huge_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_huge_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_huge_nrequests)}
};

/* stats.arenas.<i>.bins.<j>.* leaves. */
static const ctl_named_node_t stats_arenas_i_bins_j_node[] = {
	{NAME("nmalloc" ), CTL(stats_arenas_i_bins_j_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_bins_j_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_bins_j_nrequests)},
	{NAME("curregs" ), CTL(stats_arenas_i_bins_j_curregs)},
	{NAME("nfills" ), CTL(stats_arenas_i_bins_j_nfills)},
	{NAME("nflushes" ), CTL(stats_arenas_i_bins_j_nflushes)},
	{NAME("nruns" ), CTL(stats_arenas_i_bins_j_nruns)},
	{NAME("nreruns" ), CTL(stats_arenas_i_bins_j_nreruns)},
	{NAME("curruns" ), CTL(stats_arenas_i_bins_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_bins_j_node[] = {
	{NAME("" ), CHILD(named, stats_arenas_i_bins_j)}
};

static const ctl_indexed_node_t stats_arenas_i_bins_node[] = {
	{INDEX(stats_arenas_i_bins_j)}
};

/* stats.arenas.<i>.lruns.<j>.* leaves. */
static const ctl_named_node_t stats_arenas_i_lruns_j_node[] = {
	{NAME("nmalloc" ), CTL(stats_arenas_i_lruns_j_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_lruns_j_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_lruns_j_nrequests)},
	{NAME("curruns" ), CTL(stats_arenas_i_lruns_j_curruns)}
};
static const ctl_named_node_t super_stats_arenas_i_lruns_j_node[] = {
	{NAME("" ), CHILD(named, stats_arenas_i_lruns_j)}
};

static const ctl_indexed_node_t stats_arenas_i_lruns_node[] = {
	{INDEX(stats_arenas_i_lruns_j)}
};

/* stats.arenas.<i>.hchunks.<j>.* leaves. */
static const ctl_named_node_t stats_arenas_i_hchunks_j_node[] = {
	{NAME("nmalloc" ), CTL(stats_arenas_i_hchunks_j_nmalloc)},
	{NAME("ndalloc" ), CTL(stats_arenas_i_hchunks_j_ndalloc)},
	{NAME("nrequests" ), CTL(stats_arenas_i_hchunks_j_nrequests)},
	{NAME("curhchunks" ), CTL(stats_arenas_i_hchunks_j_curhchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_hchunks_j_node[] = {
	{NAME("" ), CHILD(named, stats_arenas_i_hchunks_j)}
};

static const ctl_indexed_node_t stats_arenas_i_hchunks_node[] = {
	{INDEX(stats_arenas_i_hchunks_j)}
};
454 | |
/* stats.arenas.<i>.* subtree. */
static const ctl_named_node_t stats_arenas_i_node[] = {
	{NAME("nthreads" ), CTL(stats_arenas_i_nthreads)},
	{NAME("dss" ), CTL(stats_arenas_i_dss)},
	{NAME("lg_dirty_mult" ), CTL(stats_arenas_i_lg_dirty_mult)},
	{NAME("decay_time" ), CTL(stats_arenas_i_decay_time)},
	{NAME("pactive" ), CTL(stats_arenas_i_pactive)},
	{NAME("pdirty" ), CTL(stats_arenas_i_pdirty)},
	{NAME("mapped" ), CTL(stats_arenas_i_mapped)},
	{NAME("retained" ), CTL(stats_arenas_i_retained)},
	{NAME("npurge" ), CTL(stats_arenas_i_npurge)},
	{NAME("nmadvise" ), CTL(stats_arenas_i_nmadvise)},
	{NAME("purged" ), CTL(stats_arenas_i_purged)},
	{NAME("metadata" ), CHILD(named, stats_arenas_i_metadata)},
	{NAME("small" ), CHILD(named, stats_arenas_i_small)},
	{NAME("large" ), CHILD(named, stats_arenas_i_large)},
	{NAME("huge" ), CHILD(named, stats_arenas_i_huge)},
	{NAME("bins" ), CHILD(indexed, stats_arenas_i_bins)},
	{NAME("lruns" ), CHILD(indexed, stats_arenas_i_lruns)},
	{NAME("hchunks" ), CHILD(indexed, stats_arenas_i_hchunks)}
};
static const ctl_named_node_t super_stats_arenas_i_node[] = {
	{NAME("" ), CHILD(named, stats_arenas_i)}
};

static const ctl_indexed_node_t stats_arenas_node[] = {
	{INDEX(stats_arenas_i)}
};

/* stats.* subtree. */
static const ctl_named_node_t stats_node[] = {
	{NAME("cactive" ), CTL(stats_cactive)},
	{NAME("allocated" ), CTL(stats_allocated)},
	{NAME("active" ), CTL(stats_active)},
	{NAME("metadata" ), CTL(stats_metadata)},
	{NAME("resident" ), CTL(stats_resident)},
	{NAME("mapped" ), CTL(stats_mapped)},
	{NAME("retained" ), CTL(stats_retained)},
	{NAME("arenas" ), CHILD(indexed, stats_arenas)}
};

/* Root of the mallctl name tree. */
static const ctl_named_node_t root_node[] = {
	{NAME("version" ), CTL(version)},
	{NAME("epoch" ), CTL(epoch)},
	{NAME("thread" ), CHILD(named, thread)},
	{NAME("config" ), CHILD(named, config)},
	{NAME("opt" ), CHILD(named, opt)},
	{NAME("tcache" ), CHILD(named, tcache)},
	{NAME("arena" ), CHILD(indexed, arena)},
	{NAME("arenas" ), CHILD(named, arenas)},
	{NAME("prof" ), CHILD(named, prof)},
	{NAME("stats" ), CHILD(named, stats)}
};
static const ctl_named_node_t super_root_node[] = {
	{NAME("" ), CHILD(named, root)}
};

#undef NAME
#undef CHILD
#undef CTL
#undef INDEX
514 | |
515 | /******************************************************************************/ |
516 | |
517 | static bool |
518 | ctl_arena_init(ctl_arena_stats_t *astats) |
519 | { |
520 | |
521 | if (astats->lstats == NULL) { |
522 | astats->lstats = (malloc_large_stats_t *)a0malloc(nlclasses * |
523 | sizeof(malloc_large_stats_t)); |
524 | if (astats->lstats == NULL) |
525 | return (true); |
526 | } |
527 | |
528 | if (astats->hstats == NULL) { |
529 | astats->hstats = (malloc_huge_stats_t *)a0malloc(nhclasses * |
530 | sizeof(malloc_huge_stats_t)); |
531 | if (astats->hstats == NULL) |
532 | return (true); |
533 | } |
534 | |
535 | return (false); |
536 | } |
537 | |
538 | static void |
539 | ctl_arena_clear(ctl_arena_stats_t *astats) |
540 | { |
541 | |
542 | astats->nthreads = 0; |
543 | astats->dss = dss_prec_names[dss_prec_limit]; |
544 | astats->lg_dirty_mult = -1; |
545 | astats->decay_time = -1; |
546 | astats->pactive = 0; |
547 | astats->pdirty = 0; |
548 | if (config_stats) { |
549 | memset(&astats->astats, 0, sizeof(arena_stats_t)); |
550 | astats->allocated_small = 0; |
551 | astats->nmalloc_small = 0; |
552 | astats->ndalloc_small = 0; |
553 | astats->nrequests_small = 0; |
554 | memset(astats->bstats, 0, NBINS * sizeof(malloc_bin_stats_t)); |
555 | memset(astats->lstats, 0, nlclasses * |
556 | sizeof(malloc_large_stats_t)); |
557 | memset(astats->hstats, 0, nhclasses * |
558 | sizeof(malloc_huge_stats_t)); |
559 | } |
560 | } |
561 | |
562 | static void |
563 | ctl_arena_stats_amerge(tsdn_t *tsdn, ctl_arena_stats_t *cstats, arena_t *arena) |
564 | { |
565 | unsigned i; |
566 | |
567 | if (config_stats) { |
568 | arena_stats_merge(tsdn, arena, &cstats->nthreads, &cstats->dss, |
569 | &cstats->lg_dirty_mult, &cstats->decay_time, |
570 | &cstats->pactive, &cstats->pdirty, &cstats->astats, |
571 | cstats->bstats, cstats->lstats, cstats->hstats); |
572 | |
573 | for (i = 0; i < NBINS; i++) { |
574 | cstats->allocated_small += cstats->bstats[i].curregs * |
575 | index2size(i); |
576 | cstats->nmalloc_small += cstats->bstats[i].nmalloc; |
577 | cstats->ndalloc_small += cstats->bstats[i].ndalloc; |
578 | cstats->nrequests_small += cstats->bstats[i].nrequests; |
579 | } |
580 | } else { |
581 | arena_basic_stats_merge(tsdn, arena, &cstats->nthreads, |
582 | &cstats->dss, &cstats->lg_dirty_mult, &cstats->decay_time, |
583 | &cstats->pactive, &cstats->pdirty); |
584 | } |
585 | } |
586 | |
/*
 * Accumulate astats into sstats (the summary element).  Only additive
 * counters are merged; per-arena settings (dss, lg_dirty_mult, decay_time)
 * are intentionally left untouched in sstats.
 */
static void
ctl_arena_stats_smerge(ctl_arena_stats_t *sstats, ctl_arena_stats_t *astats)
{
	unsigned i;

	sstats->nthreads += astats->nthreads;
	sstats->pactive += astats->pactive;
	sstats->pdirty += astats->pdirty;

	if (config_stats) {
		/* Chunk/purging counters. */
		sstats->astats.mapped += astats->astats.mapped;
		sstats->astats.retained += astats->astats.retained;
		sstats->astats.npurge += astats->astats.npurge;
		sstats->astats.nmadvise += astats->astats.nmadvise;
		sstats->astats.purged += astats->astats.purged;

		sstats->astats.metadata_mapped +=
		    astats->astats.metadata_mapped;
		sstats->astats.metadata_allocated +=
		    astats->astats.metadata_allocated;

		/* Aggregate small/large/huge size-class totals. */
		sstats->allocated_small += astats->allocated_small;
		sstats->nmalloc_small += astats->nmalloc_small;
		sstats->ndalloc_small += astats->ndalloc_small;
		sstats->nrequests_small += astats->nrequests_small;

		sstats->astats.allocated_large +=
		    astats->astats.allocated_large;
		sstats->astats.nmalloc_large += astats->astats.nmalloc_large;
		sstats->astats.ndalloc_large += astats->astats.ndalloc_large;
		sstats->astats.nrequests_large +=
		    astats->astats.nrequests_large;

		sstats->astats.allocated_huge += astats->astats.allocated_huge;
		sstats->astats.nmalloc_huge += astats->astats.nmalloc_huge;
		sstats->astats.ndalloc_huge += astats->astats.ndalloc_huge;

		/* Per-bin (small) counters. */
		for (i = 0; i < NBINS; i++) {
			sstats->bstats[i].nmalloc += astats->bstats[i].nmalloc;
			sstats->bstats[i].ndalloc += astats->bstats[i].ndalloc;
			sstats->bstats[i].nrequests +=
			    astats->bstats[i].nrequests;
			sstats->bstats[i].curregs += astats->bstats[i].curregs;
			if (config_tcache) {
				sstats->bstats[i].nfills +=
				    astats->bstats[i].nfills;
				sstats->bstats[i].nflushes +=
				    astats->bstats[i].nflushes;
			}
			sstats->bstats[i].nruns += astats->bstats[i].nruns;
			sstats->bstats[i].reruns += astats->bstats[i].reruns;
			sstats->bstats[i].curruns += astats->bstats[i].curruns;
		}

		/* Per-large-size-class counters. */
		for (i = 0; i < nlclasses; i++) {
			sstats->lstats[i].nmalloc += astats->lstats[i].nmalloc;
			sstats->lstats[i].ndalloc += astats->lstats[i].ndalloc;
			sstats->lstats[i].nrequests +=
			    astats->lstats[i].nrequests;
			sstats->lstats[i].curruns += astats->lstats[i].curruns;
		}

		/* Per-huge-size-class counters. */
		for (i = 0; i < nhclasses; i++) {
			sstats->hstats[i].nmalloc += astats->hstats[i].nmalloc;
			sstats->hstats[i].ndalloc += astats->hstats[i].ndalloc;
			sstats->hstats[i].curhchunks +=
			    astats->hstats[i].curhchunks;
		}
	}
}
657 | |
/*
 * Re-snapshot arena i's stats into ctl_stats.arenas[i], then fold them into
 * the summary element at ctl_stats.arenas[ctl_stats.narenas].
 */
static void
ctl_arena_refresh(tsdn_t *tsdn, arena_t *arena, unsigned i)
{
	ctl_arena_stats_t *astats = &ctl_stats.arenas[i];
	ctl_arena_stats_t *sstats = &ctl_stats.arenas[ctl_stats.narenas];

	ctl_arena_clear(astats);
	ctl_arena_stats_amerge(tsdn, astats, arena);
	/* Merge into sum stats as well. */
	ctl_arena_stats_smerge(sstats, astats);
}
669 | |
/*
 * Create one new arena (at index ctl_stats.narenas) and grow the stats array
 * to match.  The summary element always lives at the end of the array, so
 * the freshly initialized per-arena element is swapped in just before it.
 * Returns true on error; ctl_stats is left unmodified in that case.
 */
static bool
ctl_grow(tsdn_t *tsdn)
{
	ctl_arena_stats_t *astats;

	/* Initialize new arena. */
	if (arena_init(tsdn, ctl_stats.narenas) == NULL)
		return (true);

	/* Allocate extended arena stats. */
	astats = (ctl_arena_stats_t *)a0malloc((ctl_stats.narenas + 2) *
	    sizeof(ctl_arena_stats_t));
	if (astats == NULL)
		return (true);

	/* Initialize the new astats element. */
	memcpy(astats, ctl_stats.arenas, (ctl_stats.narenas + 1) *
	    sizeof(ctl_arena_stats_t));
	memset(&astats[ctl_stats.narenas + 1], 0, sizeof(ctl_arena_stats_t));
	if (ctl_arena_init(&astats[ctl_stats.narenas + 1])) {
		a0dalloc(astats);
		return (true);
	}
	/* Swap merged stats to their new location. */
	{
		ctl_arena_stats_t tstats;
		memcpy(&tstats, &astats[ctl_stats.narenas],
		    sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas],
		    &astats[ctl_stats.narenas + 1], sizeof(ctl_arena_stats_t));
		memcpy(&astats[ctl_stats.narenas + 1], &tstats,
		    sizeof(ctl_arena_stats_t));
	}
	/* Commit: publish the new array and arena count. */
	a0dalloc(ctl_stats.arenas);
	ctl_stats.arenas = astats;
	ctl_stats.narenas++;

	return (false);
}
709 | |
710 | static void |
711 | ctl_refresh(tsdn_t *tsdn) |
712 | { |
713 | unsigned i; |
714 | VARIABLE_ARRAY(arena_t *, tarenas, ctl_stats.narenas); |
715 | |
716 | /* |
717 | * Clear sum stats, since they will be merged into by |
718 | * ctl_arena_refresh(). |
719 | */ |
720 | ctl_arena_clear(&ctl_stats.arenas[ctl_stats.narenas]); |
721 | |
722 | for (i = 0; i < ctl_stats.narenas; i++) |
723 | tarenas[i] = arena_get(tsdn, i, false); |
724 | |
725 | for (i = 0; i < ctl_stats.narenas; i++) { |
726 | bool initialized = (tarenas[i] != NULL); |
727 | |
728 | ctl_stats.arenas[i].initialized = initialized; |
729 | if (initialized) |
730 | ctl_arena_refresh(tsdn, tarenas[i], i); |
731 | } |
732 | |
733 | if (config_stats) { |
734 | size_t base_allocated, base_resident, base_mapped; |
735 | base_stats_get(tsdn, &base_allocated, &base_resident, |
736 | &base_mapped); |
737 | ctl_stats.allocated = |
738 | ctl_stats.arenas[ctl_stats.narenas].allocated_small + |
739 | ctl_stats.arenas[ctl_stats.narenas].astats.allocated_large + |
740 | ctl_stats.arenas[ctl_stats.narenas].astats.allocated_huge; |
741 | ctl_stats.active = |
742 | (ctl_stats.arenas[ctl_stats.narenas].pactive << LG_PAGE); |
743 | ctl_stats.metadata = base_allocated + |
744 | ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped + |
745 | ctl_stats.arenas[ctl_stats.narenas].astats |
746 | .metadata_allocated; |
747 | ctl_stats.resident = base_resident + |
748 | ctl_stats.arenas[ctl_stats.narenas].astats.metadata_mapped + |
749 | ((ctl_stats.arenas[ctl_stats.narenas].pactive + |
750 | ctl_stats.arenas[ctl_stats.narenas].pdirty) << LG_PAGE); |
751 | ctl_stats.mapped = base_mapped + |
752 | ctl_stats.arenas[ctl_stats.narenas].astats.mapped; |
753 | ctl_stats.retained = |
754 | ctl_stats.arenas[ctl_stats.narenas].astats.retained; |
755 | } |
756 | |
757 | ctl_epoch++; |
758 | } |
759 | |
/*
 * One-time lazy initialization of the ctl machinery: allocate the arena
 * stats array (one element per arena plus a trailing summary element) and
 * take the initial stats snapshot.  Serialized by ctl_mtx.  Returns true
 * on error, in which case all partially allocated state is released.
 */
static bool
ctl_init(tsdn_t *tsdn)
{
	bool ret;

	malloc_mutex_lock(tsdn, &ctl_mtx);
	if (!ctl_initialized) {
		/*
		 * Allocate space for one extra arena stats element, which
		 * contains summed stats across all arenas.
		 */
		ctl_stats.narenas = narenas_total_get();
		ctl_stats.arenas = (ctl_arena_stats_t *)a0malloc(
		    (ctl_stats.narenas + 1) * sizeof(ctl_arena_stats_t));
		if (ctl_stats.arenas == NULL) {
			ret = true;
			goto label_return;
		}
		memset(ctl_stats.arenas, 0, (ctl_stats.narenas + 1) *
		    sizeof(ctl_arena_stats_t));

		/*
		 * Initialize all stats structures, regardless of whether they
		 * ever get used.  Lazy initialization would allow errors to
		 * cause inconsistent state to be viewable by the application.
		 */
		if (config_stats) {
			unsigned i;
			for (i = 0; i <= ctl_stats.narenas; i++) {
				if (ctl_arena_init(&ctl_stats.arenas[i])) {
					/* Unwind elements initialized so far. */
					unsigned j;
					for (j = 0; j < i; j++) {
						a0dalloc(
						    ctl_stats.arenas[j].lstats);
						a0dalloc(
						    ctl_stats.arenas[j].hstats);
					}
					a0dalloc(ctl_stats.arenas);
					ctl_stats.arenas = NULL;
					ret = true;
					goto label_return;
				}
			}
		}
		/* The summary element is always considered initialized. */
		ctl_stats.arenas[ctl_stats.narenas].initialized = true;

		ctl_epoch = 0;
		ctl_refresh(tsdn);
		ctl_initialized = true;
	}

	ret = false;
label_return:
	malloc_mutex_unlock(tsdn, &ctl_mtx);
	return (ret);
}
816 | |
/*
 * Translate a period-delimited name (e.g. "arenas.bin.0.size") into a MIB
 * written to mibp, optionally recording the node visited at each level in
 * nodesp.  On entry *depthp caps the traversal depth; on success it is
 * updated to the number of MIB components actually consumed.
 * Returns 0 on success, ENOENT if any component fails to resolve or the
 * name is longer/shorter than a complete path through the tree.
 */
static int
ctl_lookup(tsdn_t *tsdn, const char *name, ctl_node_t const **nodesp,
    size_t *mibp, size_t *depthp)
{
	int ret;
	const char *elm, *tdot, *dot;
	size_t elen, i, j;
	const ctl_named_node_t *node;

	elm = name;
	/* Equivalent to strchrnul(). */
	dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot : strchr(elm, '\0');
	elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	if (elen == 0) {
		/* Empty path component (leading/doubled '.'). */
		ret = ENOENT;
		goto label_return;
	}
	node = super_root_node;
	for (i = 0; i < *depthp; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			const ctl_named_node_t *pnode = node;

			/* Children are named. */
			for (j = 0; j < node->nchildren; j++) {
				const ctl_named_node_t *child =
				    ctl_named_children(node, j);
				if (strlen(child->name) == elen &&
				    strncmp(elm, child->name, elen) == 0) {
					node = child;
					if (nodesp != NULL)
						nodesp[i] =
						    (const ctl_node_t *)node;
					mibp[i] = j;
					break;
				}
			}
			/* node still equal to pnode means no child matched. */
			if (node == pnode) {
				ret = ENOENT;
				goto label_return;
			}
		} else {
			uintmax_t index;
			const ctl_indexed_node_t *inode;

			/* Children are indexed. */
			index = malloc_strtoumax(elm, NULL, 10);
			if (index == UINTMAX_MAX || index > SIZE_T_MAX) {
				ret = ENOENT;
				goto label_return;
			}

			inode = ctl_indexed_node(node->children);
			node = inode->index(tsdn, mibp, *depthp, (size_t)index);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}

			if (nodesp != NULL)
				nodesp[i] = (const ctl_node_t *)node;
			mibp[i] = (size_t)index;
		}

		if (node->ctl != NULL) {
			/* Terminal node. */
			if (*dot != '\0') {
				/*
				 * The name contains more elements than are
				 * in this path through the tree.
				 */
				ret = ENOENT;
				goto label_return;
			}
			/* Complete lookup successful. */
			*depthp = i + 1;
			break;
		}

		/* Update elm. */
		if (*dot == '\0') {
			/* No more elements. */
			ret = ENOENT;
			goto label_return;
		}
		elm = &dot[1];
		dot = ((tdot = strchr(elm, '.')) != NULL) ? tdot :
		    strchr(elm, '\0');
		elen = (size_t)((uintptr_t)dot - (uintptr_t)elm);
	}

	ret = 0;
label_return:
	return (ret);
}
913 | |
/*
 * Implement mallctl(): resolve name to a terminal node via ctl_lookup(),
 * then invoke the node's ctl handler with the caller's old/new buffers.
 * Returns EAGAIN if lazy ctl initialization fails, ENOENT for unknown or
 * partial names, otherwise the handler's result.
 */
int
ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
    void *newp, size_t newlen)
{
	int ret;
	size_t depth;
	ctl_node_t const *nodes[CTL_MAX_DEPTH];
	size_t mib[CTL_MAX_DEPTH];
	const ctl_named_node_t *node;

	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
		ret = EAGAIN;
		goto label_return;
	}

	depth = CTL_MAX_DEPTH;
	ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth);
	if (ret != 0)
		goto label_return;

	/* The last node on the path must be terminal (have a handler). */
	node = ctl_named_node(nodes[depth-1]);
	if (node != NULL && node->ctl)
		ret = node->ctl(tsd, mib, depth, oldp, oldlenp, newp, newlen);
	else {
		/* The name refers to a partial path through the ctl tree. */
		ret = ENOENT;
	}

label_return:
	return(ret);
}
945 | |
946 | int |
947 | ctl_nametomib(tsdn_t *tsdn, const char *name, size_t *mibp, size_t *miblenp) |
948 | { |
949 | int ret; |
950 | |
951 | if (!ctl_initialized && ctl_init(tsdn)) { |
952 | ret = EAGAIN; |
953 | goto label_return; |
954 | } |
955 | |
956 | ret = ctl_lookup(tsdn, name, NULL, mibp, miblenp); |
957 | label_return: |
958 | return(ret); |
959 | } |
960 | |
/*
 * Implement mallctlbymib(): walk the node tree using an already-translated
 * MIB, then invoke the terminal node's ctl handler.  Returns EAGAIN if lazy
 * ctl initialization fails, ENOENT for out-of-range indices or a MIB that
 * stops at an interior node.
 */
int
ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	const ctl_named_node_t *node;
	size_t i;

	if (!ctl_initialized && ctl_init(tsd_tsdn(tsd))) {
		ret = EAGAIN;
		goto label_return;
	}

	/* Iterate down the tree. */
	node = super_root_node;
	for (i = 0; i < miblen; i++) {
		assert(node);
		assert(node->nchildren > 0);
		if (ctl_named_node(node->children) != NULL) {
			/* Children are named. */
			if (node->nchildren <= (unsigned)mib[i]) {
				ret = ENOENT;
				goto label_return;
			}
			node = ctl_named_children(node, mib[i]);
		} else {
			const ctl_indexed_node_t *inode;

			/* Indexed element. */
			inode = ctl_indexed_node(node->children);
			node = inode->index(tsd_tsdn(tsd), mib, miblen, mib[i]);
			if (node == NULL) {
				ret = ENOENT;
				goto label_return;
			}
		}
	}

	/* Call the ctl function. */
	if (node && node->ctl)
		ret = node->ctl(tsd, mib, miblen, oldp, oldlenp, newp, newlen);
	else {
		/* Partial MIB. */
		ret = ENOENT;
	}

label_return:
	return(ret);
}
1010 | |
1011 | bool |
1012 | ctl_boot(void) |
1013 | { |
1014 | |
1015 | if (malloc_mutex_init(&ctl_mtx, "ctl" , WITNESS_RANK_CTL)) |
1016 | return (true); |
1017 | |
1018 | ctl_initialized = false; |
1019 | |
1020 | return (false); |
1021 | } |
1022 | |
/* Acquire ctl_mtx in preparation for fork(). */
void
ctl_prefork(tsdn_t *tsdn)
{

	malloc_mutex_prefork(tsdn, &ctl_mtx);
}
1029 | |
/* Release ctl_mtx in the parent after fork(). */
void
ctl_postfork_parent(tsdn_t *tsdn)
{

	malloc_mutex_postfork_parent(tsdn, &ctl_mtx);
}
1036 | |
/* Reinitialize/release ctl_mtx in the child after fork(). */
void
ctl_postfork_child(tsdn_t *tsdn)
{

	malloc_mutex_postfork_child(tsdn, &ctl_mtx);
}
1043 | |
1044 | /******************************************************************************/ |
1045 | /* *_ctl() functions. */ |
1046 | |
/*
 * Argument-validation and copy helpers shared by the *_ctl() bodies below.
 * Each expands in a context that declares the standard ctl locals (ret,
 * oldp, oldlenp, newp, newlen) and a label_return cleanup label, and bails
 * out via goto on violation.
 */

/* Reject writes: fail with EPERM if a new value was supplied. */
#define READONLY() do { \
	if (newp != NULL || newlen != 0) { \
		ret = EPERM; \
		goto label_return; \
	} \
} while (0)

/* Reject reads: fail with EPERM if an old-value buffer was supplied. */
#define WRITEONLY() do { \
	if (oldp != NULL || oldlenp != NULL) { \
		ret = EPERM; \
		goto label_return; \
	} \
} while (0)

/* Fail with EPERM if the caller attempts to read and write in one call. */
#define READ_XOR_WRITE() do { \
	if ((oldp != NULL && oldlenp != NULL) && (newp != NULL || \
	    newlen != 0)) { \
		ret = EPERM; \
		goto label_return; \
	} \
} while (0)

/*
 * Copy v (of type t) out through oldp.  On a length mismatch, copy as many
 * bytes as both sides allow and fail with EINVAL.
 */
#define READ(v, t) do { \
	if (oldp != NULL && oldlenp != NULL) { \
		if (*oldlenp != sizeof(t)) { \
			size_t copylen = (sizeof(t) <= *oldlenp) \
			    ? sizeof(t) : *oldlenp; \
			memcpy(oldp, (void *)&(v), copylen); \
			ret = EINVAL; \
			goto label_return; \
		} \
		*(t *)oldp = (v); \
	} \
} while (0)

/* Copy the caller's new value (of type t) into v; EINVAL on bad length. */
#define WRITE(v, t) do { \
	if (newp != NULL) { \
		if (newlen != sizeof(t)) { \
			ret = EINVAL; \
			goto label_return; \
		} \
		(v) = *(t *)newp; \
	} \
} while (0)
1091 | |
1092 | /* |
1093 | * There's a lot of code duplication in the following macros due to limitations |
1094 | * in how nested cpp macros are expanded. |
1095 | */ |
1096 | #define CTL_RO_CLGEN(c, l, n, v, t) \ |
1097 | static int \ |
1098 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1099 | size_t *oldlenp, void *newp, size_t newlen) \ |
1100 | { \ |
1101 | int ret; \ |
1102 | t oldval; \ |
1103 | \ |
1104 | if (!(c)) \ |
1105 | return (ENOENT); \ |
1106 | if (l) \ |
1107 | malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \ |
1108 | READONLY(); \ |
1109 | oldval = (v); \ |
1110 | READ(oldval, t); \ |
1111 | \ |
1112 | ret = 0; \ |
1113 | label_return: \ |
1114 | if (l) \ |
1115 | malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \ |
1116 | return (ret); \ |
1117 | } |
1118 | |
1119 | #define CTL_RO_CGEN(c, n, v, t) \ |
1120 | static int \ |
1121 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1122 | size_t *oldlenp, void *newp, size_t newlen) \ |
1123 | { \ |
1124 | int ret; \ |
1125 | t oldval; \ |
1126 | \ |
1127 | if (!(c)) \ |
1128 | return (ENOENT); \ |
1129 | malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \ |
1130 | READONLY(); \ |
1131 | oldval = (v); \ |
1132 | READ(oldval, t); \ |
1133 | \ |
1134 | ret = 0; \ |
1135 | label_return: \ |
1136 | malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \ |
1137 | return (ret); \ |
1138 | } |
1139 | |
1140 | #define CTL_RO_GEN(n, v, t) \ |
1141 | static int \ |
1142 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1143 | size_t *oldlenp, void *newp, size_t newlen) \ |
1144 | { \ |
1145 | int ret; \ |
1146 | t oldval; \ |
1147 | \ |
1148 | malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \ |
1149 | READONLY(); \ |
1150 | oldval = (v); \ |
1151 | READ(oldval, t); \ |
1152 | \ |
1153 | ret = 0; \ |
1154 | label_return: \ |
1155 | malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \ |
1156 | return (ret); \ |
1157 | } |
1158 | |
1159 | /* |
1160 | * ctl_mtx is not acquired, under the assumption that no pertinent data will |
1161 | * mutate during the call. |
1162 | */ |
1163 | #define CTL_RO_NL_CGEN(c, n, v, t) \ |
1164 | static int \ |
1165 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1166 | size_t *oldlenp, void *newp, size_t newlen) \ |
1167 | { \ |
1168 | int ret; \ |
1169 | t oldval; \ |
1170 | \ |
1171 | if (!(c)) \ |
1172 | return (ENOENT); \ |
1173 | READONLY(); \ |
1174 | oldval = (v); \ |
1175 | READ(oldval, t); \ |
1176 | \ |
1177 | ret = 0; \ |
1178 | label_return: \ |
1179 | return (ret); \ |
1180 | } |
1181 | |
1182 | #define CTL_RO_NL_GEN(n, v, t) \ |
1183 | static int \ |
1184 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1185 | size_t *oldlenp, void *newp, size_t newlen) \ |
1186 | { \ |
1187 | int ret; \ |
1188 | t oldval; \ |
1189 | \ |
1190 | READONLY(); \ |
1191 | oldval = (v); \ |
1192 | READ(oldval, t); \ |
1193 | \ |
1194 | ret = 0; \ |
1195 | label_return: \ |
1196 | return (ret); \ |
1197 | } |
1198 | |
1199 | #define CTL_TSD_RO_NL_CGEN(c, n, m, t) \ |
1200 | static int \ |
1201 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1202 | size_t *oldlenp, void *newp, size_t newlen) \ |
1203 | { \ |
1204 | int ret; \ |
1205 | t oldval; \ |
1206 | \ |
1207 | if (!(c)) \ |
1208 | return (ENOENT); \ |
1209 | READONLY(); \ |
1210 | oldval = (m(tsd)); \ |
1211 | READ(oldval, t); \ |
1212 | \ |
1213 | ret = 0; \ |
1214 | label_return: \ |
1215 | return (ret); \ |
1216 | } |
1217 | |
1218 | #define CTL_RO_CONFIG_GEN(n, t) \ |
1219 | static int \ |
1220 | n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \ |
1221 | size_t *oldlenp, void *newp, size_t newlen) \ |
1222 | { \ |
1223 | int ret; \ |
1224 | t oldval; \ |
1225 | \ |
1226 | READONLY(); \ |
1227 | oldval = n; \ |
1228 | READ(oldval, t); \ |
1229 | \ |
1230 | ret = 0; \ |
1231 | label_return: \ |
1232 | return (ret); \ |
1233 | } |
1234 | |
1235 | /******************************************************************************/ |
1236 | |
/* "version": compile-time jemalloc version string (constant; lockless). */
CTL_RO_NL_GEN(version, JEMALLOC_VERSION, const char *)
1238 | |
/*
 * "epoch": writing any uint64_t (the written value itself is ignored)
 * triggers ctl_refresh(), re-snapshotting the stats; reading returns the
 * current value of ctl_epoch.
 */
static int
epoch_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	UNUSED uint64_t newval;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	/* WRITE only validates newlen here; newval is deliberately unused. */
	WRITE(newval, uint64_t);
	if (newp != NULL)
		ctl_refresh(tsd_tsdn(tsd));
	READ(ctl_epoch, uint64_t);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1257 | |
1258 | /******************************************************************************/ |
1259 | |
/* "config.*": compile-time feature flags; all read-only. */
CTL_RO_CONFIG_GEN(config_cache_oblivious, bool)
CTL_RO_CONFIG_GEN(config_debug, bool)
CTL_RO_CONFIG_GEN(config_fill, bool)
CTL_RO_CONFIG_GEN(config_lazy_lock, bool)
CTL_RO_CONFIG_GEN(config_malloc_conf, const char *)
CTL_RO_CONFIG_GEN(config_munmap, bool)
CTL_RO_CONFIG_GEN(config_prof, bool)
CTL_RO_CONFIG_GEN(config_prof_libgcc, bool)
CTL_RO_CONFIG_GEN(config_prof_libunwind, bool)
CTL_RO_CONFIG_GEN(config_stats, bool)
CTL_RO_CONFIG_GEN(config_tcache, bool)
CTL_RO_CONFIG_GEN(config_tls, bool)
CTL_RO_CONFIG_GEN(config_utrace, bool)
CTL_RO_CONFIG_GEN(config_valgrind, bool)
CTL_RO_CONFIG_GEN(config_xmalloc, bool)
1275 | |
1276 | /******************************************************************************/ |
1277 | |
/*
 * "opt.*": run-time option values fixed at startup; read-only and lockless
 * (the underlying opt_* globals do not mutate after initialization).
 */
CTL_RO_NL_GEN(opt_abort, opt_abort, bool)
CTL_RO_NL_GEN(opt_dss, opt_dss, const char *)
CTL_RO_NL_GEN(opt_lg_chunk, opt_lg_chunk, size_t)
CTL_RO_NL_GEN(opt_narenas, opt_narenas, unsigned)
CTL_RO_NL_GEN(opt_purge, purge_mode_names[opt_purge], const char *)
CTL_RO_NL_GEN(opt_lg_dirty_mult, opt_lg_dirty_mult, ssize_t)
CTL_RO_NL_GEN(opt_decay_time, opt_decay_time, ssize_t)
CTL_RO_NL_GEN(opt_stats_print, opt_stats_print, bool)
CTL_RO_NL_CGEN(config_fill, opt_junk, opt_junk, const char *)
CTL_RO_NL_CGEN(config_fill, opt_quarantine, opt_quarantine, size_t)
CTL_RO_NL_CGEN(config_fill, opt_redzone, opt_redzone, bool)
CTL_RO_NL_CGEN(config_fill, opt_zero, opt_zero, bool)
CTL_RO_NL_CGEN(config_utrace, opt_utrace, opt_utrace, bool)
CTL_RO_NL_CGEN(config_xmalloc, opt_xmalloc, opt_xmalloc, bool)
CTL_RO_NL_CGEN(config_tcache, opt_tcache, opt_tcache, bool)
CTL_RO_NL_CGEN(config_tcache, opt_lg_tcache_max, opt_lg_tcache_max, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof, opt_prof, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_prefix, opt_prof_prefix, const char *)
CTL_RO_NL_CGEN(config_prof, opt_prof_active, opt_prof_active, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_thread_active_init,
    opt_prof_thread_active_init, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_sample, opt_lg_prof_sample, size_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_accum, opt_prof_accum, bool)
CTL_RO_NL_CGEN(config_prof, opt_lg_prof_interval, opt_lg_prof_interval, ssize_t)
CTL_RO_NL_CGEN(config_prof, opt_prof_gdump, opt_prof_gdump, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_final, opt_prof_final, bool)
CTL_RO_NL_CGEN(config_prof, opt_prof_leak, opt_prof_leak, bool)
1305 | |
1306 | /******************************************************************************/ |
1307 | |
/*
 * "thread.arena": read, and optionally change, the calling thread's arena
 * binding.  The value read is the binding in effect before any write in
 * the same call.  Writing an out-of-range index yields EFAULT; an arena
 * that cannot be initialized yields EAGAIN.
 */
static int
thread_arena_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	arena_t *oldarena;
	unsigned newind, oldind;

	oldarena = arena_choose(tsd, NULL);
	if (oldarena == NULL)
		return (EAGAIN);

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	newind = oldind = oldarena->ind;
	WRITE(newind, unsigned);
	READ(oldind, unsigned);
	if (newind != oldind) {
		arena_t *newarena;

		if (newind >= ctl_stats.narenas) {
			/* New arena index is out of range. */
			ret = EFAULT;
			goto label_return;
		}

		/* Initialize arena if necessary. */
		newarena = arena_get(tsd_tsdn(tsd), newind, true);
		if (newarena == NULL) {
			ret = EAGAIN;
			goto label_return;
		}
		/* Set new arena/tcache associations. */
		arena_migrate(tsd, oldind, newind);
		if (config_tcache) {
			tcache_t *tcache = tsd_tcache_get(tsd);
			if (tcache != NULL) {
				tcache_arena_reassociate(tsd_tsdn(tsd), tcache,
				    oldarena, newarena);
			}
		}
	}

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1355 | |
/*
 * "thread.allocated"/"thread.deallocated" (and their *p pointer variants):
 * per-thread cumulative byte counters, read from thread-specific data.
 */
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocated, tsd_thread_allocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_allocatedp, tsd_thread_allocatedp_get,
    uint64_t *)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocated, tsd_thread_deallocated_get,
    uint64_t)
CTL_TSD_RO_NL_CGEN(config_stats, thread_deallocatedp,
    tsd_thread_deallocatedp_get, uint64_t *)
1364 | |
/*
 * "thread.tcache.enabled": read/write the calling thread's tcache-enabled
 * flag.  The value read is the flag's state prior to any write performed
 * by the same call.
 */
static int
thread_tcache_enabled_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_tcache)
		return (ENOENT);

	/* Snapshot before applying any write, so the caller sees the old state. */
	oldval = tcache_enabled_get();
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		tcache_enabled_set(*(bool *)newp);
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
1389 | |
/*
 * "thread.tcache.flush": void-style trigger (no old or new value allowed);
 * flushes the calling thread's tcache.
 */
static int
thread_tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_tcache)
		return (ENOENT);

	READONLY();
	WRITEONLY();

	tcache_flush();

	ret = 0;
label_return:
	return (ret);
}
1408 | |
/*
 * "thread.prof.name": read or write (but not both in one call) the calling
 * thread's profiling name.  Write failures propagate
 * prof_thread_name_set()'s error code directly.
 */
static int
thread_prof_name_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	if (!config_prof)
		return (ENOENT);

	READ_XOR_WRITE();

	if (newp != NULL) {
		if (newlen != sizeof(const char *)) {
			ret = EINVAL;
			goto label_return;
		}

		if ((ret = prof_thread_name_set(tsd, *(const char **)newp)) !=
		    0)
			goto label_return;
	} else {
		const char *oldname = prof_thread_name_get(tsd);
		READ(oldname, const char *);
	}

	ret = 0;
label_return:
	return (ret);
}
1438 | |
/*
 * "thread.prof.active": read/write the calling thread's profiling-active
 * flag.  The value read is the state prior to any write in the same call.
 */
static int
thread_prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	bool oldval;

	if (!config_prof)
		return (ENOENT);

	oldval = prof_thread_active_get(tsd);
	if (newp != NULL) {
		if (newlen != sizeof(bool)) {
			ret = EINVAL;
			goto label_return;
		}
		if (prof_thread_active_set(tsd, *(bool *)newp)) {
			ret = EAGAIN;
			goto label_return;
		}
	}
	READ(oldval, bool);

	ret = 0;
label_return:
	return (ret);
}
1466 | |
1467 | /******************************************************************************/ |
1468 | |
/*
 * "tcache.create": create an explicit tcache and return its index through
 * the old buffer.  No new value is accepted; EFAULT on creation failure.
 */
static int
tcache_create_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (tcaches_create(tsd_tsdn(tsd), &tcache_ind)) {
		ret = EFAULT;
		goto label_return;
	}
	READ(tcache_ind, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1492 | |
/*
 * "tcache.flush": write-only; the new value is the explicit tcache index
 * to flush.  EFAULT if no index was supplied (UINT_MAX sentinel survives).
 */
static int
tcache_flush_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_flush(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
1516 | |
/*
 * "tcache.destroy": write-only; the new value is the explicit tcache index
 * to destroy.  EFAULT if no index was supplied (UINT_MAX sentinel survives).
 */
static int
tcache_destroy_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned tcache_ind;

	if (!config_tcache)
		return (ENOENT);

	WRITEONLY();
	tcache_ind = UINT_MAX;
	WRITE(tcache_ind, unsigned);
	if (tcache_ind == UINT_MAX) {
		ret = EFAULT;
		goto label_return;
	}
	tcaches_destroy(tsd, tcache_ind);

	ret = 0;
label_return:
	return (ret);
}
1540 | |
1541 | /******************************************************************************/ |
1542 | |
/*
 * Purge dirty pages for arena arena_ind, or for every arena when arena_ind
 * equals narenas (the "all arenas" pseudo-index).  all distinguishes the
 * two purge modes (true for arena.<i>.purge, false for arena.<i>.decay).
 * ctl_mtx is held only long enough to snapshot narenas and the arena
 * pointer(s); it is dropped before the (potentially slow) purging itself.
 */
static void
arena_i_purge(tsdn_t *tsdn, unsigned arena_ind, bool all)
{

	malloc_mutex_lock(tsdn, &ctl_mtx);
	{
		unsigned narenas = ctl_stats.narenas;

		if (arena_ind == narenas) {
			unsigned i;
			VARIABLE_ARRAY(arena_t *, tarenas, narenas);

			for (i = 0; i < narenas; i++)
				tarenas[i] = arena_get(tsdn, i, false);

			/*
			 * No further need to hold ctl_mtx, since narenas and
			 * tarenas contain everything needed below.
			 */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			for (i = 0; i < narenas; i++) {
				if (tarenas[i] != NULL)
					arena_purge(tsdn, tarenas[i], all);
			}
		} else {
			arena_t *tarena;

			assert(arena_ind < narenas);

			tarena = arena_get(tsdn, arena_ind, false);

			/* No further need to hold ctl_mtx. */
			malloc_mutex_unlock(tsdn, &ctl_mtx);

			if (tarena != NULL)
				arena_purge(tsdn, tarena, all);
		}
	}
}
1583 | |
/* "arena.<i>.purge": void-style trigger; fully purge the arena's dirty pages. */
static int
arena_i_purge_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], true);

	ret = 0;
label_return:
	return (ret);
}
1598 | |
/* "arena.<i>.decay": void-style trigger; decay-based purge (all == false). */
static int
arena_i_decay_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;

	READONLY();
	WRITEONLY();
	arena_i_purge(tsd_tsdn(tsd), (unsigned)mib[1], false);

	ret = 0;
label_return:
	return (ret);
}
1613 | |
/*
 * "arena.<i>.reset": void-style trigger; discard all of the arena's
 * allocations.  Refused (EFAULT) when Valgrind or quarantine is active,
 * since either would be confused by wholesale deallocation.
 */
static int
arena_i_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind;
	arena_t *arena;

	READONLY();
	WRITEONLY();

	if ((config_valgrind && unlikely(in_valgrind)) || (config_fill &&
	    unlikely(opt_quarantine))) {
		ret = EFAULT;
		goto label_return;
	}

	arena_ind = (unsigned)mib[1];
	if (config_debug) {
		malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
		assert(arena_ind < ctl_stats.narenas);
		malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	}
	/*
	 * Only manually created arenas (indices beyond the automatic ones)
	 * may be reset; enforced by assertion only, not in release builds.
	 */
	assert(arena_ind >= opt_narenas);

	arena = arena_get(tsd_tsdn(tsd), arena_ind, false);

	arena_reset(tsd, arena);

	ret = 0;
label_return:
	return (ret);
}
1647 | |
/*
 * "arena.<i>.dss": read/write the dss (sbrk) precedence, for a specific
 * arena (i < narenas) or the chunk-level default (i == narenas).  A written
 * string must match an entry of dss_prec_names[] (EINVAL otherwise).  The
 * value read is fetched after any write, so it reflects the new setting.
 */
static int
arena_i_dss_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	const char *dss = NULL;
	unsigned arena_ind = (unsigned)mib[1];
	dss_prec_t dss_prec_old = dss_prec_limit;
	dss_prec_t dss_prec = dss_prec_limit;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	WRITE(dss, const char *);
	if (dss != NULL) {
		int i;
		bool match = false;

		/* Translate the precedence name into its enum value. */
		for (i = 0; i < dss_prec_limit; i++) {
			if (strcmp(dss_prec_names[i], dss) == 0) {
				dss_prec = i;
				match = true;
				break;
			}
		}

		if (!match) {
			ret = EINVAL;
			goto label_return;
		}
	}

	if (arena_ind < ctl_stats.narenas) {
		arena_t *arena = arena_get(tsd_tsdn(tsd), arena_ind, false);
		if (arena == NULL || (dss_prec != dss_prec_limit &&
		    arena_dss_prec_set(tsd_tsdn(tsd), arena, dss_prec))) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = arena_dss_prec_get(tsd_tsdn(tsd), arena);
	} else {
		/* i == narenas: operate on the global chunk-level default. */
		if (dss_prec != dss_prec_limit &&
		    chunk_dss_prec_set(tsd_tsdn(tsd), dss_prec)) {
			ret = EFAULT;
			goto label_return;
		}
		dss_prec_old = chunk_dss_prec_get(tsd_tsdn(tsd));
	}

	dss = dss_prec_names[dss_prec_old];
	READ(dss, const char *);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1703 | |
1704 | static int |
1705 | arena_i_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, |
1706 | void *oldp, size_t *oldlenp, void *newp, size_t newlen) |
1707 | { |
1708 | int ret; |
1709 | unsigned arena_ind = (unsigned)mib[1]; |
1710 | arena_t *arena; |
1711 | |
1712 | arena = arena_get(tsd_tsdn(tsd), arena_ind, false); |
1713 | if (arena == NULL) { |
1714 | ret = EFAULT; |
1715 | goto label_return; |
1716 | } |
1717 | |
1718 | if (oldp != NULL && oldlenp != NULL) { |
1719 | size_t oldval = arena_lg_dirty_mult_get(tsd_tsdn(tsd), arena); |
1720 | READ(oldval, ssize_t); |
1721 | } |
1722 | if (newp != NULL) { |
1723 | if (newlen != sizeof(ssize_t)) { |
1724 | ret = EINVAL; |
1725 | goto label_return; |
1726 | } |
1727 | if (arena_lg_dirty_mult_set(tsd_tsdn(tsd), arena, |
1728 | *(ssize_t *)newp)) { |
1729 | ret = EFAULT; |
1730 | goto label_return; |
1731 | } |
1732 | } |
1733 | |
1734 | ret = 0; |
1735 | label_return: |
1736 | return (ret); |
1737 | } |
1738 | |
1739 | static int |
1740 | arena_i_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
1741 | size_t *oldlenp, void *newp, size_t newlen) |
1742 | { |
1743 | int ret; |
1744 | unsigned arena_ind = (unsigned)mib[1]; |
1745 | arena_t *arena; |
1746 | |
1747 | arena = arena_get(tsd_tsdn(tsd), arena_ind, false); |
1748 | if (arena == NULL) { |
1749 | ret = EFAULT; |
1750 | goto label_return; |
1751 | } |
1752 | |
1753 | if (oldp != NULL && oldlenp != NULL) { |
1754 | size_t oldval = arena_decay_time_get(tsd_tsdn(tsd), arena); |
1755 | READ(oldval, ssize_t); |
1756 | } |
1757 | if (newp != NULL) { |
1758 | if (newlen != sizeof(ssize_t)) { |
1759 | ret = EINVAL; |
1760 | goto label_return; |
1761 | } |
1762 | if (arena_decay_time_set(tsd_tsdn(tsd), arena, |
1763 | *(ssize_t *)newp)) { |
1764 | ret = EFAULT; |
1765 | goto label_return; |
1766 | } |
1767 | } |
1768 | |
1769 | ret = 0; |
1770 | label_return: |
1771 | return (ret); |
1772 | } |
1773 | |
/*
 * "arena.<i>.chunk_hooks": read the arena's chunk-management hook table,
 * or install a new one; when writing, the previously installed table is
 * copied out through oldp.  EFAULT for an invalid or unallocated arena.
 */
static int
arena_i_chunk_hooks_ctl(tsd_t *tsd, const size_t *mib, size_t miblen,
    void *oldp, size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned arena_ind = (unsigned)mib[1];
	arena_t *arena;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	if (arena_ind < narenas_total_get() && (arena =
	    arena_get(tsd_tsdn(tsd), arena_ind, false)) != NULL) {
		if (newp != NULL) {
			/* Swap: install new hooks, return the old table. */
			chunk_hooks_t old_chunk_hooks, new_chunk_hooks;
			WRITE(new_chunk_hooks, chunk_hooks_t);
			old_chunk_hooks = chunk_hooks_set(tsd_tsdn(tsd), arena,
			    &new_chunk_hooks);
			READ(old_chunk_hooks, chunk_hooks_t);
		} else {
			chunk_hooks_t old_chunk_hooks =
			    chunk_hooks_get(tsd_tsdn(tsd), arena);
			READ(old_chunk_hooks, chunk_hooks_t);
		}
	} else {
		ret = EFAULT;
		goto label_return;
	}
	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1805 | |
/*
 * Index handler for "arena.<i>.*".  Note the '>' (not '>='): i == narenas
 * is deliberately valid, addressing all arenas at once (see arena_i_purge
 * and arena_i_dss_ctl).
 */
static const ctl_named_node_t *
arena_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i)
{
	const ctl_named_node_t *ret;

	malloc_mutex_lock(tsdn, &ctl_mtx);
	if (i > ctl_stats.narenas) {
		ret = NULL;
		goto label_return;
	}

	ret = super_arena_i_node;
label_return:
	malloc_mutex_unlock(tsdn, &ctl_mtx);
	return (ret);
}
1822 | |
1823 | /******************************************************************************/ |
1824 | |
/*
 * "arenas.narenas": read-only; the number of arenas as cached in ctl_stats.
 * NOTE(review): *oldlenp is dereferenced without a NULL check; the mallctl
 * entry points appear to always supply it — confirm against callers.
 */
static int
arenas_narenas_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned narenas;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (*oldlenp != sizeof(unsigned)) {
		ret = EINVAL;
		goto label_return;
	}
	narenas = ctl_stats.narenas;
	READ(narenas, unsigned);

	ret = 0;
label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1846 | |
/*
 * "arenas.initialized": copy out one bool per arena.  On a buffer-length
 * mismatch, still copy as many entries as fit and return EINVAL.
 * NOTE(review): oldp and *oldlenp are used without NULL checks; presumably
 * mallctl guarantees them for reads — confirm against callers.
 */
static int
arenas_initialized_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
    size_t *oldlenp, void *newp, size_t newlen)
{
	int ret;
	unsigned nread, i;

	malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);
	READONLY();
	if (*oldlenp != ctl_stats.narenas * sizeof(bool)) {
		ret = EINVAL;
		/* Partial copy: as many entries as the smaller side holds. */
		nread = (*oldlenp < ctl_stats.narenas * sizeof(bool))
		    ? (unsigned)(*oldlenp / sizeof(bool)) : ctl_stats.narenas;
	} else {
		ret = 0;
		nread = ctl_stats.narenas;
	}

	for (i = 0; i < nread; i++)
		((bool *)oldp)[i] = ctl_stats.arenas[i].initialized;

label_return:
	malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx);
	return (ret);
}
1872 | |
1873 | static int |
1874 | arenas_lg_dirty_mult_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, |
1875 | void *oldp, size_t *oldlenp, void *newp, size_t newlen) |
1876 | { |
1877 | int ret; |
1878 | |
1879 | if (oldp != NULL && oldlenp != NULL) { |
1880 | size_t oldval = arena_lg_dirty_mult_default_get(); |
1881 | READ(oldval, ssize_t); |
1882 | } |
1883 | if (newp != NULL) { |
1884 | if (newlen != sizeof(ssize_t)) { |
1885 | ret = EINVAL; |
1886 | goto label_return; |
1887 | } |
1888 | if (arena_lg_dirty_mult_default_set(*(ssize_t *)newp)) { |
1889 | ret = EFAULT; |
1890 | goto label_return; |
1891 | } |
1892 | } |
1893 | |
1894 | ret = 0; |
1895 | label_return: |
1896 | return (ret); |
1897 | } |
1898 | |
1899 | static int |
1900 | arenas_decay_time_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
1901 | size_t *oldlenp, void *newp, size_t newlen) |
1902 | { |
1903 | int ret; |
1904 | |
1905 | if (oldp != NULL && oldlenp != NULL) { |
1906 | size_t oldval = arena_decay_time_default_get(); |
1907 | READ(oldval, ssize_t); |
1908 | } |
1909 | if (newp != NULL) { |
1910 | if (newlen != sizeof(ssize_t)) { |
1911 | ret = EINVAL; |
1912 | goto label_return; |
1913 | } |
1914 | if (arena_decay_time_default_set(*(ssize_t *)newp)) { |
1915 | ret = EFAULT; |
1916 | goto label_return; |
1917 | } |
1918 | } |
1919 | |
1920 | ret = 0; |
1921 | label_return: |
1922 | return (ret); |
1923 | } |
1924 | |
/*
 * Read-only, lock-free introspection of the size-class layout and tcache
 * geometry.
 */
CTL_RO_NL_GEN(arenas_quantum, QUANTUM, size_t)
CTL_RO_NL_GEN(arenas_page, PAGE, size_t)
CTL_RO_NL_CGEN(config_tcache, arenas_tcache_max, tcache_maxclass, size_t)
CTL_RO_NL_GEN(arenas_nbins, NBINS, unsigned)
CTL_RO_NL_CGEN(config_tcache, arenas_nhbins, nhbins, unsigned)
/* Per-bin geometry, indexed by bin index mib[2]. */
CTL_RO_NL_GEN(arenas_bin_i_size, arena_bin_info[mib[2]].reg_size, size_t)
CTL_RO_NL_GEN(arenas_bin_i_nregs, arena_bin_info[mib[2]].nregs, uint32_t)
CTL_RO_NL_GEN(arenas_bin_i_run_size, arena_bin_info[mib[2]].run_size, size_t)
1933 | static const ctl_named_node_t * |
1934 | arenas_bin_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) |
1935 | { |
1936 | |
1937 | if (i > NBINS) |
1938 | return (NULL); |
1939 | return (super_arenas_bin_i_node); |
1940 | } |
1941 | |
/* Number of large ("run") size classes, and the size of each (mib[2]). */
CTL_RO_NL_GEN(arenas_nlruns, nlclasses, unsigned)
CTL_RO_NL_GEN(arenas_lrun_i_size, index2size(NBINS+(szind_t)mib[2]), size_t)
1944 | static const ctl_named_node_t * |
1945 | arenas_lrun_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) |
1946 | { |
1947 | |
1948 | if (i > nlclasses) |
1949 | return (NULL); |
1950 | return (super_arenas_lrun_i_node); |
1951 | } |
1952 | |
/* Number of huge ("chunk") size classes, and the size of each (mib[2]). */
CTL_RO_NL_GEN(arenas_nhchunks, nhclasses, unsigned)
CTL_RO_NL_GEN(arenas_hchunk_i_size, index2size(NBINS+nlclasses+(szind_t)mib[2]),
    size_t)
1956 | static const ctl_named_node_t * |
1957 | arenas_hchunk_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) |
1958 | { |
1959 | |
1960 | if (i > nhclasses) |
1961 | return (NULL); |
1962 | return (super_arenas_hchunk_i_node); |
1963 | } |
1964 | |
1965 | static int |
1966 | arenas_extend_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
1967 | size_t *oldlenp, void *newp, size_t newlen) |
1968 | { |
1969 | int ret; |
1970 | unsigned narenas; |
1971 | |
1972 | malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); |
1973 | READONLY(); |
1974 | if (ctl_grow(tsd_tsdn(tsd))) { |
1975 | ret = EAGAIN; |
1976 | goto label_return; |
1977 | } |
1978 | narenas = ctl_stats.narenas - 1; |
1979 | READ(narenas, unsigned); |
1980 | |
1981 | ret = 0; |
1982 | label_return: |
1983 | malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); |
1984 | return (ret); |
1985 | } |
1986 | |
1987 | /******************************************************************************/ |
1988 | |
1989 | static int |
1990 | prof_thread_active_init_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, |
1991 | void *oldp, size_t *oldlenp, void *newp, size_t newlen) |
1992 | { |
1993 | int ret; |
1994 | bool oldval; |
1995 | |
1996 | if (!config_prof) |
1997 | return (ENOENT); |
1998 | |
1999 | if (newp != NULL) { |
2000 | if (newlen != sizeof(bool)) { |
2001 | ret = EINVAL; |
2002 | goto label_return; |
2003 | } |
2004 | oldval = prof_thread_active_init_set(tsd_tsdn(tsd), |
2005 | *(bool *)newp); |
2006 | } else |
2007 | oldval = prof_thread_active_init_get(tsd_tsdn(tsd)); |
2008 | READ(oldval, bool); |
2009 | |
2010 | ret = 0; |
2011 | label_return: |
2012 | return (ret); |
2013 | } |
2014 | |
2015 | static int |
2016 | prof_active_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
2017 | size_t *oldlenp, void *newp, size_t newlen) |
2018 | { |
2019 | int ret; |
2020 | bool oldval; |
2021 | |
2022 | if (!config_prof) |
2023 | return (ENOENT); |
2024 | |
2025 | if (newp != NULL) { |
2026 | if (newlen != sizeof(bool)) { |
2027 | ret = EINVAL; |
2028 | goto label_return; |
2029 | } |
2030 | oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp); |
2031 | } else |
2032 | oldval = prof_active_get(tsd_tsdn(tsd)); |
2033 | READ(oldval, bool); |
2034 | |
2035 | ret = 0; |
2036 | label_return: |
2037 | return (ret); |
2038 | } |
2039 | |
2040 | static int |
2041 | prof_dump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
2042 | size_t *oldlenp, void *newp, size_t newlen) |
2043 | { |
2044 | int ret; |
2045 | const char *filename = NULL; |
2046 | |
2047 | if (!config_prof) |
2048 | return (ENOENT); |
2049 | |
2050 | WRITEONLY(); |
2051 | WRITE(filename, const char *); |
2052 | |
2053 | if (prof_mdump(tsd, filename)) { |
2054 | ret = EFAULT; |
2055 | goto label_return; |
2056 | } |
2057 | |
2058 | ret = 0; |
2059 | label_return: |
2060 | return (ret); |
2061 | } |
2062 | |
2063 | static int |
2064 | prof_gdump_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
2065 | size_t *oldlenp, void *newp, size_t newlen) |
2066 | { |
2067 | int ret; |
2068 | bool oldval; |
2069 | |
2070 | if (!config_prof) |
2071 | return (ENOENT); |
2072 | |
2073 | if (newp != NULL) { |
2074 | if (newlen != sizeof(bool)) { |
2075 | ret = EINVAL; |
2076 | goto label_return; |
2077 | } |
2078 | oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp); |
2079 | } else |
2080 | oldval = prof_gdump_get(tsd_tsdn(tsd)); |
2081 | READ(oldval, bool); |
2082 | |
2083 | ret = 0; |
2084 | label_return: |
2085 | return (ret); |
2086 | } |
2087 | |
2088 | static int |
2089 | prof_reset_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, |
2090 | size_t *oldlenp, void *newp, size_t newlen) |
2091 | { |
2092 | int ret; |
2093 | size_t lg_sample = lg_prof_sample; |
2094 | |
2095 | if (!config_prof) |
2096 | return (ENOENT); |
2097 | |
2098 | WRITEONLY(); |
2099 | WRITE(lg_sample, size_t); |
2100 | if (lg_sample >= (sizeof(uint64_t) << 3)) |
2101 | lg_sample = (sizeof(uint64_t) << 3) - 1; |
2102 | |
2103 | prof_reset(tsd_tsdn(tsd), lg_sample); |
2104 | |
2105 | ret = 0; |
2106 | label_return: |
2107 | return (ret); |
2108 | } |
2109 | |
/* Profiling parameters fixed at initialization (require config_prof). */
CTL_RO_NL_CGEN(config_prof, prof_interval, prof_interval, uint64_t)
CTL_RO_NL_CGEN(config_prof, lg_prof_sample, lg_prof_sample, size_t)
2112 | |
2113 | /******************************************************************************/ |
2114 | |
/*
 * Global statistics snapshot fields from ctl_stats.  stats.cactive is
 * special: it exports a pointer to the live counter rather than a copied
 * value.
 */
CTL_RO_CGEN(config_stats, stats_cactive, &stats_cactive, size_t *)
CTL_RO_CGEN(config_stats, stats_allocated, ctl_stats.allocated, size_t)
CTL_RO_CGEN(config_stats, stats_active, ctl_stats.active, size_t)
CTL_RO_CGEN(config_stats, stats_metadata, ctl_stats.metadata, size_t)
CTL_RO_CGEN(config_stats, stats_resident, ctl_stats.resident, size_t)
CTL_RO_CGEN(config_stats, stats_mapped, ctl_stats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_retained, ctl_stats.retained, size_t)
2122 | |
/* Per-arena snapshot fields, indexed by arena index mib[2]. */
CTL_RO_GEN(stats_arenas_i_dss, ctl_stats.arenas[mib[2]].dss, const char *)
CTL_RO_GEN(stats_arenas_i_lg_dirty_mult, ctl_stats.arenas[mib[2]].lg_dirty_mult,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_decay_time, ctl_stats.arenas[mib[2]].decay_time,
    ssize_t)
CTL_RO_GEN(stats_arenas_i_nthreads, ctl_stats.arenas[mib[2]].nthreads, unsigned)
CTL_RO_GEN(stats_arenas_i_pactive, ctl_stats.arenas[mib[2]].pactive, size_t)
CTL_RO_GEN(stats_arenas_i_pdirty, ctl_stats.arenas[mib[2]].pdirty, size_t)
/* Arena-wide counters (available only with config_stats). */
CTL_RO_CGEN(config_stats, stats_arenas_i_mapped,
    ctl_stats.arenas[mib[2]].astats.mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_retained,
    ctl_stats.arenas[mib[2]].astats.retained, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_npurge,
    ctl_stats.arenas[mib[2]].astats.npurge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_nmadvise,
    ctl_stats.arenas[mib[2]].astats.nmadvise, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_purged,
    ctl_stats.arenas[mib[2]].astats.purged, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_mapped,
    ctl_stats.arenas[mib[2]].astats.metadata_mapped, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_metadata_allocated,
    ctl_stats.arenas[mib[2]].astats.metadata_allocated, size_t)

/* Small size-class totals. */
CTL_RO_CGEN(config_stats, stats_arenas_i_small_allocated,
    ctl_stats.arenas[mib[2]].allocated_small, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nmalloc,
    ctl_stats.arenas[mib[2]].nmalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_ndalloc,
    ctl_stats.arenas[mib[2]].ndalloc_small, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_small_nrequests,
    ctl_stats.arenas[mib[2]].nrequests_small, uint64_t)
/* Large size-class totals. */
CTL_RO_CGEN(config_stats, stats_arenas_i_large_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_large, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_large, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_large_nrequests,
    ctl_stats.arenas[mib[2]].astats.nrequests_large, uint64_t)
/* Huge size-class totals. */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_allocated,
    ctl_stats.arenas[mib[2]].astats.allocated_huge, size_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nmalloc,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_ndalloc,
    ctl_stats.arenas[mib[2]].astats.ndalloc_huge, uint64_t)
/*
 * Huge nrequests deliberately aliases nmalloc_huge: presumably every huge
 * request maps 1:1 to a huge allocation (no cache absorbs requests), so the
 * two counters are identical by construction.
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_huge_nrequests,
    ctl_stats.arenas[mib[2]].astats.nmalloc_huge, uint64_t) /* Intentional. */

/* Per-bin statistics, indexed by bin index mib[4]. */
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nmalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_ndalloc,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nrequests,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curregs,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curregs, size_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nfills,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nfills, uint64_t)
CTL_RO_CGEN(config_stats && config_tcache, stats_arenas_i_bins_j_nflushes,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nflushes, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].nruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_nreruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].reruns, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_bins_j_curruns,
    ctl_stats.arenas[mib[2]].bstats[mib[4]].curruns, size_t)
2189 | |
2190 | static const ctl_named_node_t * |
2191 | stats_arenas_i_bins_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, |
2192 | size_t j) |
2193 | { |
2194 | |
2195 | if (j > NBINS) |
2196 | return (NULL); |
2197 | return (super_stats_arenas_i_bins_j_node); |
2198 | } |
2199 | |
/* Per-large-size-class ("run") statistics, indexed by mib[4]. */
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nmalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_ndalloc,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].ndalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_nrequests,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].nrequests, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_lruns_j_curruns,
    ctl_stats.arenas[mib[2]].lstats[mib[4]].curruns, size_t)
2208 | |
2209 | static const ctl_named_node_t * |
2210 | stats_arenas_i_lruns_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, |
2211 | size_t j) |
2212 | { |
2213 | |
2214 | if (j > nlclasses) |
2215 | return (NULL); |
2216 | return (super_stats_arenas_i_lruns_j_node); |
2217 | } |
2218 | |
/* Per-huge-size-class ("chunk") statistics, indexed by mib[4]. */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nmalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_ndalloc,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].ndalloc, uint64_t)
/*
 * nrequests deliberately aliases nmalloc: presumably huge requests map 1:1
 * to huge allocations, so the counters coincide ("Intentional" below).
 */
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_nrequests,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].nmalloc, /* Intentional. */
    uint64_t)
CTL_RO_CGEN(config_stats, stats_arenas_i_hchunks_j_curhchunks,
    ctl_stats.arenas[mib[2]].hstats[mib[4]].curhchunks, size_t)
2228 | |
2229 | static const ctl_named_node_t * |
2230 | stats_arenas_i_hchunks_j_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, |
2231 | size_t j) |
2232 | { |
2233 | |
2234 | if (j > nhclasses) |
2235 | return (NULL); |
2236 | return (super_stats_arenas_i_hchunks_j_node); |
2237 | } |
2238 | |
2239 | static const ctl_named_node_t * |
2240 | stats_arenas_i_index(tsdn_t *tsdn, const size_t *mib, size_t miblen, size_t i) |
2241 | { |
2242 | const ctl_named_node_t * ret; |
2243 | |
2244 | malloc_mutex_lock(tsdn, &ctl_mtx); |
2245 | if (i > ctl_stats.narenas || !ctl_stats.arenas[i].initialized) { |
2246 | ret = NULL; |
2247 | goto label_return; |
2248 | } |
2249 | |
2250 | ret = super_stats_arenas_i_node; |
2251 | label_return: |
2252 | malloc_mutex_unlock(tsdn, &ctl_mtx); |
2253 | return (ret); |
2254 | } |
2255 | |