1//
2// immer: immutable data structures for C++
3// Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente
4//
5// This software is distributed under the Boost Software License, Version 1.0.
6// See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt
7//
8
9#pragma once
10
11#include <immer/heap/tags.hpp>
12#include <immer/detail/combine_standard_layout.hpp>
13#include <immer/detail/util.hpp>
14#include <immer/detail/rbts/bits.hpp>
15
16#include <cassert>
17#include <cstddef>
18#include <memory>
19#include <type_traits>
20
21namespace immer {
22namespace detail {
23namespace rbts {
24
25template <typename T,
26 typename MemoryPolicy,
27 bits_t B,
28 bits_t BL>
29struct node
30{
31 static constexpr auto bits = B;
32 static constexpr auto bits_leaf = BL;
33
34 using node_t = node;
35 using memory = MemoryPolicy;
36 using heap_policy = typename memory::heap;
37 using transience = typename memory::transience_t;
38 using refs_t = typename memory::refcount;
39 using ownee_t = typename transience::ownee;
40 using edit_t = typename transience::edit;
41 using value_t = T;
42
43 static constexpr bool embed_relaxed = memory::prefer_fewer_bigger_objects;
44
45 enum class kind_t
46 {
47 leaf,
48 inner
49 };
50
51 struct relaxed_data_t
52 {
53 count_t count;
54 size_t sizes[branches<B>];
55 };
56
57 using relaxed_data_with_meta_t =
58 combine_standard_layout_t<relaxed_data_t,
59 refs_t,
60 ownee_t>;
61
62 using relaxed_data_no_meta_t =
63 combine_standard_layout_t<relaxed_data_t>;
64
65 using relaxed_t = std::conditional_t<embed_relaxed,
66 relaxed_data_no_meta_t,
67 relaxed_data_with_meta_t>;
68
69 struct leaf_t
70 {
71 aligned_storage_for<T> buffer;
72 };
73
74 struct inner_t
75 {
76 relaxed_t* relaxed;
77 aligned_storage_for<node_t*> buffer;
78 };
79
80 union data_t
81 {
82 inner_t inner;
83 leaf_t leaf;
84 };
85
86 struct impl_data_t
87 {
88#if IMMER_TAGGED_NODE
89 kind_t kind;
90#endif
91 data_t data;
92 };
93
94 using impl_t = combine_standard_layout_t<
95 impl_data_t, refs_t, ownee_t>;
96
97 impl_t impl;
98
99 // assume that we need to keep headroom space in the node when we
100 // are doing reference counting, since any node may become
101 // transient when it has only one reference
102 constexpr static bool keep_headroom = !std::is_empty<refs_t>{};
103
104 constexpr static std::size_t sizeof_packed_leaf_n(count_t count)
105 {
106 return immer_offsetof(impl_t, d.data.leaf.buffer)
107 + sizeof(leaf_t::buffer) * count;
108 }
109
110 constexpr static std::size_t sizeof_packed_inner_n(count_t count)
111 {
112 return immer_offsetof(impl_t, d.data.inner.buffer)
113 + sizeof(inner_t::buffer) * count;
114 }
115
116 constexpr static std::size_t sizeof_packed_relaxed_n(count_t count)
117 {
118 return immer_offsetof(relaxed_t, d.sizes)
119 + sizeof(size_t) * count;
120 }
121
122 constexpr static std::size_t sizeof_packed_inner_r_n(count_t count)
123 {
124 return embed_relaxed
125 ? sizeof_packed_inner_n(count) + sizeof_packed_relaxed_n(count)
126 : sizeof_packed_inner_n(count);
127 }
128
129 constexpr static std::size_t max_sizeof_leaf =
130 sizeof_packed_leaf_n(branches<BL>);
131
132 constexpr static std::size_t max_sizeof_inner =
133 sizeof_packed_inner_n(branches<B>);
134
135 constexpr static std::size_t max_sizeof_relaxed =
136 sizeof_packed_relaxed_n(branches<B>);
137
138 constexpr static std::size_t max_sizeof_inner_r =
139 sizeof_packed_inner_r_n(branches<B>);
140
141 constexpr static std::size_t sizeof_inner_n(count_t n)
142 { return keep_headroom ? max_sizeof_inner : sizeof_packed_inner_n(n); }
143
144 constexpr static std::size_t sizeof_inner_r_n(count_t n)
145 { return keep_headroom ? max_sizeof_inner_r : sizeof_packed_inner_r_n(n); }
146
147 constexpr static std::size_t sizeof_relaxed_n(count_t n)
148 { return keep_headroom ? max_sizeof_relaxed : sizeof_packed_relaxed_n(n); }
149
150 constexpr static std::size_t sizeof_leaf_n(count_t n)
151 { return keep_headroom ? max_sizeof_leaf : sizeof_packed_leaf_n(n); }
152
153 using heap = typename heap_policy::template
154 optimized<max_sizeof_inner>::type;
155
156#if IMMER_TAGGED_NODE
157 kind_t kind() const
158 {
159 return impl.d.kind;
160 }
161#endif
162
163 relaxed_t* relaxed()
164 {
165 IMMER_ASSERT_TAGGED(kind() == kind_t::inner);
166 return impl.d.data.inner.relaxed;
167 }
168
169 const relaxed_t* relaxed() const
170 {
171 IMMER_ASSERT_TAGGED(kind() == kind_t::inner);
172 return impl.d.data.inner.relaxed;
173 }
174
175 node_t** inner()
176 {
177 IMMER_ASSERT_TAGGED(kind() == kind_t::inner);
178 return reinterpret_cast<node_t**>(&impl.d.data.inner.buffer);
179 }
180
181 T* leaf()
182 {
183 IMMER_ASSERT_TAGGED(kind() == kind_t::leaf);
184 return reinterpret_cast<T*>(&impl.d.data.leaf.buffer);
185 }
186
187 static refs_t& refs(const relaxed_t* x) { return auto_const_cast(get<refs_t>(*x)); }
188 static const ownee_t& ownee(const relaxed_t* x) { return get<ownee_t>(*x); }
189 static ownee_t& ownee(relaxed_t* x) { return get<ownee_t>(*x); }
190
191 static refs_t& refs(const node_t* x) { return auto_const_cast(get<refs_t>(x->impl)); }
192 static const ownee_t& ownee(const node_t* x) { return get<ownee_t>(x->impl); }
193 static ownee_t& ownee(node_t* x) { return get<ownee_t>(x->impl); }
194
195 static node_t* make_inner_n(count_t n)
196 {
197 assert(n <= branches<B>);
198 auto m = heap::allocate(sizeof_inner_n(n));
199 auto p = new (m) node_t;
200 p->impl.d.data.inner.relaxed = nullptr;
201#if IMMER_TAGGED_NODE
202 p->impl.d.kind = node_t::kind_t::inner;
203#endif
204 return p;
205 }
206
207 static node_t* make_inner_e(edit_t e)
208 {
209 auto m = heap::allocate(max_sizeof_inner);
210 auto p = new (m) node_t;
211 ownee(p) = e;
212 p->impl.d.data.inner.relaxed = nullptr;
213#if IMMER_TAGGED_NODE
214 p->impl.d.kind = node_t::kind_t::inner;
215#endif
216 return p;
217 }
218
219 static node_t* make_inner_r_n(count_t n)
220 {
221 assert(n <= branches<B>);
222 auto mp = heap::allocate(sizeof_inner_r_n(n));
223 auto mr = static_cast<void*>(nullptr);
224 if (embed_relaxed) {
225 mr = reinterpret_cast<unsigned char*>(mp) + sizeof_inner_n(n);
226 } else {
227 try {
228 mr = heap::allocate(sizeof_relaxed_n(n), norefs_tag{});
229 } catch (...) {
230 heap::deallocate(sizeof_inner_r_n(n), mp);
231 throw;
232 }
233 }
234 auto p = new (mp) node_t;
235 auto r = new (mr) relaxed_t;
236 r->d.count = 0;
237 p->impl.d.data.inner.relaxed = r;
238#if IMMER_TAGGED_NODE
239 p->impl.d.kind = node_t::kind_t::inner;
240#endif
241 return p;
242 }
243
244 static node_t* make_inner_sr_n(count_t n, relaxed_t* r)
245 {
246 return static_if<embed_relaxed, node_t*>(
247 [&] (auto) {
248 return node_t::make_inner_r_n(n);
249 },
250 [&] (auto) {
251 auto p = new (heap::allocate(node_t::sizeof_inner_r_n(n))) node_t;
252 assert(r->d.count >= n);
253 node_t::refs(r).inc();
254 p->impl.d.data.inner.relaxed = r;
255#if IMMER_TAGGED_NODE
256 p->impl.d.kind = node_t::kind_t::inner;
257#endif
258 return p;
259 });
260 }
261
262 static node_t* make_inner_r_e(edit_t e)
263 {
264 auto mp = heap::allocate(max_sizeof_inner_r);
265 auto mr = static_cast<void*>(nullptr);
266 if (embed_relaxed) {
267 mr = reinterpret_cast<unsigned char*>(mp) + max_sizeof_inner;
268 } else {
269 try {
270 mr = heap::allocate(max_sizeof_relaxed, norefs_tag{});
271 } catch (...) {
272 heap::deallocate(max_sizeof_inner_r, mp);
273 throw;
274 }
275 }
276 auto p = new (mp) node_t;
277 auto r = new (mr) relaxed_t;
278 ownee(p) = e;
279 static_if<!embed_relaxed>([&](auto){ node_t::ownee(r) = e; });
280 r->d.count = 0;
281 p->impl.d.data.inner.relaxed = r;
282#if IMMER_TAGGED_NODE
283 p->impl.d.kind = node_t::kind_t::inner;
284#endif
285 return p;
286 }
287
288 static node_t* make_inner_sr_e(edit_t e, relaxed_t* r)
289 {
290 return static_if<embed_relaxed, node_t*>(
291 [&] (auto) {
292 return node_t::make_inner_r_e(e);
293 },
294 [&] (auto) {
295 auto p = new (heap::allocate(node_t::max_sizeof_inner_r)) node_t;
296 node_t::refs(r).inc();
297 p->impl.d.data.inner.relaxed = r;
298 node_t::ownee(p) = e;
299#if IMMER_TAGGED_NODE
300 p->impl.d.kind = node_t::kind_t::inner;
301#endif
302 return p;
303 });
304 }
305
306 static node_t* make_leaf_n(count_t n)
307 {
308 assert(n <= branches<BL>);
309 auto p = new (heap::allocate(sizeof_leaf_n(n))) node_t;
310#if IMMER_TAGGED_NODE
311 p->impl.d.kind = node_t::kind_t::leaf;
312#endif
313 return p;
314 }
315
316 static node_t* make_leaf_e(edit_t e)
317 {
318 auto p = new (heap::allocate(max_sizeof_leaf)) node_t;
319 ownee(p) = e;
320#if IMMER_TAGGED_NODE
321 p->impl.d.kind = node_t::kind_t::leaf;
322#endif
323 return p;
324 }
325
326 static node_t* make_inner_n(count_t n, node_t* x)
327 {
328 assert(n >= 1);
329 auto p = make_inner_n(n);
330 p->inner() [0] = x;
331 return p;
332 }
333
334 static node_t* make_inner_n(edit_t n, node_t* x)
335 {
336 assert(n >= 1);
337 auto p = make_inner_n(n);
338 p->inner() [0] = x;
339 return p;
340 }
341
342 static node_t* make_inner_n(count_t n, node_t* x, node_t* y)
343 {
344 assert(n >= 2);
345 auto p = make_inner_n(n);
346 p->inner() [0] = x;
347 p->inner() [1] = y;
348 return p;
349 }
350
351 static node_t* make_inner_r_n(count_t n, node_t* x)
352 {
353 assert(n >= 1);
354 auto p = make_inner_r_n(n);
355 auto r = p->relaxed();
356 p->inner() [0] = x;
357 r->d.count = 1;
358 return p;
359 }
360
361 static node_t* make_inner_r_n(count_t n, node_t* x, size_t xs)
362 {
363 assert(n >= 1);
364 auto p = make_inner_r_n(n);
365 auto r = p->relaxed();
366 p->inner() [0] = x;
367 r->d.sizes [0] = xs;
368 r->d.count = 1;
369 return p;
370 }
371
372 static node_t* make_inner_r_n(count_t n, node_t* x, node_t* y)
373 {
374 assert(n >= 2);
375 auto p = make_inner_r_n(n);
376 auto r = p->relaxed();
377 p->inner() [0] = x;
378 p->inner() [1] = y;
379 r->d.count = 2;
380 return p;
381 }
382
383 static node_t* make_inner_r_n(count_t n,
384 node_t* x, size_t xs,
385 node_t* y)
386 {
387 assert(n >= 2);
388 auto p = make_inner_r_n(n);
389 auto r = p->relaxed();
390 p->inner() [0] = x;
391 p->inner() [1] = y;
392 r->d.sizes [0] = xs;
393 r->d.count = 2;
394 return p;
395 }
396
397 static node_t* make_inner_r_n(count_t n,
398 node_t* x, size_t xs,
399 node_t* y, size_t ys)
400 {
401 assert(n >= 2);
402 auto p = make_inner_r_n(n);
403 auto r = p->relaxed();
404 p->inner() [0] = x;
405 p->inner() [1] = y;
406 r->d.sizes [0] = xs;
407 r->d.sizes [1] = xs + ys;
408 r->d.count = 2;
409 return p;
410 }
411
412 static node_t* make_inner_r_n(count_t n,
413 node_t* x, size_t xs,
414 node_t* y, size_t ys,
415 node_t* z, size_t zs)
416 {
417 assert(n >= 3);
418 auto p = make_inner_r_n(n);
419 auto r = p->relaxed();
420 p->inner() [0] = x;
421 p->inner() [1] = y;
422 p->inner() [2] = z;
423 r->d.sizes [0] = xs;
424 r->d.sizes [1] = xs + ys;
425 r->d.sizes [2] = xs + ys + zs;
426 r->d.count = 3;
427 return p;
428 }
429
430 template <typename U>
431 static node_t* make_leaf_n(count_t n, U&& x)
432 {
433 assert(n >= 1);
434 auto p = make_leaf_n(n);
435 try {
436 new (p->leaf()) T{ std::forward<U>(x) };
437 } catch (...) {
438 heap::deallocate(node_t::sizeof_leaf_n(n), p);
439 throw;
440 }
441 return p;
442 }
443
444 template <typename U>
445 static node_t* make_leaf_e(edit_t e, U&& x)
446 {
447 auto p = make_leaf_e(e);
448 try {
449 new (p->leaf()) T{ std::forward<U>(x) };
450 } catch (...) {
451 heap::deallocate(node_t::max_sizeof_leaf, p);
452 throw;
453 }
454 return p;
455 }
456
457 static node_t* make_path(shift_t shift, node_t* node)
458 {
459 IMMER_ASSERT_TAGGED(node->kind() == kind_t::leaf);
460 if (shift == endshift<B, BL>)
461 return node;
462 else {
463 auto n = node_t::make_inner_n(1);
464 try {
465 n->inner() [0] = make_path(shift - B, node);
466 } catch (...) {
467 heap::deallocate(node_t::sizeof_inner_n(1), n);
468 throw;
469 }
470 return n;
471 }
472 }
473
474 static node_t* make_path_e(edit_t e, shift_t shift, node_t* node)
475 {
476 IMMER_ASSERT_TAGGED(node->kind() == kind_t::leaf);
477 if (shift == endshift<B, BL>)
478 return node;
479 else {
480 auto n = node_t::make_inner_e(e);
481 try {
482 n->inner() [0] = make_path_e(e, shift - B, node);
483 } catch (...) {
484 heap::deallocate(node_t::max_sizeof_inner, n);
485 throw;
486 }
487 return n;
488 }
489 }
490
491 static node_t* copy_inner(node_t* src, count_t n)
492 {
493 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
494 auto dst = make_inner_n(n);
495 inc_nodes(src->inner(), n);
496 std::uninitialized_copy(src->inner(), src->inner() + n, dst->inner());
497 return dst;
498 }
499
500 static node_t* copy_inner_n(count_t allocn, node_t* src, count_t n)
501 {
502 assert(allocn >= n);
503 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
504 auto dst = make_inner_n(allocn);
505 return do_copy_inner(dst, src, n);
506 }
507
508 static node_t* copy_inner_e(edit_t e, node_t* src, count_t n)
509 {
510 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
511 auto dst = make_inner_e(e);
512 return do_copy_inner(dst, src, n);
513 }
514
515 static node_t* do_copy_inner(node_t* dst, node_t* src, count_t n)
516 {
517 IMMER_ASSERT_TAGGED(dst->kind() == kind_t::inner);
518 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
519 auto p = src->inner();
520 inc_nodes(p, n);
521 std::uninitialized_copy(p, p + n, dst->inner());
522 return dst;
523 }
524
525 static node_t* copy_inner_r(node_t* src, count_t n)
526 {
527 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
528 auto dst = make_inner_r_n(n);
529 return do_copy_inner_r(dst, src, n);
530 }
531
532 static node_t* copy_inner_r_n(count_t allocn, node_t* src, count_t n)
533 {
534 assert(allocn >= n);
535 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
536 auto dst = make_inner_r_n(allocn);
537 return do_copy_inner_r(dst, src, n);
538 }
539
540 static node_t* copy_inner_r_e(edit_t e, node_t* src, count_t n)
541 {
542 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
543 auto dst = make_inner_r_e(e);
544 return do_copy_inner_r(dst, src, n);
545 }
546
547 static node_t* copy_inner_sr_e(edit_t e, node_t* src, count_t n)
548 {
549 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
550 auto dst = make_inner_sr_e(e, src->relaxed());
551 return do_copy_inner_sr(dst, src, n);
552 }
553
554 static node_t* do_copy_inner_r(node_t* dst, node_t* src, count_t n)
555 {
556 IMMER_ASSERT_TAGGED(dst->kind() == kind_t::inner);
557 IMMER_ASSERT_TAGGED(src->kind() == kind_t::inner);
558 auto src_r = src->relaxed();
559 auto dst_r = dst->relaxed();
560 inc_nodes(src->inner(), n);
561 std::copy(src->inner(), src->inner() + n, dst->inner());
562 std::copy(src_r->d.sizes, src_r->d.sizes + n, dst_r->d.sizes);
563 dst_r->d.count = n;
564 return dst;
565 }
566
567 static node_t* do_copy_inner_sr(node_t* dst, node_t* src, count_t n)
568 {
569 if (embed_relaxed)
570 return do_copy_inner_r(dst, src, n);
571 else {
572 inc_nodes(src->inner(), n);
573 std::copy(src->inner(), src->inner() + n, dst->inner());
574 return dst;
575 }
576 }
577
578 static node_t* copy_leaf(node_t* src, count_t n)
579 {
580 IMMER_ASSERT_TAGGED(src->kind() == kind_t::leaf);
581 auto dst = make_leaf_n(n);
582 try {
583 std::uninitialized_copy(src->leaf(), src->leaf() + n, dst->leaf());
584 } catch (...) {
585 heap::deallocate(node_t::sizeof_leaf_n(n), dst);
586 throw;
587 }
588 return dst;
589 }
590
591 static node_t* copy_leaf_e(edit_t e, node_t* src, count_t n)
592 {
593 IMMER_ASSERT_TAGGED(src->kind() == kind_t::leaf);
594 auto dst = make_leaf_e(e);
595 try {
596 std::uninitialized_copy(src->leaf(), src->leaf() + n, dst->leaf());
597 } catch (...) {
598 heap::deallocate(node_t::max_sizeof_leaf, dst);
599 throw;
600 }
601 return dst;
602 }
603
604 static node_t* copy_leaf_n(count_t allocn, node_t* src, count_t n)
605 {
606 assert(allocn >= n);
607 IMMER_ASSERT_TAGGED(src->kind() == kind_t::leaf);
608 auto dst = make_leaf_n(allocn);
609 try {
610 std::uninitialized_copy(src->leaf(), src->leaf() + n, dst->leaf());
611 } catch (...) {
612 heap::deallocate(node_t::sizeof_leaf_n(allocn), dst);
613 throw;
614 }
615 return dst;
616 }
617
618 static node_t* copy_leaf(node_t* src1, count_t n1,
619 node_t* src2, count_t n2)
620 {
621 IMMER_ASSERT_TAGGED(src1->kind() == kind_t::leaf);
622 IMMER_ASSERT_TAGGED(src2->kind() == kind_t::leaf);
623 auto dst = make_leaf_n(n1 + n2);
624 try {
625 std::uninitialized_copy(
626 src1->leaf(), src1->leaf() + n1, dst->leaf());
627 } catch (...) {
628 heap::deallocate(node_t::sizeof_leaf_n(n1 + n2), dst);
629 throw;
630 }
631 try {
632 std::uninitialized_copy(
633 src2->leaf(), src2->leaf() + n2, dst->leaf() + n1);
634 } catch (...) {
635 destroy_n(dst->leaf(), n1);
636 heap::deallocate(node_t::sizeof_leaf_n(n1 + n2), dst);
637 throw;
638 }
639 return dst;
640 }
641
642 static node_t* copy_leaf_e(edit_t e,
643 node_t* src1, count_t n1,
644 node_t* src2, count_t n2)
645 {
646 IMMER_ASSERT_TAGGED(src1->kind() == kind_t::leaf);
647 IMMER_ASSERT_TAGGED(src2->kind() == kind_t::leaf);
648 auto dst = make_leaf_e(e);
649 try {
650 std::uninitialized_copy(
651 src1->leaf(), src1->leaf() + n1, dst->leaf());
652 } catch (...) {
653 heap::deallocate(max_sizeof_leaf, dst);
654 throw;
655 }
656 try {
657 std::uninitialized_copy(
658 src2->leaf(), src2->leaf() + n2, dst->leaf() + n1);
659 } catch (...) {
660 destroy_n(dst->leaf(), n1);
661 heap::deallocate(max_sizeof_leaf, dst);
662 throw;
663 }
664 return dst;
665 }
666
667 static node_t* copy_leaf_e(edit_t e, node_t* src, count_t idx, count_t last)
668 {
669 IMMER_ASSERT_TAGGED(src->kind() == kind_t::leaf);
670 auto dst = make_leaf_e(e);
671 try {
672 std::uninitialized_copy(
673 src->leaf() + idx, src->leaf() + last, dst->leaf());
674 } catch (...) {
675 heap::deallocate(max_sizeof_leaf, dst);
676 throw;
677 }
678 return dst;
679 }
680
681 static node_t* copy_leaf(node_t* src, count_t idx, count_t last)
682 {
683 IMMER_ASSERT_TAGGED(src->kind() == kind_t::leaf);
684 auto dst = make_leaf_n(last - idx);
685 try {
686 std::uninitialized_copy(
687 src->leaf() + idx, src->leaf() + last, dst->leaf());
688 } catch (...) {
689 heap::deallocate(node_t::sizeof_leaf_n(last - idx), dst);
690 throw;
691 }
692 return dst;
693 }
694
695 template <typename U>
696 static node_t* copy_leaf_emplace(node_t* src, count_t n, U&& x)
697 {
698 auto dst = copy_leaf_n(n + 1, src, n);
699 try {
700 new (dst->leaf() + n) T{std::forward<U>(x)};
701 } catch (...) {
702 destroy_n(dst->leaf(), n);
703 heap::deallocate(node_t::sizeof_leaf_n(n + 1), dst);
704 throw;
705 }
706 return dst;
707 }
708
709 static void delete_inner(node_t* p, count_t n)
710 {
711 IMMER_ASSERT_TAGGED(p->kind() == kind_t::inner);
712 assert(!p->relaxed());
713 heap::deallocate(ownee(p).owned()
714 ? node_t::max_sizeof_inner
715 : node_t::sizeof_inner_n(n), p);
716 }
717
718 static void delete_inner_e(node_t* p)
719 {
720 IMMER_ASSERT_TAGGED(p->kind() == kind_t::inner);
721 assert(!p->relaxed());
722 heap::deallocate(node_t::max_sizeof_inner, p);
723 }
724
725 static void delete_inner_any(node_t* p, count_t n)
726 {
727 if (p->relaxed())
728 delete_inner_r(p, n);
729 else
730 delete_inner(p, n);
731 }
732
733 static void delete_inner_r(node_t* p, count_t n)
734 {
735 IMMER_ASSERT_TAGGED(p->kind() == kind_t::inner);
736 auto r = p->relaxed();
737 assert(r);
738 static_if<!embed_relaxed>([&] (auto) {
739 if (node_t::refs(r).dec())
740 heap::deallocate(node_t::ownee(r).owned()
741 ? node_t::max_sizeof_relaxed
742 : node_t::sizeof_relaxed_n(n), r);
743 });
744 heap::deallocate(ownee(p).owned()
745 ? node_t::max_sizeof_inner_r
746 : node_t::sizeof_inner_r_n(n), p);
747 }
748
749 static void delete_inner_r_e(node_t* p)
750 {
751 IMMER_ASSERT_TAGGED(p->kind() == kind_t::inner);
752 auto r = p->relaxed();
753 assert(r);
754 static_if<!embed_relaxed>([&] (auto) {
755 if (node_t::refs(r).dec())
756 heap::deallocate(node_t::max_sizeof_relaxed, r);
757 });
758 heap::deallocate(node_t::max_sizeof_inner_r, p);
759 }
760
761 static void delete_leaf(node_t* p, count_t n)
762 {
763 IMMER_ASSERT_TAGGED(p->kind() == kind_t::leaf);
764 destroy_n(p->leaf(), n);
765 heap::deallocate(ownee(p).owned()
766 ? node_t::max_sizeof_leaf
767 : node_t::sizeof_leaf_n(n), p);
768 }
769
770 bool can_mutate(edit_t e) const
771 {
772 return refs(this).unique()
773 || ownee(this).can_mutate(e);
774 }
775
776 bool can_relax() const
777 {
778 return !embed_relaxed || relaxed();
779 }
780
781 relaxed_t* ensure_mutable_relaxed(edit_t e)
782 {
783 auto src_r = relaxed();
784 return static_if<embed_relaxed, relaxed_t*>(
785 [&] (auto) { return src_r; },
786 [&] (auto) {
787 if (node_t::refs(src_r).unique() ||
788 node_t::ownee(src_r).can_mutate(e))
789 return src_r;
790 else {
791 if (src_r)
792 node_t::refs(src_r).dec_unsafe();
793 auto dst_r = impl.d.data.inner.relaxed =
794 new (heap::allocate(max_sizeof_relaxed)) relaxed_t;
795 node_t::ownee(dst_r) = e;
796 return dst_r;
797 }
798 });
799 }
800
801 relaxed_t* ensure_mutable_relaxed_e(edit_t e, edit_t ec)
802 {
803 auto src_r = relaxed();
804 return static_if<embed_relaxed, relaxed_t*>(
805 [&] (auto) { return src_r; },
806 [&] (auto) {
807 if (src_r && (node_t::refs(src_r).unique() ||
808 node_t::ownee(src_r).can_mutate(e))) {
809 node_t::ownee(src_r) = ec;
810 return src_r;
811 } else {
812 if (src_r)
813 node_t::refs(src_r).dec_unsafe();
814 auto dst_r = impl.d.data.inner.relaxed =
815 new (heap::allocate(max_sizeof_relaxed)) relaxed_t;
816 node_t::ownee(dst_r) = ec;
817 return dst_r;
818 }
819 });
820 }
821
822 relaxed_t* ensure_mutable_relaxed_n(edit_t e, count_t n)
823 {
824 auto src_r = relaxed();
825 return static_if<embed_relaxed, relaxed_t*>(
826 [&] (auto) { return src_r; },
827 [&] (auto) {
828 if (node_t::refs(src_r).unique() ||
829 node_t::ownee(src_r).can_mutate(e))
830 return src_r;
831 else {
832 if (src_r)
833 node_t::refs(src_r).dec_unsafe();
834 auto dst_r =
835 new (heap::allocate(max_sizeof_relaxed)) relaxed_t;
836 std::copy(src_r->d.sizes, src_r->d.sizes + n, dst_r->d.sizes);
837 node_t::ownee(dst_r) = e;
838 return impl.d.data.inner.relaxed = dst_r;
839 }
840 });
841 }
842
843 node_t* inc()
844 {
845 refs(this).inc();
846 return this;
847 }
848
849 const node_t* inc() const
850 {
851 refs(this).inc();
852 return this;
853 }
854
855 bool dec() const { return refs(this).dec(); }
856 void dec_unsafe() const { refs(this).dec_unsafe(); }
857
858 static void inc_nodes(node_t** p, count_t n)
859 {
860 for (auto i = p, e = i + n; i != e; ++i)
861 refs(*i).inc();
862 }
863
864#if IMMER_TAGGED_NODE
865 shift_t compute_shift()
866 {
867 if (kind() == kind_t::leaf)
868 return endshift<B, BL>;
869 else
870 return B + inner() [0]->compute_shift();
871 }
872#endif
873
874 bool check(shift_t shift, size_t size)
875 {
876#if IMMER_DEBUG_DEEP_CHECK
877 assert(size > 0);
878 if (shift == endshift<B, BL>) {
879 IMMER_ASSERT_TAGGED(kind() == kind_t::leaf);
880 assert(size <= branches<BL>);
881 } else if (auto r = relaxed()) {
882 auto count = r->d.count;
883 assert(count > 0);
884 assert(count <= branches<B>);
885 if (r->d.sizes[count - 1] != size) {
886 IMMER_TRACE_F("check");
887 IMMER_TRACE_E(r->d.sizes[count - 1]);
888 IMMER_TRACE_E(size);
889 }
890 assert(r->d.sizes[count - 1] == size);
891 for (auto i = 1; i < count; ++i)
892 assert(r->d.sizes[i - 1] < r->d.sizes[i]);
893 auto last_size = size_t{};
894 for (auto i = 0; i < count; ++i) {
895 assert(inner()[i]->check(
896 shift - B,
897 r->d.sizes[i] - last_size));
898 last_size = r->d.sizes[i];
899 }
900 } else {
901 assert(size <= branches<B> << shift);
902 auto count = (size >> shift)
903 + (size - ((size >> shift) << shift) > 0);
904 assert(count <= branches<B>);
905 if (count) {
906 for (auto i = 1; i < count - 1; ++i)
907 assert(inner()[i]->check(
908 shift - B,
909 1 << shift));
910 assert(inner()[count - 1]->check(
911 shift - B,
912 size - ((count - 1) << shift)));
913 }
914 }
915#endif // IMMER_DEBUG_DEEP_CHECK
916 return true;
917 }
918};
919
920template <typename T, typename MP, bits_t B>
921constexpr bits_t derive_bits_leaf_aux()
922{
923 using node_t = node<T, MP, B, B>;
924 constexpr auto sizeof_elem = sizeof(T);
925 constexpr auto space = node_t::max_sizeof_inner - node_t::sizeof_packed_leaf_n(0);
926 constexpr auto full_elems = space / sizeof_elem;
927 constexpr auto BL = log2(full_elems);
928 return BL;
929}
930
// Default leaf branching exponent for element type T under memory
// policy MP and inner branching exponent B (see derive_bits_leaf_aux).
template <typename T, typename MP, bits_t B>
constexpr bits_t derive_bits_leaf = derive_bits_leaf_aux<T, MP, B>();
933
934} // namespace rbts
935} // namespace detail
936} // namespace immer
937