// Copyright 2009-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0

#pragma once

#include "bvh_node_base.h"

namespace embree
{
  /*! BVHN AABBNode */
  template<typename NodeRef, int N>
  struct AABBNode_t : public BaseNode_t<NodeRef, N>
  {
    using BaseNode_t<NodeRef,N>::children;

    /*! Allocates and clears a new AABB node. */
    struct Create
    {
      __forceinline NodeRef operator() (const FastAllocator::CachedAllocator& alloc, size_t numChildren = 0) const
      {
        AABBNode_t* node = (AABBNode_t*) alloc.malloc0(sizeof(AABBNode_t),NodeRef::byteNodeAlignment);
        node->clear();
        return NodeRef::encodeNode(node);
      }
    };

    /*! Sets the reference and bounds of one child of a node. */
    struct Set
    {
      __forceinline void operator() (NodeRef node, size_t i, NodeRef child, const BBox3fa& bounds) const {
        node.getAABBNode()->setRef(i,child);
        node.getAABBNode()->setBounds(i,bounds);
      }
    };
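
    /* Illustrative only: a minimal sketch of how the Create/Set functors are
     * meant to be combined. `alloc` stands for any FastAllocator::CachedAllocator,
     * and `leftChild`/`leftBounds`/`rightChild`/`rightBounds` are hypothetical
     * values; this is not a specific Embree builder call.
     *
     *   NodeRef ref = typename AABBNode_t<NodeRef,N>::Create()(alloc);
     *   typename AABBNode_t<NodeRef,N>::Set()(ref, 0, leftChild,  leftBounds);
     *   typename AABBNode_t<NodeRef,N>::Set()(ref, 1, rightChild, rightBounds);
     */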

    /*! Allocates a new node and sets the child bounds from the build records;
        the child references are filled in later (see Set2/Set3). */
    struct Create2
    {
      template<typename BuildRecord>
      __forceinline NodeRef operator() (BuildRecord* children, const size_t num, const FastAllocator::CachedAllocator& alloc) const
      {
        AABBNode_t* node = (AABBNode_t*) alloc.malloc0(sizeof(AABBNode_t), NodeRef::byteNodeAlignment);
        node->clear();
        for (size_t i=0; i<num; i++) node->setBounds(i,children[i].bounds());
        return NodeRef::encodeNode(node);
      }
    };

    /*! Sets the child references of an already allocated node. */
    struct Set2
    {
      template<typename BuildRecord>
      __forceinline NodeRef operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRef* children, const size_t num) const
      {
#if defined(DEBUG)
        // check that empty children are only at the end of the child list
        bool emptyChild = false;
        for (size_t i=0; i<num; i++) {
          emptyChild |= (children[i] == NodeRef::emptyNode);
          assert(emptyChild == (children[i] == NodeRef::emptyNode));
        }
#endif
        AABBNode_t* node = ref.getAABBNode();
        for (size_t i=0; i<num; i++) node->setRef(i,children[i]);
        return ref;
      }
    };

    /*! Sets the child references of an already allocated node and, at an
        allocation barrier, hands the memory of the subtree's primitive range
        back to the allocator as a reusable block. */
    struct Set3
    {
      Set3 (FastAllocator* allocator, PrimRef* prims)
        : allocator(allocator), prims(prims) {}

      template<typename BuildRecord>
      __forceinline NodeRef operator() (const BuildRecord& precord, const BuildRecord* crecords, NodeRef ref, NodeRef* children, const size_t num) const
      {
#if defined(DEBUG)
        // check that empty children are only at the end of the child list
        bool emptyChild = false;
        for (size_t i=0; i<num; i++) {
          emptyChild |= (children[i] == NodeRef::emptyNode);
          assert(emptyChild == (children[i] == NodeRef::emptyNode));
        }
#endif
        AABBNode_t* node = ref.getAABBNode();
        for (size_t i=0; i<num; i++) node->setRef(i,children[i]);

        if (unlikely(precord.alloc_barrier))
        {
          PrimRef* begin = &prims[precord.prims.begin()];
          PrimRef* end   = &prims[precord.prims.end()]; // FIXME: extended end for spatial split builder!!!!!
          size_t bytes = (size_t)end - (size_t)begin;
          allocator->addBlock(begin,bytes);
        }

        return ref;
      }

      FastAllocator* const allocator;
      PrimRef* const prims;
    };
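
    /* Illustrative only: Create2/Set2 split node construction into two phases,
     * so the bounds can be written before the children exist. `records`,
     * `precord`, `refs` and `num` below are hypothetical builder state, not a
     * specific Embree builder interface.
     *
     *   NodeRef ref = typename AABBNode_t<NodeRef,N>::Create2()(records, num, alloc); // child bounds set here
     *   // ... the children are built recursively into refs[0..num-1] ...
     *   typename AABBNode_t<NodeRef,N>::Set2()(precord, records, ref, refs, num);     // child refs patched here
     */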

    /*! Clears the node. */
    __forceinline void clear() {
      lower_x = lower_y = lower_z = pos_inf;
      upper_x = upper_y = upper_z = neg_inf;
      BaseNode_t<NodeRef,N>::clear();
    }

    /*! Sets the reference to the specified child. */
    __forceinline void setRef(size_t i, const NodeRef& ref) {
      assert(i < N);
      children[i] = ref;
    }

    /*! Sets bounding box of child. */
    __forceinline void setBounds(size_t i, const BBox3fa& bounds)
    {
      assert(i < N);
      lower_x[i] = bounds.lower.x; lower_y[i] = bounds.lower.y; lower_z[i] = bounds.lower.z;
      upper_x[i] = bounds.upper.x; upper_y[i] = bounds.upper.y; upper_z[i] = bounds.upper.z;
    }

    /*! Sets bounding box and reference of child. */
    __forceinline void set(size_t i, const NodeRef& ref, const BBox3fa& bounds) {
      setBounds(i,bounds);
      children[i] = ref;
    }

    /*! Returns the merged bounds of all children of the node. */
    __forceinline BBox3fa bounds() const {
      const Vec3fa lower(reduce_min(lower_x),reduce_min(lower_y),reduce_min(lower_z));
      const Vec3fa upper(reduce_max(upper_x),reduce_max(upper_y),reduce_max(upper_z));
      return BBox3fa(lower,upper);
    }

    /*! Returns bounds of specified child. */
    __forceinline BBox3fa bounds(size_t i) const
    {
      assert(i < N);
      const Vec3fa lower(lower_x[i],lower_y[i],lower_z[i]);
      const Vec3fa upper(upper_x[i],upper_y[i],upper_z[i]);
      return BBox3fa(lower,upper);
    }

    /*! Returns extent of bounds of specified child. */
    __forceinline Vec3fa extend(size_t i) const {
      return bounds(i).size();
    }

    /*! Returns bounds of all children (implemented later as specializations). */
    __forceinline void bounds(BBox<vfloat4>& bounds0, BBox<vfloat4>& bounds1, BBox<vfloat4>& bounds2, BBox<vfloat4>& bounds3) const;

    /*! Swaps two children of the node. */
    __forceinline void swap(size_t i, size_t j)
    {
      assert(i<N && j<N);
      std::swap(children[i],children[j]);
      std::swap(lower_x[i],lower_x[j]);
      std::swap(lower_y[i],lower_y[j]);
      std::swap(lower_z[i],lower_z[j]);
      std::swap(upper_x[i],upper_x[j]);
      std::swap(upper_y[i],upper_y[j]);
      std::swap(upper_z[i],upper_z[j]);
    }

    /*! Swaps a child of one node with a child of another node. */
    __forceinline static void swap(AABBNode_t* a, size_t i, AABBNode_t* b, size_t j)
    {
      assert(i<N && j<N);
      std::swap(a->children[i],b->children[j]);
      std::swap(a->lower_x[i],b->lower_x[j]);
      std::swap(a->lower_y[i],b->lower_y[j]);
      std::swap(a->lower_z[i],b->lower_z[j]);
      std::swap(a->upper_x[i],b->upper_x[j]);
      std::swap(a->upper_y[i],b->upper_y[j]);
      std::swap(a->upper_z[i],b->upper_z[j]);
    }

    /*! Compacts a node (moves empty children to the end). */
    __forceinline static void compact(AABBNode_t* a)
    {
      /* find right-most filled child */
      ssize_t j=N;
      for (j=j-1; j>=0; j--)
        if (a->child(j) != NodeRef::emptyNode)
          break;

      /* replace empty children with filled ones taken from the back */
      for (ssize_t i=0; i<j; i++) {
        if (a->child(i) == NodeRef::emptyNode) {
          a->swap(i,j);
          for (j=j-1; j>i; j--)
            if (a->child(j) != NodeRef::emptyNode)
              break;
        }
      }
    }
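
    /* Illustrative only: compact() keeps filled children contiguous at the
     * front of the child list. For N=4, a child list
     *   [A, emptyNode, B, emptyNode]
     * becomes
     *   [A, B, emptyNode, emptyNode]
     * with the per-child bounds lanes swapped along with the references.
     */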

    /*! Returns reference to specified child */
    __forceinline       NodeRef& child(size_t i)       { assert(i<N); return children[i]; }
    __forceinline const NodeRef& child(size_t i) const { assert(i<N); return children[i]; }

    /*! output operator */
    friend embree_ostream operator<<(embree_ostream o, const AABBNode_t& n)
    {
      o << "AABBNode { " << embree_endl;
      o << "  lower_x " << n.lower_x << embree_endl;
      o << "  upper_x " << n.upper_x << embree_endl;
      o << "  lower_y " << n.lower_y << embree_endl;
      o << "  upper_y " << n.upper_y << embree_endl;
      o << "  lower_z " << n.lower_z << embree_endl;
      o << "  upper_z " << n.upper_z << embree_endl;
      o << "  children = ";
      for (size_t i=0; i<N; i++) o << n.children[i] << " ";
      o << embree_endl;
      o << "}" << embree_endl;
      return o;
    }

  public:
    vfloat<N> lower_x; //!< X dimension of lower bounds of all N children.
    vfloat<N> upper_x; //!< X dimension of upper bounds of all N children.
    vfloat<N> lower_y; //!< Y dimension of lower bounds of all N children.
    vfloat<N> upper_y; //!< Y dimension of upper bounds of all N children.
    vfloat<N> lower_z; //!< Z dimension of lower bounds of all N children.
    vfloat<N> upper_z; //!< Z dimension of upper bounds of all N children.
  };

  template<>
  __forceinline void AABBNode_t<NodeRefPtr<4>,4>::bounds(BBox<vfloat4>& bounds0, BBox<vfloat4>& bounds1, BBox<vfloat4>& bounds2, BBox<vfloat4>& bounds3) const {
    transpose(lower_x,lower_y,lower_z,vfloat4(zero),bounds0.lower,bounds1.lower,bounds2.lower,bounds3.lower);
    transpose(upper_x,upper_y,upper_z,vfloat4(zero),bounds0.upper,bounds1.upper,bounds2.upper,bounds3.upper);
  }
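
  /* Illustrative only: the specialization above transposes the node's
   * structure-of-arrays bounds (lower_x/lower_y/lower_z and upper_*) into one
   * BBox<vfloat4> per child, so each child's box can be handled as a single
   * SIMD value. The fourth lane of every output vector is padded with zero,
   * since only the x/y/z components are transposed. A hypothetical caller:
   *
   *   BBox<vfloat4> b0, b1, b2, b3;
   *   node->bounds(b0, b1, b2, b3); // b0..b3 hold the boxes of children 0..3
   */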
}