// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#define CAPNP_PRIVATE
#include "layout.h"
#include <kj/debug.h>
#include "arena.h"
#include <string.h>
#include <stdlib.h>

#if !CAPNP_LITE
#include "capability.h"
#endif // !CAPNP_LITE

namespace capnp {
namespace _ { // private

#if !CAPNP_LITE
static BrokenCapFactory* brokenCapFactory = nullptr;
// Horrible hack: We need to be able to construct broken caps without any capability context,
// but we can't have a link-time dependency on libcapnp-rpc.

void setGlobalBrokenCapFactoryForLayoutCpp(BrokenCapFactory& factory) {
  // Called from capability.c++ when the capability API is used, to make sure that layout.c++
  // is ready for it. May be called multiple times but always with the same value.
#if __GNUC__
  __atomic_store_n(&brokenCapFactory, &factory, __ATOMIC_RELAXED);
#elif _MSC_VER
  *static_cast<BrokenCapFactory* volatile*>(&brokenCapFactory) = &factory;
#else
#error "Platform not supported"
#endif
}

} // namespace _ (private)

const uint ClientHook::NULL_CAPABILITY_BRAND = 0;
// Defined here rather than capability.c++ so that we can safely call isNull() in this file.

void* ClientHook::getLocalServer(_::CapabilityServerSetBase& capServerSet) {
  // Defined here rather than capability.c++ because otherwise building with -fsanitize=vptr fails.
  return nullptr;
}

namespace _ { // private

#endif // !CAPNP_LITE

#if CAPNP_DEBUG_TYPES
#define G(n) bounded<n>()
#else
#define G(n) n
#endif

// =======================================================================================

#if __GNUC__ >= 8 && !__clang__
// GCC 8 introduced a warning which complains whenever we try to memset() or memcpy() a
// WirePointer, because we deleted the regular copy constructor / assignment operator. Weirdly, if
// I remove those deletions, GCC *still* complains that WirePointer is non-trivial. I don't
// understand why -- maybe because WireValue has private members? We don't want to make WireValue's
// members public, but memset() and memcpy() on it are certainly valid and desirable, so we'll just
// have to disable the warning I guess.
#pragma GCC diagnostic ignored "-Wclass-memaccess"
#endif

struct WirePointer {
  // A pointer, in exactly the format in which it appears on the wire.

  // Copying and moving are not allowed because the offset would become wrong.
  WirePointer(const WirePointer& other) = delete;
  WirePointer(WirePointer&& other) = delete;
  WirePointer& operator=(const WirePointer& other) = delete;
  WirePointer& operator=(WirePointer&& other) = delete;

  // -----------------------------------------------------------------
  // Common part of all pointers: kind + offset
  //
  // Actually this is not terribly common. The "offset" could actually be different things
  // depending on the context:
  // - For a regular (e.g. struct/list) pointer, a signed word offset from the word immediately
  //   following the pointer. (The off-by-one means the offset is more often zero, saving
  //   bytes on the wire when packed.)
  // - For an inline composite list tag (not really a pointer, but structured similarly), an
  //   element count.
  // - For a FAR pointer, an unsigned offset into the target segment.
  // - For a FAR landing pad, zero indicates that the target value immediately follows the pad,
  //   while 1 indicates that the pad is followed by another FAR pointer that actually points at
  //   the value.
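  //
  // A worked example, derived from the accessors below rather than anything new: a STRUCT
  // pointer whose target begins 3 words after the word following the pointer stores
  // (3 << 2) | STRUCT == 0x0000000C in its lower 32 bits; target() then recovers the
  // address as `this + 1 + (0x0000000C >> 2)`.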

  enum Kind {
    STRUCT = 0,
    // Reference points at / describes a struct.

    LIST = 1,
    // Reference points at / describes a list.

    FAR = 2,
    // Reference is a "far pointer", which points at data located in a different segment. The
    // eventual target is one of the other kinds.

    OTHER = 3
    // Reference has type "other". If the next 30 bits are all zero (i.e. the lower 32 bits contain
    // only the kind OTHER) then the pointer is a capability. All other values are reserved.
  };

  WireValue<uint32_t> offsetAndKind;

  KJ_ALWAYS_INLINE(Kind kind() const) {
    return static_cast<Kind>(offsetAndKind.get() & 3);
  }
  KJ_ALWAYS_INLINE(bool isPositional() const) {
    return (offsetAndKind.get() & 2) == 0;  // match STRUCT and LIST but not FAR or OTHER
  }
  KJ_ALWAYS_INLINE(bool isCapability() const) {
    return offsetAndKind.get() == OTHER;
  }

  KJ_ALWAYS_INLINE(word* target()) {
    return reinterpret_cast<word*>(this) + 1 + (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(const word* target(SegmentReader* segment) const) {
    if (segment == nullptr) {
      return reinterpret_cast<const word*>(this + 1) +
          (static_cast<int32_t>(offsetAndKind.get()) >> 2);
    } else {
      return segment->checkOffset(reinterpret_cast<const word*>(this + 1),
                                  static_cast<int32_t>(offsetAndKind.get()) >> 2);
    }
  }
  KJ_ALWAYS_INLINE(void setKindAndTarget(Kind kind, word* target, SegmentBuilder* segment)) {
    // Check that the target is really in the same segment, otherwise subtracting pointers is
    // undefined behavior. As it turns out, this is undefined behavior that produced unexpected
    // results in a real-world situation: At one time, OrphanBuilder's "tag" (a WirePointer) was
    // allowed to be initialized as if it lived in a particular segment when in fact it did not.
    // On 32-bit systems, where words might only be 32-bit aligned, it's possible that the
    // difference between `this` and `target` is not a whole number of words. But clang optimizes:
    //     (target - (word*)this - 1) << 2
    // to:
    //     (((ptrdiff_t)target - (ptrdiff_t)this - 8) >> 1)
    // So when the pointers are not aligned the same, we can end up corrupting the bottom
    // two bits, where `kind` is stored. For example, this turns a struct into a far pointer.
    // Ouch!
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) >=
                reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) <
                reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) >=
                reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) <=
                reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    offsetAndKind.set((static_cast<uint32_t>(target - reinterpret_cast<word*>(this) - 1) << 2)
                      | kind);
  }
  KJ_ALWAYS_INLINE(void setKindWithZeroOffset(Kind kind)) {
    offsetAndKind.set(kind);
  }
  KJ_ALWAYS_INLINE(void setKindAndTargetForEmptyStruct()) {
    // This pointer points at an empty struct. Assuming the WirePointer itself is in-bounds, we
    // can set the target to point either at the WirePointer itself or immediately after it. The
    // latter would cause the WirePointer to be "null" (since for an empty struct the upper 32
    // bits are going to be zero). So we set an offset of -1, as if the struct were allocated
    // immediately before this pointer, to distinguish it from null.
    offsetAndKind.set(0xfffffffc);
  }
  KJ_ALWAYS_INLINE(void setKindForOrphan(Kind kind)) {
    // OrphanBuilder contains a WirePointer, but since it isn't located in a segment, it should
    // not have a valid offset (unless it is a FAR or OTHER pointer). We set its offset to -1
    // because setting it to zero would mean a pointer to an empty struct would appear to be a
    // null pointer.
    KJ_DREQUIRE(isPositional());
    offsetAndKind.set(kind | 0xfffffffc);
  }

  KJ_ALWAYS_INLINE(ListElementCount inlineCompositeListElementCount() const) {
    return ((bounded(offsetAndKind.get()) >> G(2))
            & G(kj::maxValueForBits<LIST_ELEMENT_COUNT_BITS>())) * ELEMENTS;
  }
  KJ_ALWAYS_INLINE(void setKindAndInlineCompositeListElementCount(
      Kind kind, ListElementCount elementCount)) {
    offsetAndKind.set(unboundAs<uint32_t>((elementCount / ELEMENTS) << G(2)) | kind);
  }

  KJ_ALWAYS_INLINE(const word* farTarget(SegmentReader* segment) const) {
    KJ_DREQUIRE(kind() == FAR,
        "farTarget() should only be called on FAR pointers.");
    return segment->checkOffset(segment->getStartPtr(), offsetAndKind.get() >> 3);
  }
  KJ_ALWAYS_INLINE(word* farTarget(SegmentBuilder* segment) const) {
    KJ_DREQUIRE(kind() == FAR,
        "farTarget() should only be called on FAR pointers.");
    return segment->getPtrUnchecked((bounded(offsetAndKind.get()) >> G(3)) * WORDS);
  }
  KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
    KJ_DREQUIRE(kind() == FAR,
        "isDoubleFar() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 2) & 1;
  }
  KJ_ALWAYS_INLINE(void setFar(bool isDoubleFar, WordCountN<29> pos)) {
    offsetAndKind.set(unboundAs<uint32_t>((pos / WORDS) << G(3)) |
                      (static_cast<uint32_t>(isDoubleFar) << 2) |
                      static_cast<uint32_t>(Kind::FAR));
  }
  KJ_ALWAYS_INLINE(void setCap(uint index)) {
    offsetAndKind.set(static_cast<uint32_t>(Kind::OTHER));
    capRef.index.set(index);
  }
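
  // To make the FAR and OTHER encodings concrete (illustrative only, derived from the setters
  // above): after setFar(false, 5 * WORDS), the lower 32 bits are
  // (5 << 3) | (0 << 2) | FAR == 0x0000002A, with the target segment's ID in the upper 32 bits
  // (farRef). setCap(7) stores just OTHER (== 3) in the lower 32 bits and 7 in capRef.index.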

  // -----------------------------------------------------------------
  // Part of pointer that depends on the kind.

  // Note: Originally StructRef, ListRef, and FarRef were unnamed types, but this somehow
  // tickled a bug in GCC:
  //   http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58192
  struct StructRef {
    WireValue<WordCount16> dataSize;
    WireValue<WirePointerCount16> ptrCount;

    inline WordCountN<17> wordSize() const {
      return upgradeBound<uint32_t>(dataSize.get()) + ptrCount.get() * WORDS_PER_POINTER;
    }

    KJ_ALWAYS_INLINE(void set(WordCount16 ds, WirePointerCount16 rc)) {
      dataSize.set(ds);
      ptrCount.set(rc);
    }
    KJ_ALWAYS_INLINE(void set(StructSize size)) {
      dataSize.set(size.data);
      ptrCount.set(size.pointers);
    }
  };

  struct ListRef {
    WireValue<uint32_t> elementSizeAndCount;

    KJ_ALWAYS_INLINE(ElementSize elementSize() const) {
      return static_cast<ElementSize>(elementSizeAndCount.get() & 7);
    }
    KJ_ALWAYS_INLINE(ElementCountN<29> elementCount() const) {
      return (bounded(elementSizeAndCount.get()) >> G(3)) * ELEMENTS;
    }
    KJ_ALWAYS_INLINE(WordCountN<29> inlineCompositeWordCount() const) {
      return elementCount() * (ONE * WORDS / ELEMENTS);
    }

    KJ_ALWAYS_INLINE(void set(ElementSize es, ElementCountN<29> ec)) {
      elementSizeAndCount.set(unboundAs<uint32_t>((ec / ELEMENTS) << G(3)) |
                              static_cast<int>(es));
    }

    KJ_ALWAYS_INLINE(void setInlineComposite(WordCountN<29> wc)) {
      elementSizeAndCount.set(unboundAs<uint32_t>((wc / WORDS) << G(3)) |
                              static_cast<int>(ElementSize::INLINE_COMPOSITE));
    }
  };

  struct FarRef {
    WireValue<SegmentId> segmentId;

    KJ_ALWAYS_INLINE(void set(SegmentId si)) {
      segmentId.set(si);
    }
  };

  struct CapRef {
    WireValue<uint32_t> index;
    // Index into the message's capability table.
  };

  union {
    uint32_t upper32Bits;

    StructRef structRef;

    ListRef listRef;

    FarRef farRef;

    CapRef capRef;
  };

  KJ_ALWAYS_INLINE(bool isNull() const) {
    // If the upper 32 bits are zero, this is a pointer to an empty struct. We consider that to be
    // our "null" value. (The bitwise & rather than && is presumably intentional: both operands
    // are booleans, and avoiding short-circuit evaluation avoids a branch.)
    return (offsetAndKind.get() == 0) & (upper32Bits == 0);
  }

};
static_assert(sizeof(WirePointer) == sizeof(word),
    "capnp::WirePointer is not exactly one word. This will probably break everything.");
static_assert(unboundAs<size_t>(POINTERS * WORDS_PER_POINTER * BYTES_PER_WORD / BYTES) ==
    sizeof(WirePointer),
    "WORDS_PER_POINTER is wrong.");
static_assert(unboundAs<size_t>(POINTERS * BYTES_PER_POINTER / BYTES) == sizeof(WirePointer),
    "BYTES_PER_POINTER is wrong.");
static_assert(unboundAs<size_t>(POINTERS * BITS_PER_POINTER / BITS_PER_BYTE / BYTES) ==
    sizeof(WirePointer),
    "BITS_PER_POINTER is wrong.");

namespace {

static const union {
  AlignedData<unbound(POINTER_SIZE_IN_WORDS / WORDS)> word;
  WirePointer pointer;
} zero = {{{0}}};

} // namespace

// =======================================================================================

namespace {

template <typename T>
struct SegmentAnd {
  SegmentBuilder* segment;
  T value;
};

} // namespace

struct WireHelpers {
#if CAPNP_DEBUG_TYPES
  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, word> roundBytesUpToWords(
          kj::Quantity<kj::Bounded<maxN, T>, byte> bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
  }

  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, byte> roundBitsUpToBytes(
          kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }

  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 63) / 64, T>, word> roundBitsUpToWords(
          kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + G(63) * BITS) / BITS_PER_WORD;
  }
#else
  static KJ_ALWAYS_INLINE(WordCount roundBytesUpToWords(ByteCount bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount roundBitsUpToBytes(BitCount bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }

  static KJ_ALWAYS_INLINE(WordCount64 roundBitsUpToWords(BitCount64 bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + G(63) * BITS) / BITS_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount64 roundBitsUpToBytes(BitCount64 bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }
#endif
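
  // For example (both the debug-typed and plain variants compute the same arithmetic):
  // roundBytesUpToWords() on 9 bytes yields (9 + 7) / 8 = 2 words, and roundBitsUpToWords()
  // on 1 bit yields (1 + 63) / 64 = 1 word.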

  static KJ_ALWAYS_INLINE(void zeroMemory(byte* ptr, ByteCount32 count)) {
    if (count != ZERO * BYTES) memset(ptr, 0, unbound(count / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(word* ptr, WordCountN<29> count)) {
    if (count != ZERO * WORDS) memset(ptr, 0, unbound(count * BYTES_PER_WORD / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr, WirePointerCountN<29> count)) {
    if (count != ZERO * POINTERS) memset(ptr, 0, unbound(count * BYTES_PER_POINTER / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr)) {
    memset(ptr, 0, sizeof(*ptr));
  }

  template <typename T>
  static inline void zeroMemory(kj::ArrayPtr<T> array) {
    if (array.size() != 0u) memset(array.begin(), 0, array.size() * sizeof(array[0]));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(byte* to, const byte* from, ByteCount32 count)) {
    if (count != ZERO * BYTES) memcpy(to, from, unbound(count / BYTES));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(word* to, const word* from, WordCountN<29> count)) {
    if (count != ZERO * WORDS) memcpy(to, from, unbound(count * BYTES_PER_WORD / BYTES));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(WirePointer* to, const WirePointer* from,
                                          WirePointerCountN<29> count)) {
    if (count != ZERO * POINTERS) memcpy(to, from, unbound(count * BYTES_PER_POINTER / BYTES));
  }

  template <typename T>
  static inline void copyMemory(T* to, const T* from) {
    memcpy(to, from, sizeof(*from));
  }

  // TODO(cleanup): Turn these into a .copyTo() method of ArrayPtr?
  template <typename T>
  static inline void copyMemory(T* to, kj::ArrayPtr<T> from) {
    if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }
  template <typename T>
  static inline void copyMemory(T* to, kj::ArrayPtr<const T> from) {
    if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }
  static KJ_ALWAYS_INLINE(void copyMemory(char* to, kj::StringPtr from)) {
    if (from.size() != 0u) memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }

  static KJ_ALWAYS_INLINE(bool boundsCheck(
      SegmentReader* segment, const word* start, WordCountN<31> size)) {
    // If segment is null, this is an unchecked message, so we don't do bounds checks.
    return segment == nullptr || segment->checkObject(start, size);
  }

  static KJ_ALWAYS_INLINE(bool amplifiedRead(SegmentReader* segment, WordCount virtualAmount)) {
    // If segment is null, this is an unchecked message, so we don't do read limiter checks.
    return segment == nullptr || segment->amplifiedRead(virtualAmount);
  }

  static KJ_ALWAYS_INLINE(word* allocate(
      WirePointer*& ref, SegmentBuilder*& segment, CapTableBuilder* capTable,
      SegmentWordCount amount, WirePointer::Kind kind, BuilderArena* orphanArena)) {
    // Allocate space in the message for a new object, creating far pointers if necessary.
    //
    // * `ref` starts out being a reference to the pointer which shall be assigned to point at the
    //   new object. On return, `ref` points to a pointer which needs to be initialized with
    //   the object's type information. Normally this is the same pointer, but it can change if
    //   a far pointer was allocated -- in this case, `ref` will end up pointing to the far
    //   pointer's tag. Either way, `allocate()` takes care of making sure that the original
    //   pointer ends up leading to the new object. On return, only the upper 32 bits of `*ref`
    //   need to be filled in by the caller.
    // * `segment` starts out pointing to the segment containing `ref`. On return, it points to
    //   the segment containing the allocated object, which is usually the same segment but could
    //   be a different one if the original segment was out of space.
    // * `amount` is the number of words to allocate.
    // * `kind` is the kind of object to allocate. It is used to initialize the pointer. It
    //   cannot be `FAR` -- far pointers are allocated automatically as needed.
    // * `orphanArena` is usually null. If it is non-null, then we're allocating an orphan object.
    //   In this case, `segment` starts out null; the allocation takes place in an arbitrary
    //   segment belonging to the arena. `ref` will be initialized as a non-far pointer, but its
    //   target offset will be set to zero.

    if (orphanArena == nullptr) {
      if (!ref->isNull()) zeroObject(segment, capTable, ref);

      if (amount == ZERO * WORDS && kind == WirePointer::STRUCT) {
        // Note that the check for kind == WirePointer::STRUCT will hopefully cause this whole
        // branch to be optimized away from all the call sites that are allocating non-structs.
        ref->setKindAndTargetForEmptyStruct();
        return reinterpret_cast<word*>(ref);
      }

      word* ptr = segment->allocate(amount);

      if (ptr == nullptr) {

        // Need to allocate in a new segment. We'll need to allocate an extra pointer worth of
        // space to act as the landing pad for a far pointer.

        WordCount amountPlusRef = amount + POINTER_SIZE_IN_WORDS;
        auto allocation = segment->getArena()->allocate(
            assertMaxBits<SEGMENT_WORD_COUNT_BITS>(amountPlusRef, []() {
              KJ_FAIL_REQUIRE("requested object size exceeds maximum segment size");
            }));
        segment = allocation.segment;
        ptr = allocation.words;

        // Set up the original pointer to be a far pointer to the new segment.
        ref->setFar(false, segment->getOffsetTo(ptr));
        ref->farRef.set(segment->getSegmentId());

        // Initialize the landing pad to indicate that the data immediately follows the pad.
        ref = reinterpret_cast<WirePointer*>(ptr);
        ref->setKindAndTarget(kind, ptr + POINTER_SIZE_IN_WORDS, segment);

        // Allocated space follows new pointer.
        return ptr + POINTER_SIZE_IN_WORDS;
      } else {
        ref->setKindAndTarget(kind, ptr, segment);
        return ptr;
      }
    } else {
      // orphanArena is non-null. Allocate an orphan.
      KJ_DASSERT(ref->isNull());
      auto allocation = orphanArena->allocate(amount);
      segment = allocation.segment;
      ref->setKindForOrphan(kind);
      return allocation.words;
    }
  }
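
  // When allocate() spills to a new segment, the resulting layout (sketched from the code
  // above, not an additional invariant) is:
  //
  //   segment A: [ original pointer, now FAR -> (segment B, offset of pad) ]
  //   segment B: [ landing pad: kind + offset ][ object: `amount` words... ]
  //
  // and `ref` is left pointing at the landing pad, so the caller still just fills in the
  // upper 32 bits of *ref exactly as in the same-segment case.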

  static KJ_ALWAYS_INLINE(word* followFarsNoWritableCheck(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    // If `ref` is a far pointer, follow it. On return, `ref` will have been updated to point at
    // a WirePointer that contains the type information about the target object, and a pointer to
    // the object contents is returned. The caller must NOT use `ref->target()` as this may or may
    // not actually return a valid pointer. `segment` is also updated to point at the segment which
    // actually contains the object.
    //
    // If `ref` is not a far pointer, this simply returns `refTarget`. Usually, `refTarget` should
    // be the same as `ref->target()`, but may not be in cases where `ref` is only a tag.

    if (ref->kind() == WirePointer::FAR) {
      segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer. It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
      return pad->farTarget(segment);
    } else {
      return refTarget;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFars(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    auto result = followFarsNoWritableCheck(ref, refTarget, segment);
    segment->checkWritable();
    return result;
  }

  static KJ_ALWAYS_INLINE(kj::Maybe<const word&> followFars(
      const WirePointer*& ref, const word* refTarget, SegmentReader*& segment))
      KJ_WARN_UNUSED_RESULT {
    // Like the other followFars() but operates on readers.

    // If the segment is null, this is an unchecked message, so there are no FAR pointers.
    if (segment != nullptr && ref->kind() == WirePointer::FAR) {
      // Look up the segment containing the landing pad.
      segment = segment->getArena()->tryGetSegment(ref->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains far pointer to unknown segment.") {
        return nullptr;
      }

      // Find the landing pad and check that it is within bounds.
      const word* ptr = ref->farTarget(segment);
      auto padWords = (ONE + bounded(ref->isDoubleFar())) * POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(boundsCheck(segment, ptr, padWords),
                 "Message contains out-of-bounds far pointer.") {
        return nullptr;
      }

      const WirePointer* pad = reinterpret_cast<const WirePointer*>(ptr);

      // If this is not a double-far then the landing pad is our final pointer.
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target(segment);
      }

      // Landing pad is another far pointer. It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      SegmentReader* newSegment = segment->getArena()->tryGetSegment(pad->farRef.segmentId.get());
      KJ_REQUIRE(newSegment != nullptr,
                 "Message contains double-far pointer to unknown segment.") {
        return nullptr;
      }
      KJ_REQUIRE(pad->kind() == WirePointer::FAR,
                 "Second word of double-far pad must be far pointer.") {
        return nullptr;
      }

      segment = newSegment;
      return pad->farTarget(segment);
    } else {
      KJ_DASSERT(refTarget != nullptr);
      return refTarget;
    }
  }
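
  // A double-far, as handled above, looks like this on the wire (sketch only):
  //
  //   segment A: [ FAR pointer, doubleFar=1 -> (segment B, offset of pad) ]
  //   segment B: [ pad: FAR pointer, doubleFar=0 -> (segment C, offset of object) ]
  //              [ tag: kind + upper 32 bits, offset part unused ]
  //   segment C: [ object... ]
  //
  // On return, `ref` points at the tag in segment B while `segment` and the returned
  // pointer identify the object in segment C.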

  // -----------------------------------------------------------------

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref) {
    // Zero out the pointed-to object. Use when the pointer is about to be overwritten, making
    // the target object no longer reachable.

    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (ref->kind()) {
      case WirePointer::STRUCT:
      case WirePointer::LIST:
        zeroObject(segment, capTable, ref, ref->target());
        break;
      case WirePointer::FAR: {
        segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
        if (segment->isWritable()) {  // Don't zero external data.
          WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));

          if (ref->isDoubleFar()) {
            segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
            if (segment->isWritable()) {
              zeroObject(segment, capTable, pad + 1, pad->farTarget(segment));
            }
            zeroMemory(pad, G(2) * POINTERS);
          } else {
            zeroObject(segment, capTable, pad);
            zeroMemory(pad);
          }
        }
        break;
      }
      case WirePointer::OTHER:
        if (ref->isCapability()) {
#if CAPNP_LITE
          KJ_FAIL_ASSERT("Capability encountered in builder in lite mode?") { break; }
#else  // CAPNP_LITE
          capTable->dropCap(ref->capRef.index.get());
#endif  // CAPNP_LITE, else
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }
  }

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable,
                         WirePointer* tag, word* ptr) {
    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (tag->kind()) {
      case WirePointer::STRUCT: {
        WirePointer* pointerSection =
            reinterpret_cast<WirePointer*>(ptr + tag->structRef.dataSize.get());
        for (auto i: kj::zeroTo(tag->structRef.ptrCount.get())) {
          zeroObject(segment, capTable, pointerSection + i);
        }
        zeroMemory(ptr, tag->structRef.wordSize());
        break;
      }
      case WirePointer::LIST: {
        switch (tag->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            zeroMemory(ptr, roundBitsUpToWords(
                upgradeBound<uint64_t>(tag->listRef.elementCount()) *
                dataBitsPerElement(tag->listRef.elementSize())));
            break;
          }
          case ElementSize::POINTER: {
            WirePointer* typedPtr = reinterpret_cast<WirePointer*>(ptr);
            auto count = tag->listRef.elementCount() * (ONE * POINTERS / ELEMENTS);
            for (auto i: kj::zeroTo(count)) {
              zeroObject(segment, capTable, typedPtr + i);
            }
            zeroMemory(typedPtr, count);
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);

            KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
                "Don't know how to handle non-STRUCT inline composite.");
            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            auto count = elementTag->inlineCompositeListElementCount();
            if (pointerCount > ZERO * POINTERS) {
              word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(pos));
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }

            auto wordsPerElement = elementTag->structRef.wordSize() / ELEMENTS;
            zeroMemory(ptr, assertMaxBits<SEGMENT_WORD_COUNT_BITS>(POINTER_SIZE_IN_WORDS +
                upgradeBound<uint64_t>(count) * wordsPerElement, []() {
                  KJ_FAIL_ASSERT("encountered list pointer in builder which is too large to "
                      "possibly fit in a segment. Bug in builder code?");
                }));
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        KJ_FAIL_ASSERT("Unexpected OTHER pointer.") {
          break;
        }
        break;
    }
  }

  static KJ_ALWAYS_INLINE(
      void zeroPointerAndFars(SegmentBuilder* segment, WirePointer* ref)) {
    // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, but
    // do not zero the object body. Used when upgrading.

    if (ref->kind() == WirePointer::FAR) {
      SegmentBuilder* padSegment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      if (padSegment->isWritable()) {  // Don't zero external data.
        WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(padSegment));
        if (ref->isDoubleFar()) {
          zeroMemory(pad, G(2) * POINTERS);
        } else {
          zeroMemory(pad);
        }
      }
    }

    zeroMemory(ref);
  }

  // -----------------------------------------------------------------

  static MessageSizeCounts totalSize(
      SegmentReader* segment, const WirePointer* ref, int nestingLimit) {
    // Compute the total size of the object pointed to, not counting far pointer overhead.

    MessageSizeCounts result = { ZERO * WORDS, 0 };

    if (ref->isNull()) {
      return result;
    }

    KJ_REQUIRE(nestingLimit > 0, "Message is too deeply-nested.") {
      return result;
    }
    --nestingLimit;

    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, ref->target(segment), segment)) {
      ptr = p;
    } else {
      return result;
    }

    switch (ref->kind()) {
      case WirePointer::STRUCT: {
        KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          return result;
        }
        result.addWords(ref->structRef.wordSize());

        const WirePointer* pointerSection =
            reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get());
        for (auto i: kj::zeroTo(ref->structRef.ptrCount.get())) {
          result += totalSize(segment, pointerSection + i, nestingLimit);
        }
        break;
      }
      case WirePointer::LIST: {
        switch (ref->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            auto totalWords = roundBitsUpToWords(
                upgradeBound<uint64_t>(ref->listRef.elementCount()) *
                dataBitsPerElement(ref->listRef.elementSize()));
            KJ_REQUIRE(boundsCheck(segment, ptr, totalWords),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }
            result.addWords(totalWords);
            break;
          }
          case ElementSize::POINTER: {
            auto count = ref->listRef.elementCount() * (POINTERS / ELEMENTS);

            KJ_REQUIRE(boundsCheck(segment, ptr, count * WORDS_PER_POINTER),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            result.addWords(count * WORDS_PER_POINTER);

            for (auto i: kj::zeroTo(count)) {
              result += totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                  nestingLimit);
            }
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            auto wordCount = ref->listRef.inlineCompositeWordCount();
            KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            const WirePointer* elementTag = reinterpret_cast<const WirePointer*>(ptr);
            auto count = elementTag->inlineCompositeListElementCount();

            KJ_REQUIRE(elementTag->kind() == WirePointer::STRUCT,
                       "Don't know how to handle non-STRUCT inline composite.") {
              return result;
            }

            auto actualSize = elementTag->structRef.wordSize() / ELEMENTS *
                              upgradeBound<uint64_t>(count);
            KJ_REQUIRE(actualSize <= wordCount,
                       "Struct list pointer's elements overran size.") {
              return result;
            }

            // We count the actual size rather than the claimed word count because that's what
            // we'll end up with if we make a copy.
            result.addWords(actualSize + POINTER_SIZE_IN_WORDS);

            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            if (pointerCount > ZERO * POINTERS) {
              const word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  result += totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                      nestingLimit);
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        if (ref->isCapability()) {
          result.capCount++;
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }

    return result;
  }

  // -----------------------------------------------------------------
  // Copy from an unchecked message.

  static KJ_ALWAYS_INLINE(
      void copyStruct(SegmentBuilder* segment, CapTableBuilder* capTable,
                      word* dst, const word* src,
                      StructDataWordCount dataSize, StructPointerCount pointerCount)) {
    copyMemory(dst, src, dataSize);

    const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src + dataSize);
    WirePointer* dstRefs = reinterpret_cast<WirePointer*>(dst + dataSize);

    for (auto i: kj::zeroTo(pointerCount)) {
      SegmentBuilder* subSegment = segment;
      WirePointer* dstRef = dstRefs + i;
      copyMessage(subSegment, capTable, dstRef, srcRefs + i);
    }
  }

  static word* copyMessage(
      SegmentBuilder*& segment, CapTableBuilder* capTable,
      WirePointer*& dst, const WirePointer* src) {
    // Not always-inline because it's recursive.

    switch (src->kind()) {
      case WirePointer::STRUCT: {
        if (src->isNull()) {
          zeroMemory(dst);
          return nullptr;
        } else {
          const word* srcPtr = src->target(nullptr);
          word* dstPtr = allocate(
              dst, segment, capTable, src->structRef.wordSize(), WirePointer::STRUCT, nullptr);

          copyStruct(segment, capTable, dstPtr, srcPtr, src->structRef.dataSize.get(),
                     src->structRef.ptrCount.get());

          dst->structRef.set(src->structRef.dataSize.get(), src->structRef.ptrCount.get());
          return dstPtr;
        }
      }
      case WirePointer::LIST: {
        switch (src->listRef.elementSize()) {
          case ElementSize::VOID:
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            auto wordCount = roundBitsUpToWords(
                upgradeBound<uint64_t>(src->listRef.elementCount()) *
                dataBitsPerElement(src->listRef.elementSize()));
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable, wordCount, WirePointer::LIST, nullptr);
            copyMemory(dstPtr, srcPtr, wordCount);

            dst->listRef.set(src->listRef.elementSize(), src->listRef.elementCount());
            return dstPtr;
          }

          case ElementSize::POINTER: {
            const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src->target(nullptr));
            WirePointer* dstRefs = reinterpret_cast<WirePointer*>(
                allocate(dst, segment, capTable, src->listRef.elementCount() *
                    (ONE * POINTERS / ELEMENTS) * WORDS_PER_POINTER,
                    WirePointer::LIST, nullptr));

            for (auto i: kj::zeroTo(src->listRef.elementCount() * (ONE * POINTERS / ELEMENTS))) {
              SegmentBuilder* subSegment = segment;
              WirePointer* dstRef = dstRefs + i;
              copyMessage(subSegment, capTable, dstRef, srcRefs + i);
            }

            dst->listRef.set(ElementSize::POINTER, src->listRef.elementCount());
            return reinterpret_cast<word*>(dstRefs);
          }

          case ElementSize::INLINE_COMPOSITE: {
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable,
                assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
                    src->listRef.inlineCompositeWordCount() + POINTER_SIZE_IN_WORDS,
                    []() { KJ_FAIL_ASSERT("list too big to fit in a segment"); }),
                WirePointer::LIST, nullptr);

            dst->listRef.setInlineComposite(src->listRef.inlineCompositeWordCount());

            const WirePointer* srcTag = reinterpret_cast<const WirePointer*>(srcPtr);
            copyMemory(reinterpret_cast<WirePointer*>(dstPtr), srcTag);

            const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
            word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;

            KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
                "INLINE_COMPOSITE of lists is not yet supported.");

            for (auto i KJ_UNUSED: kj::zeroTo(srcTag->inlineCompositeListElementCount())) {
              copyStruct(segment, capTable, dstElement, srcElement,
                         srcTag->structRef.dataSize.get(), srcTag->structRef.ptrCount.get());
              srcElement += srcTag->structRef.wordSize();
              dstElement += srcTag->structRef.wordSize();
            }
            return dstPtr;
          }
        }
        break;
      }
      case WirePointer::OTHER:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain OTHER pointers (e.g. capabilities).");
        break;
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain far pointers.");
        break;
    }

    return nullptr;
  }
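
  // Note that copyMessage() passes nullptr as the segment to src->target() and rejects FAR and
  // OTHER pointers: it is only used to copy from unchecked messages (e.g. compiled-in default
  // values), which by construction are flat single-segment data containing neither far pointers
  // nor capabilities.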

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, WirePointer* src) {
    // Make *dst point to the same object as *src. Both must reside in the same message, but can
    // be in different segments. Not always-inline because this is rarely used.
    //
    // Caller MUST zero out the source pointer after calling this, to make sure no later code
    // mistakenly thinks the source location still owns the object. transferPointer() doesn't do
    // this zeroing itself because many callers transfer several pointers in a loop then zero out
    // the whole section.

    KJ_DASSERT(dst->isNull());
    // We expect the caller to ensure the target is already null, so that nothing is leaked.

    if (src->isNull()) {
      zeroMemory(dst);
    } else if (src->isPositional()) {
      transferPointer(dstSegment, dst, srcSegment, src, src->target());
    } else {
      // Far and other pointers are position-independent, so we can just copy.
      copyMemory(dst, src);
    }
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, const WirePointer* srcTag,
                              word* srcPtr) {
    // Like the other overload, but splits src into a tag and a target. Particularly useful for
    // OrphanBuilder.

    if (dstSegment == srcSegment) {
      // Same segment, so create a direct pointer.

      if (srcTag->kind() == WirePointer::STRUCT && srcTag->structRef.wordSize() == ZERO * WORDS) {
        dst->setKindAndTargetForEmptyStruct();
      } else {
        dst->setKindAndTarget(srcTag->kind(), srcPtr, dstSegment);
      }

      // We can just copy the upper 32 bits. (Use memcpy() to comply with aliasing rules.)
      copyMemory(&dst->upper32Bits, &srcTag->upper32Bits);
    } else {
      // Need to create a far pointer. Try to allocate it in the same segment as the source, so
      // that it doesn't need to be a double-far.

      WirePointer* landingPad =
          reinterpret_cast<WirePointer*>(srcSegment->allocate(G(1) * WORDS));
      if (landingPad == nullptr) {
        // Darn, need a double-far.
        auto allocation = srcSegment->getArena()->allocate(G(2) * WORDS);
        SegmentBuilder* farSegment = allocation.segment;
        landingPad = reinterpret_cast<WirePointer*>(allocation.words);

        landingPad[0].setFar(false, srcSegment->getOffsetTo(srcPtr));
        landingPad[0].farRef.segmentId.set(srcSegment->getSegmentId());

        landingPad[1].setKindWithZeroOffset(srcTag->kind());
        copyMemory(&landingPad[1].upper32Bits, &srcTag->upper32Bits);

        dst->setFar(true, farSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(farSegment->getSegmentId());
      } else {
        // Simple landing pad is just a pointer.
        landingPad->setKindAndTarget(srcTag->kind(), srcPtr, srcSegment);
        copyMemory(&landingPad->upper32Bits, &srcTag->upper32Bits);

        dst->setFar(false, srcSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(srcSegment->getSegmentId());
      }
    }
  }

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructBuilder initStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate space for the new struct. Newly-allocated space is automatically zeroed.
    word* ptr = allocate(ref, segment, capTable, size.total(), WirePointer::STRUCT, orphanArena);

    // Initialize the pointer.
    ref->structRef.set(size);

    // Build the StructBuilder.
    return StructBuilder(segment, capTable, ptr, reinterpret_cast<WirePointer*>(ptr + size.data),
                         size.data * BITS_PER_WORD, size.pointers);
  }
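
  // A freshly initialized struct thus has the layout (sketch):
  //
  //   [ data section: size.data words, zeroed ][ pointer section: size.pointers words, zeroed ]
  //
  // with both section sizes recorded in `ref` so that readers can locate the pointer section.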

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      const word* defaultValue)) {
    return getWritableStructPointer(ref, ref->target(), segment, capTable, size, defaultValue);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      StructSize size, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return initStructPointer(ref, segment, capTable, size, orphanArena);
      }
      refTarget = copyMessage(segment, capTable, ref,
                              reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* oldRef = ref;
    SegmentBuilder* oldSegment = segment;
    word* oldPtr = followFars(oldRef, refTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::STRUCT,
               "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    auto oldDataSize = oldRef->structRef.dataSize.get();
    auto oldPointerCount = oldRef->structRef.ptrCount.get();
    WirePointer* oldPointerSection =
        reinterpret_cast<WirePointer*>(oldPtr + oldDataSize);

    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
      // The space allocated for this struct is too small. Unlike with readers, we can't just
      // run with it and do bounds checks at access time, because how would we handle writes?
      // Instead, we have to copy the struct to a new space now.

      auto newDataSize = kj::max(oldDataSize, size.data);
      auto newPointerCount = kj::max(oldPointerCount, size.pointers);
      auto totalSize = newDataSize + newPointerCount * WORDS_PER_POINTER;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(segment, ref);

      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
      ref->structRef.set(newDataSize, newPointerCount);

      // Copy data section.
      copyMemory(ptr, oldPtr, oldDataSize);

      // Copy pointer section.
      WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(ptr + newDataSize);
      for (auto i: kj::zeroTo(oldPointerCount)) {
        transferPointer(segment, newPointerSection + i, oldSegment, oldPointerSection + i);
      }

      // Zero out old location. This has two purposes:
      // 1) We don't want to leak the original contents of the struct when the message is written
      //    out as it may contain secrets that the caller intends to remove from the new copy.
      // 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
      //    hits the wire.
      zeroMemory(oldPtr, oldDataSize + oldPointerCount * WORDS_PER_POINTER);

      return StructBuilder(segment, capTable, ptr, newPointerSection, newDataSize * BITS_PER_WORD,
                           newPointerCount);
    } else {
      return StructBuilder(oldSegment, capTable, oldPtr, oldPointerSection,
                           oldDataSize * BITS_PER_WORD, oldPointerCount);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, ElementSize elementSize, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
        "Should have called initStructListPointer() instead.");

    auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
        []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });

    auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
    auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
    auto step = bitsPerElementIncludingPointers(elementSize);
    KJ_DASSERT(step * ELEMENTS == (dataSize + pointerCount * BITS_PER_POINTER));

    // Calculate size of the list.
    auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(checkedElementCount) * step);

    // Allocate the list.
    word* ptr = allocate(ref, segment, capTable, wordCount, WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(elementSize, checkedElementCount);

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, step, checkedElementCount,
                       dataSize, pointerCount, elementSize);
  }

  static KJ_ALWAYS_INLINE(ListBuilder initStructListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, StructSize elementSize, BuilderArena* orphanArena = nullptr)) {
    auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
        []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });

    WordsPerElementN<17> wordsPerElement = elementSize.total() / ELEMENTS;

    // Allocate the list, prefixed by a single WirePointer.
    auto wordCount = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
        upgradeBound<uint64_t>(checkedElementCount) * wordsPerElement,
        []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });
    word* ptr = allocate(ref, segment, capTable, POINTER_SIZE_IN_WORDS + wordCount,
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    // INLINE_COMPOSITE lists replace the element count with the word count.
    ref->listRef.setInlineComposite(wordCount);

    // Initialize the list tag.
    reinterpret_cast<WirePointer*>(ptr)->setKindAndInlineCompositeListElementCount(
        WirePointer::STRUCT, checkedElementCount);
    reinterpret_cast<WirePointer*>(ptr)->structRef.set(elementSize);
    ptr += POINTER_SIZE_IN_WORDS;

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, wordsPerElement * BITS_PER_WORD, checkedElementCount,
                       elementSize.data * BITS_PER_WORD, elementSize.pointers,
                       ElementSize::INLINE_COMPOSITE);
  }
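
  // The allocated INLINE_COMPOSITE list thus has the layout (sketch):
  //
  //   [ tag: element count + per-element struct sizes ][ element 0 ][ element 1 ] ...
  //
  // where each element is elementSize.data data words followed by elementSize.pointers
  // pointers, and the list pointer itself records the total word count excluding the tag.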

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      ElementSize elementSize, const word* defaultValue)) {
    return getWritableListPointer(origRef, origRef->target(), origSegment, capTable, elementSize,
                                  defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable, ElementSize elementSize,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
        "Use getWritableStructListPointer() for struct lists.");

    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(elementSize);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // We must verify that the pointer has the right size. Unlike in
    // getWritableStructListPointer(), we never need to "upgrade" the data, because this
    // method is called only for non-struct lists, and there is no allowed upgrade path *to*
    // a non-struct list, only *from* one.

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Called getWritableListPointer() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = ref->listRef.elementSize();

    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // The existing element size is INLINE_COMPOSITE, though we expected a list of primitives.
      // The existing data must have been written with a newer version of the protocol. We
      // therefore never need to upgrade the data in this case, but we do need to validate that it
      // is a valid upgrade from what we expected.

      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      auto dataSize = tag->structRef.dataSize.get();
      auto pointerCount = tag->structRef.ptrCount.get();

      switch (elementSize) {
        case ElementSize::VOID:
          // Anything is a valid upgrade from Void.
          break;

        case ElementSize::BIT:
          KJ_FAIL_REQUIRE(
              "Found struct list where bit list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
          break;

        case ElementSize::BYTE:
        case ElementSize::TWO_BYTES:
        case ElementSize::FOUR_BYTES:
        case ElementSize::EIGHT_BYTES:
          KJ_REQUIRE(dataSize >= ONE * WORDS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          break;

        case ElementSize::POINTER:
          KJ_REQUIRE(pointerCount >= ONE * POINTERS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          // Adjust the pointer to point at the pointer section of each element.
          ptr += dataSize;
          break;

        case ElementSize::INLINE_COMPOSITE:
          KJ_UNREACHABLE;
      }

      // OK, looks valid.

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         dataSize * BITS_PER_WORD, pointerCount, ElementSize::INLINE_COMPOSITE);
    } else {
      auto dataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      auto pointerCount = pointersPerElement(oldSize) * ELEMENTS;

      if (elementSize == ElementSize::BIT) {
        KJ_REQUIRE(oldSize == ElementSize::BIT,
            "Found non-bit list where bit list was expected.") {
          goto useDefault;
        }
      } else {
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where non-bit list was expected.") {
          goto useDefault;
        }
        KJ_REQUIRE(dataSize >= dataBitsPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
        KJ_REQUIRE(pointerCount >= pointersPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
      }

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, oldSize);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue)) {
    return getWritableListPointerAnySize(origRef, origRef->target(), origSegment,
                                         capTable, defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::VOID);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Called getWritableListPointerAnySize() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();

    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE);
    } else {
      auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
      auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, elementSize);
    }
  }
1414
1415 static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
1416 WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
1417 StructSize elementSize, const word* defaultValue)) {
1418 return getWritableStructListPointer(origRef, origRef->target(), origSegment, capTable,
1419 elementSize, defaultValue);
1420 }
1421 static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
1422 WirePointer* origRef, word* origRefTarget,
1423 SegmentBuilder* origSegment, CapTableBuilder* capTable,
1424 StructSize elementSize, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
1425 if (origRef->isNull()) {
1426 useDefault:
1427 if (defaultValue == nullptr ||
1428 reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
1429 return ListBuilder(ElementSize::INLINE_COMPOSITE);
1430 }
1431 origRefTarget = copyMessage(
1432 origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
1433 defaultValue = nullptr; // If the default value is itself invalid, don't use it again.
1434 }
1435
    // We must verify that the existing list's element sizes match what the schema expects,
    // and upgrade it (copying into a larger allocation) if they do not.
1437
1438 WirePointer* oldRef = origRef;
1439 SegmentBuilder* oldSegment = origSegment;
1440 word* oldPtr = followFars(oldRef, origRefTarget, oldSegment);
1441
1442 KJ_REQUIRE(oldRef->kind() == WirePointer::LIST,
1443 "Called getList{Field,Element}() but existing pointer is not a list.") {
1444 goto useDefault;
1445 }
1446
1447 ElementSize oldSize = oldRef->listRef.elementSize();
1448
1449 if (oldSize == ElementSize::INLINE_COMPOSITE) {
1450 // Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.
1451
1452 WirePointer* oldTag = reinterpret_cast<WirePointer*>(oldPtr);
1453 oldPtr += POINTER_SIZE_IN_WORDS;
1454 KJ_REQUIRE(oldTag->kind() == WirePointer::STRUCT,
1455 "INLINE_COMPOSITE list with non-STRUCT elements not supported.") {
1456 goto useDefault;
1457 }
1458
1459 auto oldDataSize = oldTag->structRef.dataSize.get();
1460 auto oldPointerCount = oldTag->structRef.ptrCount.get();
1461 auto oldStep = (oldDataSize + oldPointerCount * WORDS_PER_POINTER) / ELEMENTS;
1462
1463 auto elementCount = oldTag->inlineCompositeListElementCount();
1464
1465 if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
1466 // Old size is at least as large as we need. Ship it.
1467 return ListBuilder(oldSegment, capTable, oldPtr, oldStep * BITS_PER_WORD, elementCount,
1468 oldDataSize * BITS_PER_WORD, oldPointerCount,
1469 ElementSize::INLINE_COMPOSITE);
1470 }
1471
1472 // The structs in this list are smaller than expected, probably written using an older
1473 // version of the protocol. We need to make a copy and expand them.
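      //
      // Sketch of the expansion (illustrative values): if the old elements had one data word
      // and no pointers, while the schema now wants two data words and one pointer, each
      // element is rewritten as
      //
      //     old element:  [data0]
      //     new element:  [data0][zero word][null pointer]
      //
      // The old data is copied verbatim and the added words start out zeroed (freshly
      // allocated space is already zero), so existing field values are preserved.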
1474
1475 auto newDataSize = kj::max(oldDataSize, elementSize.data);
1476 auto newPointerCount = kj::max(oldPointerCount, elementSize.pointers);
1477 auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
1478
1479 auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
1480 newStep * upgradeBound<uint64_t>(elementCount),
1481 []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });
1482
      // Zero out the pointer (and any far-pointer landing pad) but leave the old object's
      // body intact for now -- we still need to copy its contents. It is zeroed below.
1484 zeroPointerAndFars(origSegment, origRef);
1485
1486 word* newPtr = allocate(origRef, origSegment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
1487 WirePointer::LIST, orphanArena);
1488 origRef->listRef.setInlineComposite(totalSize);
1489
1490 WirePointer* newTag = reinterpret_cast<WirePointer*>(newPtr);
1491 newTag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
1492 newTag->structRef.set(newDataSize, newPointerCount);
1493 newPtr += POINTER_SIZE_IN_WORDS;
1494
1495 word* src = oldPtr;
1496 word* dst = newPtr;
1497 for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
1498 // Copy data section.
1499 copyMemory(dst, src, oldDataSize);
1500
1501 // Copy pointer section.
1502 WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(dst + newDataSize);
1503 WirePointer* oldPointerSection = reinterpret_cast<WirePointer*>(src + oldDataSize);
1504 for (auto j: kj::zeroTo(oldPointerCount)) {
1505 transferPointer(origSegment, newPointerSection + j, oldSegment, oldPointerSection + j);
1506 }
1507
1508 dst += newStep * (ONE * ELEMENTS);
1509 src += oldStep * (ONE * ELEMENTS);
1510 }
1511
1512 auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
1513 oldStep * upgradeBound<uint64_t>(elementCount),
1514 []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });
1515
1516 // Zero out old location. See explanation in getWritableStructPointer().
1517 // Make sure to include the tag word.
1518 zeroMemory(oldPtr - POINTER_SIZE_IN_WORDS, oldSize + POINTER_SIZE_IN_WORDS);
1519
1520 return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
1521 newDataSize * BITS_PER_WORD, newPointerCount,
1522 ElementSize::INLINE_COMPOSITE);
1523 } else {
1524 // We're upgrading from a non-struct list.
1525
1526 auto oldDataSize = dataBitsPerElement(oldSize) * ELEMENTS;
1527 auto oldPointerCount = pointersPerElement(oldSize) * ELEMENTS;
1528 auto oldStep = (oldDataSize + oldPointerCount * BITS_PER_POINTER) / ELEMENTS;
1529 auto elementCount = oldRef->listRef.elementCount();
1530
1531 if (oldSize == ElementSize::VOID) {
1532 // Nothing to copy, just allocate a new list.
1533 return initStructListPointer(origRef, origSegment, capTable, elementCount, elementSize);
1534 } else {
1535 // Upgrading to an inline composite list.
1536
1537 KJ_REQUIRE(oldSize != ElementSize::BIT,
1538 "Found bit list where struct list was expected; upgrading boolean lists to structs "
1539 "is no longer supported.") {
1540 goto useDefault;
1541 }
1542
1543 auto newDataSize = elementSize.data;
1544 auto newPointerCount = elementSize.pointers;
1545
1546 if (oldSize == ElementSize::POINTER) {
1547 newPointerCount = kj::max(newPointerCount, ONE * POINTERS);
1548 } else {
1549 // Old list contains data elements, so we need at least 1 word of data.
1550 newDataSize = kj::max(newDataSize, ONE * WORDS);
1551 }
1552
1553 auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
1554 auto totalWords = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
1555 newStep * upgradeBound<uint64_t>(elementCount),
1556 []() {KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size");});
1557
        // Zero out the pointer (and any far-pointer landing pad) but leave the old object's
        // body intact for now -- we still need to copy its contents. It is zeroed below.
1559 zeroPointerAndFars(origSegment, origRef);
1560
1561 word* newPtr = allocate(origRef, origSegment, capTable, totalWords + POINTER_SIZE_IN_WORDS,
1562 WirePointer::LIST, orphanArena);
1563 origRef->listRef.setInlineComposite(totalWords);
1564
1565 WirePointer* tag = reinterpret_cast<WirePointer*>(newPtr);
1566 tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
1567 tag->structRef.set(newDataSize, newPointerCount);
1568 newPtr += POINTER_SIZE_IN_WORDS;
1569
1570 if (oldSize == ElementSize::POINTER) {
1571 WirePointer* dst = reinterpret_cast<WirePointer*>(newPtr + newDataSize);
1572 WirePointer* src = reinterpret_cast<WirePointer*>(oldPtr);
1573 for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
1574 transferPointer(origSegment, dst, oldSegment, src);
1575 dst += newStep / WORDS_PER_POINTER * (ONE * ELEMENTS);
1576 ++src;
1577 }
1578 } else {
1579 byte* dst = reinterpret_cast<byte*>(newPtr);
1580 byte* src = reinterpret_cast<byte*>(oldPtr);
1581 auto newByteStep = newStep * (ONE * ELEMENTS) * BYTES_PER_WORD;
1582 auto oldByteStep = oldDataSize / BITS_PER_BYTE;
1583 for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
1584 copyMemory(dst, src, oldByteStep);
1585 src += oldByteStep;
1586 dst += newByteStep;
1587 }
1588 }
1589
1590 auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
1591 roundBitsUpToWords(oldStep * upgradeBound<uint64_t>(elementCount)),
1592 []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });
1593
1594 // Zero out old location. See explanation in getWritableStructPointer().
1595 zeroMemory(oldPtr, oldSize);
1596
1597 return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
1598 newDataSize * BITS_PER_WORD, newPointerCount,
1599 ElementSize::INLINE_COMPOSITE);
1600 }
1601 }
1602 }
1603
1604 static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> initTextPointer(
1605 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, TextSize size,
1606 BuilderArena* orphanArena = nullptr)) {
1607 // The byte list must include a NUL terminator.
1608 auto byteSize = size + ONE * BYTES;
1609
1610 // Allocate the space.
1611 word* ptr = allocate(
1612 ref, segment, capTable, roundBytesUpToWords(byteSize), WirePointer::LIST, orphanArena);
1613
1614 // Initialize the pointer.
1615 ref->listRef.set(ElementSize::BYTE, byteSize * (ONE * ELEMENTS / BYTES));
1616
1617 // Build the Text::Builder. This will initialize the NUL terminator.
1618 return { segment, Text::Builder(reinterpret_cast<char*>(ptr), unbound(size / BYTES)) };
1619 }
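
  // Illustration (not part of the build): on the wire, Text is just a BYTE list whose final
  // element is a NUL terminator. initTextPointer() with size == 5 therefore writes a list
  // pointer with elementCount == 6 and a one-word payload:
  //
  //     'h' 'e' 'l' 'l' 'o' '\0' <two bytes of zero padding>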
1620
1621 static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> setTextPointer(
1622 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Text::Reader value,
1623 BuilderArena* orphanArena = nullptr)) {
1624 TextSize size = assertMax<MAX_TEXT_SIZE>(bounded(value.size()),
1625 []() { KJ_FAIL_REQUIRE("text blob too big"); }) * BYTES;
1626
1627 auto allocation = initTextPointer(ref, segment, capTable, size, orphanArena);
1628 copyMemory(allocation.value.begin(), value);
1629 return allocation;
1630 }
1631
1632 static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
1633 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
1634 const void* defaultValue, TextSize defaultSize)) {
    return getWritableTextPointer(ref, ref->target(), segment, capTable,
                                  defaultValue, defaultSize);
1636 }
1637
1638 static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
1639 WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
1640 const void* defaultValue, TextSize defaultSize)) {
1641 if (ref->isNull()) {
1642 useDefault:
1643 if (defaultSize == ZERO * BYTES) {
1644 return nullptr;
1645 } else {
1646 Text::Builder builder = initTextPointer(ref, segment, capTable, defaultSize).value;
1647 copyMemory(builder.asBytes().begin(), reinterpret_cast<const byte*>(defaultValue),
1648 defaultSize);
1649 return builder;
1650 }
1651 } else {
1652 word* ptr = followFars(ref, refTarget, segment);
1653 byte* bptr = reinterpret_cast<byte*>(ptr);
1654
1655 KJ_REQUIRE(ref->kind() == WirePointer::LIST,
1656 "Called getText{Field,Element}() but existing pointer is not a list.") {
1657 goto useDefault;
1658 }
1659 KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
1660 "Called getText{Field,Element}() but existing list pointer is not byte-sized.") {
1661 goto useDefault;
1662 }
1663
1664 auto maybeSize = trySubtract(ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS),
1665 ONE * BYTES);
1666 KJ_IF_MAYBE(size, maybeSize) {
1667 KJ_REQUIRE(*(bptr + *size) == '\0', "Text blob missing NUL terminator.") {
1668 goto useDefault;
1669 }
1670
1671 return Text::Builder(reinterpret_cast<char*>(bptr), unbound(*size / BYTES));
1672 } else {
1673 KJ_FAIL_REQUIRE("zero-size blob can't be text (need NUL terminator)") {
1674 goto useDefault;
        }
1676 }
1677 }
1678 }
1679
1680 static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> initDataPointer(
1681 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, BlobSize size,
1682 BuilderArena* orphanArena = nullptr)) {
1683 // Allocate the space.
1684 word* ptr = allocate(ref, segment, capTable, roundBytesUpToWords(size),
1685 WirePointer::LIST, orphanArena);
1686
1687 // Initialize the pointer.
1688 ref->listRef.set(ElementSize::BYTE, size * (ONE * ELEMENTS / BYTES));
1689
1690 // Build the Data::Builder.
1691 return { segment, Data::Builder(reinterpret_cast<byte*>(ptr), unbound(size / BYTES)) };
1692 }
1693
1694 static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> setDataPointer(
1695 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Data::Reader value,
1696 BuilderArena* orphanArena = nullptr)) {
1697 BlobSize size = assertMaxBits<BLOB_SIZE_BITS>(bounded(value.size()),
        []() { KJ_FAIL_REQUIRE("data blob too big"); }) * BYTES;
1699
1700 auto allocation = initDataPointer(ref, segment, capTable, size, orphanArena);
1701 copyMemory(allocation.value.begin(), value);
1702 return allocation;
1703 }
1704
1705 static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
1706 WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
1707 const void* defaultValue, BlobSize defaultSize)) {
1708 return getWritableDataPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
1709 }
1710
1711 static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
1712 WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
1713 const void* defaultValue, BlobSize defaultSize)) {
1714 if (ref->isNull()) {
1715 useDefault:
1716 if (defaultSize == ZERO * BYTES) {
1717 return nullptr;
1718 } else {
1719 Data::Builder builder = initDataPointer(ref, segment, capTable, defaultSize).value;
1720 copyMemory(builder.begin(), reinterpret_cast<const byte*>(defaultValue), defaultSize);
1721 return builder;
1722 }
1723 } else {
1724 word* ptr = followFars(ref, refTarget, segment);
1725
1726 KJ_REQUIRE(ref->kind() == WirePointer::LIST,
1727 "Called getData{Field,Element}() but existing pointer is not a list.") {
1728 goto useDefault;
1729 }
1730 KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
1731 "Called getData{Field,Element}() but existing list pointer is not byte-sized.") {
1732 goto useDefault;
1733 }
1734
1735 return Data::Builder(reinterpret_cast<byte*>(ptr),
1736 unbound(ref->listRef.elementCount() / ELEMENTS));
1737 }
1738 }
1739
1740 static SegmentAnd<word*> setStructPointer(
1741 SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, StructReader value,
1742 BuilderArena* orphanArena = nullptr, bool canonical = false) {
1743 auto dataSize = roundBitsUpToBytes(value.dataSize);
1744 auto ptrCount = value.pointerCount;
1745
1746 if (canonical) {
      // A StructReader's data size is normally a whole number of bytes; the only sub-byte
      // width it can legitimately report is the special 1-bit case. Anything else would be
      // a bug, but let's be safe.
1748 KJ_REQUIRE((value.dataSize == ONE * BITS)
1749 || (value.dataSize % BITS_PER_BYTE == ZERO * BITS));
1750
1751 if (value.dataSize == ONE * BITS) {
        // Special truncation case: a 1-bit struct whose bit is false canonicalizes to an
        // empty data section.
1753 if (!value.getDataField<bool>(ZERO * ELEMENTS)) {
1754 dataSize = ZERO * BYTES;
1755 }
1756 } else {
1757 // Truncate the data section
1758 auto data = value.getDataSectionAsBlob();
1759 auto end = data.end();
1760 while (end > data.begin() && end[-1] == 0) --end;
1761 dataSize = intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD);
1762 }
1763
1764 // Truncate pointer section
1765 const WirePointer* ptr = value.pointers + ptrCount;
1766 while (ptr > value.pointers && ptr[-1].isNull()) --ptr;
1767 ptrCount = intervalLength(value.pointers, ptr, MAX_STRUCT_POINTER_COUNT);
1768 }
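
    // Example of the truncation above (illustrative): a struct with data words {0x01, 0x00}
    // and pointer fields {non-null, null} is emitted with dataWords == 1 and ptrCount == 1;
    // the trailing zero word and trailing null pointer are simply not written. This is what
    // makes canonical encodings of equal values byte-for-byte identical.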
1769
1770 auto dataWords = roundBytesUpToWords(dataSize);
1771
1772 auto totalSize = dataWords + ptrCount * WORDS_PER_POINTER;
1773
1774 word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
1775 ref->structRef.set(dataWords, ptrCount);
1776
1777 if (value.dataSize == ONE * BITS) {
1778 // Data size could be made 0 by truncation
1779 if (dataSize != ZERO * BYTES) {
1780 *reinterpret_cast<char*>(ptr) = value.getDataField<bool>(ZERO * ELEMENTS);
1781 }
1782 } else {
1783 copyMemory(reinterpret_cast<byte*>(ptr),
1784 reinterpret_cast<const byte*>(value.data),
1785 dataSize);
1786 }
1787
1788 WirePointer* pointerSection = reinterpret_cast<WirePointer*>(ptr + dataWords);
1789 for (auto i: kj::zeroTo(ptrCount)) {
1790 copyPointer(segment, capTable, pointerSection + i,
1791 value.segment, value.capTable, value.pointers + i,
1792 value.nestingLimit, nullptr, canonical);
1793 }
1794
1795 return { segment, ptr };
1796 }
1797
1798#if !CAPNP_LITE
1799 static void setCapabilityPointer(
1800 SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref,
1801 kj::Own<ClientHook>&& cap) {
1802 if (!ref->isNull()) {
1803 zeroObject(segment, capTable, ref);
1804 }
1805 if (cap->isNull()) {
1806 zeroMemory(ref);
1807 } else {
1808 ref->setCap(capTable->injectCap(kj::mv(cap)));
1809 }
1810 }
1811#endif // !CAPNP_LITE
1812
1813 static SegmentAnd<word*> setListPointer(
1814 SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, ListReader value,
1815 BuilderArena* orphanArena = nullptr, bool canonical = false) {
1816 auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
1817 roundBitsUpToWords(upgradeBound<uint64_t>(value.elementCount) * value.step),
1818 []() { KJ_FAIL_ASSERT("encountered impossibly long struct list ListReader"); });
1819
1820 if (value.elementSize != ElementSize::INLINE_COMPOSITE) {
1821 // List of non-structs.
1822 word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::LIST, orphanArena);
1823
1824 if (value.elementSize == ElementSize::POINTER) {
1825 // List of pointers.
1826 ref->listRef.set(ElementSize::POINTER, value.elementCount);
1827 for (auto i: kj::zeroTo(value.elementCount * (ONE * POINTERS / ELEMENTS))) {
1828 copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i,
1829 value.segment, value.capTable,
1830 reinterpret_cast<const WirePointer*>(value.ptr) + i,
1831 value.nestingLimit, nullptr, canonical);
1832 }
1833 } else {
1834 // List of data.
1835 ref->listRef.set(value.elementSize, value.elementCount);
1836
1837 auto wholeByteSize =
1838 assertMax(MAX_SEGMENT_WORDS * BYTES_PER_WORD,
1839 upgradeBound<uint64_t>(value.elementCount) * value.step / BITS_PER_BYTE,
1840 []() { KJ_FAIL_ASSERT("encountered impossibly long data ListReader"); });
1841 copyMemory(reinterpret_cast<byte*>(ptr), value.ptr, wholeByteSize);
1842 auto leftoverBits =
1843 (upgradeBound<uint64_t>(value.elementCount) * value.step) % BITS_PER_BYTE;
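        // E.g. for a 12-element BIT list (illustrative): wholeByteSize == 1 and
        // leftoverBits == 4, so the final byte is copied through the mask 0x0f and the
        // source's unused high bits can't leak into the destination.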
1844 if (leftoverBits > ZERO * BITS) {
1845 // We need to copy a partial byte.
1846 uint8_t mask = (1 << unbound(leftoverBits / BITS)) - 1;
1847 *((reinterpret_cast<byte*>(ptr)) + wholeByteSize) = mask & *(value.ptr + wholeByteSize);
1848 }
1849 }
1850
1851 return { segment, ptr };
1852 } else {
1853 // List of structs.
1854 StructDataWordCount declDataSize = value.structDataSize / BITS_PER_WORD;
1855 StructPointerCount declPointerCount = value.structPointerCount;
1856
1857 StructDataWordCount dataSize = ZERO * WORDS;
1858 StructPointerCount ptrCount = ZERO * POINTERS;
1859
1860 if (canonical) {
1861 for (auto i: kj::zeroTo(value.elementCount)) {
1862 auto element = value.getStructElement(i);
1863
1864 // Truncate the data section
1865 auto data = element.getDataSectionAsBlob();
1866 auto end = data.end();
1867 while (end > data.begin() && end[-1] == 0) --end;
1868 dataSize = kj::max(dataSize, roundBytesUpToWords(
1869 intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD)));
1870
1871 // Truncate pointer section
1872 const WirePointer* ptr = element.pointers + element.pointerCount;
1873 while (ptr > element.pointers && ptr[-1].isNull()) --ptr;
1874 ptrCount = kj::max(ptrCount,
1875 intervalLength(element.pointers, ptr, MAX_STRUCT_POINTER_COUNT));
1876 }
1877 auto newTotalSize = (dataSize + upgradeBound<uint64_t>(ptrCount) * WORDS_PER_POINTER)
1878 / ELEMENTS * value.elementCount;
1879 KJ_ASSERT(newTotalSize <= totalSize); // we've only removed data!
1880 totalSize = assumeMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(newTotalSize);
1881 } else {
1882 dataSize = declDataSize;
1883 ptrCount = declPointerCount;
1884 }
1885
1886 KJ_DASSERT(value.structDataSize % BITS_PER_WORD == ZERO * BITS);
1887 word* ptr = allocate(ref, segment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
1888 WirePointer::LIST, orphanArena);
1889 ref->listRef.setInlineComposite(totalSize);
1890
1891 WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
1892 tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, value.elementCount);
1893 tag->structRef.set(dataSize, ptrCount);
1894 word* dst = ptr + POINTER_SIZE_IN_WORDS;
1895
1896 const word* src = reinterpret_cast<const word*>(value.ptr);
1897 for (auto i KJ_UNUSED: kj::zeroTo(value.elementCount)) {
1898 copyMemory(dst, src, dataSize);
1899 dst += dataSize;
1900 src += declDataSize;
1901
1902 for (auto j: kj::zeroTo(ptrCount)) {
1903 copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(dst) + j,
1904 value.segment, value.capTable, reinterpret_cast<const WirePointer*>(src) + j,
1905 value.nestingLimit, nullptr, canonical);
1906 }
1907 dst += ptrCount * WORDS_PER_POINTER;
1908 src += declPointerCount * WORDS_PER_POINTER;
1909 }
1910
1911 return { segment, ptr };
1912 }
1913 }
1914
1915 static KJ_ALWAYS_INLINE(SegmentAnd<word*> copyPointer(
1916 SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
1917 SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
1918 int nestingLimit, BuilderArena* orphanArena = nullptr,
1919 bool canonical = false)) {
1920 return copyPointer(dstSegment, dstCapTable, dst,
1921 srcSegment, srcCapTable, src, src->target(srcSegment),
1922 nestingLimit, orphanArena, canonical);
1923 }
1924
1925 static SegmentAnd<word*> copyPointer(
1926 SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
1927 SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
1928 const word* srcTarget, int nestingLimit,
1929 BuilderArena* orphanArena = nullptr, bool canonical = false) {
1930 // Deep-copy the object pointed to by src into dst. It turns out we can't reuse
1931 // readStructPointer(), etc. because they do type checking whereas here we want to accept any
1932 // valid pointer.
1933
1934 if (src->isNull()) {
1935 useDefault:
1936 if (!dst->isNull()) {
1937 zeroObject(dstSegment, dstCapTable, dst);
1938 zeroMemory(dst);
1939 }
1940 return { dstSegment, nullptr };
1941 }
1942
1943 const word* ptr;
1944 KJ_IF_MAYBE(p, WireHelpers::followFars(src, srcTarget, srcSegment)) {
1945 ptr = p;
1946 } else {
1947 goto useDefault;
1948 }
1949
1950 switch (src->kind()) {
1951 case WirePointer::STRUCT:
1952 KJ_REQUIRE(nestingLimit > 0,
1953 "Message is too deeply-nested or contains cycles. See capnp::ReaderOptions.") {
1954 goto useDefault;
1955 }
1956
1957 KJ_REQUIRE(boundsCheck(srcSegment, ptr, src->structRef.wordSize()),
1958 "Message contained out-of-bounds struct pointer.") {
1959 goto useDefault;
1960 }
1961 return setStructPointer(dstSegment, dstCapTable, dst,
1962 StructReader(srcSegment, srcCapTable, ptr,
1963 reinterpret_cast<const WirePointer*>(ptr + src->structRef.dataSize.get()),
1964 src->structRef.dataSize.get() * BITS_PER_WORD,
1965 src->structRef.ptrCount.get(),
1966 nestingLimit - 1),
1967 orphanArena, canonical);
1968
1969 case WirePointer::LIST: {
1970 ElementSize elementSize = src->listRef.elementSize();
1971
1972 KJ_REQUIRE(nestingLimit > 0,
1973 "Message is too deeply-nested or contains cycles. See capnp::ReaderOptions.") {
1974 goto useDefault;
1975 }
1976
1977 if (elementSize == ElementSize::INLINE_COMPOSITE) {
1978 auto wordCount = src->listRef.inlineCompositeWordCount();
1979 const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
1980
1981 KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
1982 "Message contains out-of-bounds list pointer.") {
1983 goto useDefault;
1984 }
1985
1986 ptr += POINTER_SIZE_IN_WORDS;
1987
1988 KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
1989 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
1990 goto useDefault;
1991 }
1992
1993 auto elementCount = tag->inlineCompositeListElementCount();
1994 auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;
1995
1996 KJ_REQUIRE(wordsPerElement * upgradeBound<uint64_t>(elementCount) <= wordCount,
1997 "INLINE_COMPOSITE list's elements overrun its word count.") {
1998 goto useDefault;
1999 }
2000
2001 if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
2002 // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
2003 // without having sent actual data.
2004 KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
2005 "Message contains amplified list pointer.") {
2006 goto useDefault;
2007 }
2008 }
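
          // Without amplifiedRead(), a single-word message could claim, say, 2^29 zero-sized
          // elements and force every consumer that copies it into 2^29 loop iterations.
          // Charging the claimed element count against the read limit defeats this
          // amplification attack.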
2009
2010 return setListPointer(dstSegment, dstCapTable, dst,
2011 ListReader(srcSegment, srcCapTable, ptr,
2012 elementCount, wordsPerElement * BITS_PER_WORD,
2013 tag->structRef.dataSize.get() * BITS_PER_WORD,
2014 tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
2015 nestingLimit - 1),
2016 orphanArena, canonical);
2017 } else {
2018 auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
2019 auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
2020 auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
2021 auto elementCount = src->listRef.elementCount();
2022 auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);
2023
2024 KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount),
2025 "Message contains out-of-bounds list pointer.") {
2026 goto useDefault;
2027 }
2028
2029 if (elementSize == ElementSize::VOID) {
2030 // Watch out for lists of void, which can claim to be arbitrarily large without having
2031 // sent actual data.
2032 KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
2033 "Message contains amplified list pointer.") {
2034 goto useDefault;
2035 }
2036 }
2037
2038 return setListPointer(dstSegment, dstCapTable, dst,
2039 ListReader(srcSegment, srcCapTable, ptr, elementCount, step, dataSize, pointerCount,
2040 elementSize, nestingLimit - 1),
2041 orphanArena, canonical);
2042 }
2043 }
2044
2045 case WirePointer::FAR:
2046 KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
2047 goto useDefault;
2048 }
2049
2050 case WirePointer::OTHER: {
2051 KJ_REQUIRE(src->isCapability(), "Unknown pointer type.") {
2052 goto useDefault;
2053 }
2054
2055 if (canonical) {
2056 KJ_FAIL_REQUIRE("Cannot create a canonical message with a capability") {
2057 break;
2058 }
2059 }
2060#if !CAPNP_LITE
2061 KJ_IF_MAYBE(cap, srcCapTable->extractCap(src->capRef.index.get())) {
2062 setCapabilityPointer(dstSegment, dstCapTable, dst, kj::mv(*cap));
2063 // Return dummy non-null pointer so OrphanBuilder doesn't end up null.
2064 return { dstSegment, reinterpret_cast<word*>(1) };
2065 } else {
2066#endif // !CAPNP_LITE
2067 KJ_FAIL_REQUIRE("Message contained invalid capability pointer.") {
2068 goto useDefault;
2069 }
2070#if !CAPNP_LITE
2071 }
2072#endif // !CAPNP_LITE
2073 }
2074 }
2075
2076 KJ_UNREACHABLE;
2077 }
2078
2079 static void adopt(SegmentBuilder* segment, CapTableBuilder* capTable,
2080 WirePointer* ref, OrphanBuilder&& value) {
2081 KJ_REQUIRE(value.segment == nullptr || value.segment->getArena() == segment->getArena(),
2082 "Adopted object must live in the same message.");
2083
2084 if (!ref->isNull()) {
2085 zeroObject(segment, capTable, ref);
2086 }
2087
2088 if (value == nullptr) {
2089 // Set null.
2090 zeroMemory(ref);
2091 } else if (value.tagAsPtr()->isPositional()) {
2092 WireHelpers::transferPointer(segment, ref, value.segment, value.tagAsPtr(), value.location);
2093 } else {
2094 // FAR and OTHER pointers are position-independent, so we can just copy.
2095 copyMemory(ref, value.tagAsPtr());
2096 }
2097
2098 // Take ownership away from the OrphanBuilder.
2099 zeroMemory(value.tagAsPtr());
2100 value.location = nullptr;
2101 value.segment = nullptr;
2102 }
2103
2104 static OrphanBuilder disown(SegmentBuilder* segment, CapTableBuilder* capTable,
2105 WirePointer* ref) {
2106 word* location;
2107
2108 if (ref->isNull()) {
2109 location = nullptr;
2110 } else if (ref->kind() == WirePointer::OTHER) {
2111 KJ_REQUIRE(ref->isCapability(), "Unknown pointer type.") { break; }
2112 location = reinterpret_cast<word*>(1); // dummy so that it is non-null
2113 } else {
2114 WirePointer* refCopy = ref;
2115 location = followFarsNoWritableCheck(refCopy, ref->target(), segment);
2116 }
2117
2118 OrphanBuilder result(ref, segment, capTable, location);
2119
2120 if (!ref->isNull() && ref->isPositional()) {
2121 result.tagAsPtr()->setKindForOrphan(ref->kind());
2122 }
2123
2124 // Zero out the pointer that was disowned.
2125 zeroMemory(ref);
2126
2127 return result;
2128 }
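
  // The two functions above are the internal half of the public orphan API. A usage sketch
  // (hypothetical schema with a Text field `name`; see orphan.h for the real interface):
  //
  //     capnp::MallocMessageBuilder message;
  //     auto root = message.initRoot<MyStruct>();
  //     auto orphan = message.getOrphanage().newOrphanCopy(capnp::Text::Reader("hello"));
  //     root.adoptName(kj::mv(orphan));                     // lands in WireHelpers::adopt()
  //     capnp::Orphan<capnp::Text> o = root.disownName();   // lands in WireHelpers::disown()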
2129
2130 // -----------------------------------------------------------------
2131
2132 static KJ_ALWAYS_INLINE(StructReader readStructPointer(
2133 SegmentReader* segment, CapTableReader* capTable,
2134 const WirePointer* ref, const word* defaultValue,
2135 int nestingLimit)) {
2136 return readStructPointer(segment, capTable, ref, ref->target(segment),
2137 defaultValue, nestingLimit);
2138 }
2139
2140 static KJ_ALWAYS_INLINE(StructReader readStructPointer(
2141 SegmentReader* segment, CapTableReader* capTable,
2142 const WirePointer* ref, const word* refTarget,
2143 const word* defaultValue, int nestingLimit)) {
2144 if (ref->isNull()) {
2145 useDefault:
2146 if (defaultValue == nullptr ||
2147 reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
2148 return StructReader();
2149 }
2150 segment = nullptr;
2151 ref = reinterpret_cast<const WirePointer*>(defaultValue);
2152 refTarget = ref->target(segment);
2153 defaultValue = nullptr; // If the default value is itself invalid, don't use it again.
2154 }
2155
2156 KJ_REQUIRE(nestingLimit > 0,
2157 "Message is too deeply-nested or contains cycles. See capnp::ReaderOptions.") {
2158 goto useDefault;
2159 }
2160
2161 const word* ptr;
2162 KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
2163 ptr = p;
2164 } else {
2165 goto useDefault;
2166 }
2167
2168 KJ_REQUIRE(ref->kind() == WirePointer::STRUCT,
2169 "Message contains non-struct pointer where struct pointer was expected.") {
2170 goto useDefault;
2171 }
2172
2173 KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
2174 "Message contained out-of-bounds struct pointer.") {
2175 goto useDefault;
2176 }
2177
2178 return StructReader(
2179 segment, capTable,
2180 ptr, reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get()),
2181 ref->structRef.dataSize.get() * BITS_PER_WORD,
2182 ref->structRef.ptrCount.get(),
2183 nestingLimit - 1);
2184 }
2185
2186#if !CAPNP_LITE
2187 static KJ_ALWAYS_INLINE(kj::Own<ClientHook> readCapabilityPointer(
2188 SegmentReader* segment, CapTableReader* capTable,
2189 const WirePointer* ref, int nestingLimit)) {
2190 kj::Maybe<kj::Own<ClientHook>> maybeCap;
2191
2192 KJ_REQUIRE(brokenCapFactory != nullptr,
2193 "Trying to read capabilities without ever having created a capability context. "
2194 "To read capabilities from a message, you must imbue it with CapReaderContext, or "
2195 "use the Cap'n Proto RPC system.");
2196
2197 if (ref->isNull()) {
2198 return brokenCapFactory->newNullCap();
2199 } else if (!ref->isCapability()) {
2200 KJ_FAIL_REQUIRE(
2201 "Message contains non-capability pointer where capability pointer was expected.") {
2202 break;
2203 }
2204 return brokenCapFactory->newBrokenCap(
2205 "Calling capability extracted from a non-capability pointer.");
2206 } else KJ_IF_MAYBE(cap, capTable->extractCap(ref->capRef.index.get())) {
2207 return kj::mv(*cap);
2208 } else {
2209 KJ_FAIL_REQUIRE("Message contains invalid capability pointer.") {
2210 break;
2211 }
2212 return brokenCapFactory->newBrokenCap("Calling invalid capability pointer.");
2213 }
2214 }
2215#endif // !CAPNP_LITE
2216
2217 static KJ_ALWAYS_INLINE(ListReader readListPointer(
2218 SegmentReader* segment, CapTableReader* capTable,
2219 const WirePointer* ref, const word* defaultValue,
2220 ElementSize expectedElementSize, int nestingLimit, bool checkElementSize = true)) {
2221 return readListPointer(segment, capTable, ref, ref->target(segment), defaultValue,
2222 expectedElementSize, nestingLimit, checkElementSize);
2223 }
2224
2225 static KJ_ALWAYS_INLINE(ListReader readListPointer(
2226 SegmentReader* segment, CapTableReader* capTable,
2227 const WirePointer* ref, const word* refTarget,
2228 const word* defaultValue, ElementSize expectedElementSize, int nestingLimit,
2229 bool checkElementSize = true)) {
2230 if (ref->isNull()) {
2231 useDefault:
2232 if (defaultValue == nullptr ||
2233 reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
2234 return ListReader(expectedElementSize);
2235 }
2236 segment = nullptr;
2237 ref = reinterpret_cast<const WirePointer*>(defaultValue);
2238 refTarget = ref->target(segment);
2239 defaultValue = nullptr; // If the default value is itself invalid, don't use it again.
2240 }
2241
2242 KJ_REQUIRE(nestingLimit > 0,
2243 "Message is too deeply-nested or contains cycles. See capnp::ReaderOptions.") {
2244 goto useDefault;
2245 }
2246
2247 const word* ptr;
2248 KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
2249 ptr = p;
2250 } else {
2251 goto useDefault;
2252 }
2253
2254 KJ_REQUIRE(ref->kind() == WirePointer::LIST,
2255 "Message contains non-list pointer where list pointer was expected.") {
2256 goto useDefault;
2257 }
2258
2259 ElementSize elementSize = ref->listRef.elementSize();
2260 if (elementSize == ElementSize::INLINE_COMPOSITE) {
2261 auto wordCount = ref->listRef.inlineCompositeWordCount();
2262
2263 // An INLINE_COMPOSITE list points to a tag, which is formatted like a pointer.
2264 const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
2265
2266 KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
2267 "Message contains out-of-bounds list pointer.") {
2268 goto useDefault;
2269 }
2270
2271 ptr += POINTER_SIZE_IN_WORDS;
2272
2273 KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
2274 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
2275 goto useDefault;
2276 }
2277
2278 auto size = tag->inlineCompositeListElementCount();
2279 auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;
2280
2281 KJ_REQUIRE(upgradeBound<uint64_t>(size) * wordsPerElement <= wordCount,
2282 "INLINE_COMPOSITE list's elements overrun its word count.") {
2283 goto useDefault;
2284 }
2285
2286 if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
2287 // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
2288 // without having sent actual data.
2289 KJ_REQUIRE(amplifiedRead(segment, size * (ONE * WORDS / ELEMENTS)),
2290 "Message contains amplified list pointer.") {
2291 goto useDefault;
2292 }
2293 }
2294
2295 if (checkElementSize) {
2296 // If a struct list was not expected, then presumably a non-struct list was upgraded to a
2297 // struct list. We need to manipulate the pointer to point at the first field of the
2298 // struct. Together with the `step` field, this will allow the struct list to be accessed
2299 // as if it were a primitive list without branching.
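        //
        // For example (illustrative): a field declared List(UInt64) may arrive as an
        // INLINE_COMPOSITE list because the sender's schema has since wrapped the element
        // type in a struct whose first data word is that UInt64. Reading each element then
        // just reads the first word of each struct, with no per-element branching.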
2300
2301 // Check whether the size is compatible.
2302 switch (expectedElementSize) {
2303 case ElementSize::VOID:
2304 break;
2305
2306 case ElementSize::BIT:
2307 KJ_FAIL_REQUIRE(
2308 "Found struct list where bit list was expected; upgrading boolean lists to structs "
2309 "is no longer supported.") {
2310 goto useDefault;
2311 }
2312 break;
2313
2314 case ElementSize::BYTE:
2315 case ElementSize::TWO_BYTES:
2316 case ElementSize::FOUR_BYTES:
2317 case ElementSize::EIGHT_BYTES:
2318 KJ_REQUIRE(tag->structRef.dataSize.get() > ZERO * WORDS,
2319 "Expected a primitive list, but got a list of pointer-only structs.") {
2320 goto useDefault;
2321 }
2322 break;
2323
2324 case ElementSize::POINTER:
2325 // We expected a list of pointers but got a list of structs. Assuming the first field
2326 // in the struct is the pointer we were looking for, we want to munge the pointer to
2327 // point at the first element's pointer section.
2328 ptr += tag->structRef.dataSize.get();
2329 KJ_REQUIRE(tag->structRef.ptrCount.get() > ZERO * POINTERS,
2330 "Expected a pointer list, but got a list of data-only structs.") {
2331 goto useDefault;
2332 }
2333 break;
2334
2335 case ElementSize::INLINE_COMPOSITE:
2336 break;
2337 }
2338 }
2339
2340 return ListReader(
2341 segment, capTable, ptr, size, wordsPerElement * BITS_PER_WORD,
2342 tag->structRef.dataSize.get() * BITS_PER_WORD,
2343 tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
2344 nestingLimit - 1);
2345
2346 } else {
2347 // This is a primitive or pointer list, but all such lists can also be interpreted as struct
2348 // lists. We need to compute the data size and pointer count for such structs.
2349 auto dataSize = dataBitsPerElement(ref->listRef.elementSize()) * ELEMENTS;
2350 auto pointerCount = pointersPerElement(ref->listRef.elementSize()) * ELEMENTS;
2351 auto elementCount = ref->listRef.elementCount();
2352 auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
2353
2354 auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);
2355 KJ_REQUIRE(boundsCheck(segment, ptr, wordCount),
2356 "Message contains out-of-bounds list pointer.") {
2357 goto useDefault;
2358 }
2359
2360 if (elementSize == ElementSize::VOID) {
2361 // Watch out for lists of void, which can claim to be arbitrarily large without having sent
2362 // actual data.
2363 KJ_REQUIRE(amplifiedRead(segment, elementCount * (ONE * WORDS / ELEMENTS)),
2364 "Message contains amplified list pointer.") {
2365 goto useDefault;
2366 }
2367 }
2368
2369 if (checkElementSize) {
2370 if (elementSize == ElementSize::BIT && expectedElementSize != ElementSize::BIT) {
2371 KJ_FAIL_REQUIRE(
2372 "Found bit list where struct list was expected; upgrading boolean lists to structs "
2373 "is no longer supported.") {
2374 goto useDefault;
2375 }
2376 }
2377
2378 // Verify that the elements are at least as large as the expected type. Note that if we
2379 // expected INLINE_COMPOSITE, the expected sizes here will be zero, because bounds checking
2380 // will be performed at field access time. So this check here is for the case where we
2381 // expected a list of some primitive or pointer type.
2382
2383 BitCount expectedDataBitsPerElement =
2384 dataBitsPerElement(expectedElementSize) * ELEMENTS;
2385 WirePointerCount expectedPointersPerElement =
2386 pointersPerElement(expectedElementSize) * ELEMENTS;
2387
2388 KJ_REQUIRE(expectedDataBitsPerElement <= dataSize,
2389 "Message contained list with incompatible element type.") {
2390 goto useDefault;
2391 }
2392 KJ_REQUIRE(expectedPointersPerElement <= pointerCount,
2393 "Message contained list with incompatible element type.") {
2394 goto useDefault;
2395 }
2396 }
2397
2398 return ListReader(segment, capTable, ptr, elementCount, step,
2399 dataSize, pointerCount, elementSize, nestingLimit - 1);
2400 }
2401 }
2402
2403 static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
2404 SegmentReader* segment, const WirePointer* ref,
2405 const void* defaultValue, ByteCount defaultSize)) {
2406 return readTextPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
2407 }
2408
2409 static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
2410 SegmentReader* segment, const WirePointer* ref, const word* refTarget,
2411 const void* defaultValue, ByteCount defaultSize)) {
2412 if (ref->isNull()) {
2413 useDefault:
2414 if (defaultValue == nullptr) defaultValue = "";
2415 return Text::Reader(reinterpret_cast<const char*>(defaultValue),
2416 unbound(defaultSize / BYTES));
2417 } else {
2418 const word* ptr;
2419 KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
2420 ptr = p;
2421 } else {
2422 goto useDefault;
2423 }
2424
2425 auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);
2426
2427 KJ_REQUIRE(ref->kind() == WirePointer::LIST,
2428 "Message contains non-list pointer where text was expected.") {
2429 goto useDefault;
2430 }
2431
2432 KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
2433 "Message contains list pointer of non-bytes where text was expected.") {
2434 goto useDefault;
2435 }
2436
2437 KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
2438 "Message contained out-of-bounds text pointer.") {
2439 goto useDefault;
2440 }
2441
2442 KJ_REQUIRE(size > ZERO * BYTES, "Message contains text that is not NUL-terminated.") {
2443 goto useDefault;
2444 }
2445
2446 const char* cptr = reinterpret_cast<const char*>(ptr);
2447 uint unboundedSize = unbound(size / BYTES) - 1;
2448
2449 KJ_REQUIRE(cptr[unboundedSize] == '\0', "Message contains text that is not NUL-terminated.") {
2450 goto useDefault;
2451 }
2452
2453 return Text::Reader(cptr, unboundedSize);
2454 }
2455 }
2456
2457 static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
2458 SegmentReader* segment, const WirePointer* ref,
2459 const void* defaultValue, BlobSize defaultSize)) {
2460 return readDataPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
2461 }
2462
2463 static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
2464 SegmentReader* segment, const WirePointer* ref, const word* refTarget,
2465 const void* defaultValue, BlobSize defaultSize)) {
2466 if (ref->isNull()) {
2467 useDefault:
2468 return Data::Reader(reinterpret_cast<const byte*>(defaultValue),
2469 unbound(defaultSize / BYTES));
2470 } else {
2471 const word* ptr;
2472 KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
2473 ptr = p;
2474 } else {
2475 goto useDefault;
2476 }
2477
2478 if (KJ_UNLIKELY(ptr == nullptr)) {
2479 // Already reported error.
2480 goto useDefault;
2481 }
2482
2483 auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);
2484
2485 KJ_REQUIRE(ref->kind() == WirePointer::LIST,
2486 "Message contains non-list pointer where data was expected.") {
2487 goto useDefault;
2488 }
2489
2490 KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
2491 "Message contains list pointer of non-bytes where data was expected.") {
2492 goto useDefault;
2493 }
2494
2495 KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
2496 "Message contained out-of-bounds data pointer.") {
2497 goto useDefault;
2498 }
2499
2500 return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(size / BYTES));
2501 }
2502 }
2503};
2504
2505// =======================================================================================
2506// PointerBuilder
2507
2508StructBuilder PointerBuilder::initStruct(StructSize size) {
2509 return WireHelpers::initStructPointer(pointer, segment, capTable, size);
2510}
2511
2512StructBuilder PointerBuilder::getStruct(StructSize size, const word* defaultValue) {
2513 return WireHelpers::getWritableStructPointer(pointer, segment, capTable, size, defaultValue);
2514}
2515
2516ListBuilder PointerBuilder::initList(ElementSize elementSize, ElementCount elementCount) {
2517 return WireHelpers::initListPointer(pointer, segment, capTable, elementCount, elementSize);
2518}
2519
2520ListBuilder PointerBuilder::initStructList(ElementCount elementCount, StructSize elementSize) {
2521 return WireHelpers::initStructListPointer(pointer, segment, capTable, elementCount, elementSize);
2522}
2523
2524ListBuilder PointerBuilder::getList(ElementSize elementSize, const word* defaultValue) {
2525 return WireHelpers::getWritableListPointer(pointer, segment, capTable, elementSize, defaultValue);
2526}
2527
2528ListBuilder PointerBuilder::getStructList(StructSize elementSize, const word* defaultValue) {
2529 return WireHelpers::getWritableStructListPointer(
2530 pointer, segment, capTable, elementSize, defaultValue);
2531}
2532
2533ListBuilder PointerBuilder::getListAnySize(const word* defaultValue) {
2534 return WireHelpers::getWritableListPointerAnySize(pointer, segment, capTable, defaultValue);
2535}
2536
2537template <>
2538Text::Builder PointerBuilder::initBlob<Text>(ByteCount size) {
2539 return WireHelpers::initTextPointer(pointer, segment, capTable,
2540 assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow())).value;
2541}
2542template <>
2543void PointerBuilder::setBlob<Text>(Text::Reader value) {
2544 WireHelpers::setTextPointer(pointer, segment, capTable, value);
2545}
2546template <>
2547Text::Builder PointerBuilder::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) {
2548 return WireHelpers::getWritableTextPointer(pointer, segment, capTable, defaultValue,
2549 assertMax<MAX_TEXT_SIZE>(defaultSize, ThrowOverflow()));
2550}
2551
2552template <>
2553Data::Builder PointerBuilder::initBlob<Data>(ByteCount size) {
2554 return WireHelpers::initDataPointer(pointer, segment, capTable,
2555 assertMaxBits<BLOB_SIZE_BITS>(size, ThrowOverflow())).value;
2556}
2557template <>
2558void PointerBuilder::setBlob<Data>(Data::Reader value) {
2559 WireHelpers::setDataPointer(pointer, segment, capTable, value);
2560}
2561template <>
2562Data::Builder PointerBuilder::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) {
2563 return WireHelpers::getWritableDataPointer(pointer, segment, capTable, defaultValue,
2564 assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
2565}
2566
2567void PointerBuilder::setStruct(const StructReader& value, bool canonical) {
2568 WireHelpers::setStructPointer(segment, capTable, pointer, value, nullptr, canonical);
2569}
2570
2571void PointerBuilder::setList(const ListReader& value, bool canonical) {
2572 WireHelpers::setListPointer(segment, capTable, pointer, value, nullptr, canonical);
2573}
2574
2575#if !CAPNP_LITE
2576kj::Own<ClientHook> PointerBuilder::getCapability() {
2577 return WireHelpers::readCapabilityPointer(
2578 segment, capTable, pointer, kj::maxValue);
2579}
2580
2581void PointerBuilder::setCapability(kj::Own<ClientHook>&& cap) {
2582 WireHelpers::setCapabilityPointer(segment, capTable, pointer, kj::mv(cap));
2583}
2584#endif // !CAPNP_LITE
2585
2586void PointerBuilder::adopt(OrphanBuilder&& value) {
2587 WireHelpers::adopt(segment, capTable, pointer, kj::mv(value));
2588}
2589
2590OrphanBuilder PointerBuilder::disown() {
2591 return WireHelpers::disown(segment, capTable, pointer);
2592}
2593
2594void PointerBuilder::clear() {
2595 WireHelpers::zeroObject(segment, capTable, pointer);
2596 WireHelpers::zeroMemory(pointer);
2597}
2598
2599PointerType PointerBuilder::getPointerType() const {
2600 if(pointer->isNull()) {
2601 return PointerType::NULL_;
2602 } else {
2603 WirePointer* ptr = pointer;
2604 SegmentBuilder* sgmt = segment;
2605 WireHelpers::followFars(ptr, ptr->target(), sgmt);
2606 switch(ptr->kind()) {
2607 case WirePointer::FAR:
2608 KJ_FAIL_ASSERT("far pointer not followed?");
2609 case WirePointer::STRUCT:
2610 return PointerType::STRUCT;
2611 case WirePointer::LIST:
2612 return PointerType::LIST;
2613 case WirePointer::OTHER:
2614 KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
2615 return PointerType::CAPABILITY;
2616 }
2617 KJ_UNREACHABLE;
2618 }
2619}
2620
2621void PointerBuilder::transferFrom(PointerBuilder other) {
2622 if (!pointer->isNull()) {
2623 WireHelpers::zeroObject(segment, capTable, pointer);
2624 WireHelpers::zeroMemory(pointer);
2625 }
2626 WireHelpers::transferPointer(segment, pointer, other.segment, other.pointer);
2627 WireHelpers::zeroMemory(other.pointer);
2628}
2629
2630void PointerBuilder::copyFrom(PointerReader other, bool canonical) {
2631 if (other.pointer == nullptr) {
2632 if (!pointer->isNull()) {
2633 WireHelpers::zeroObject(segment, capTable, pointer);
2634 WireHelpers::zeroMemory(pointer);
2635 }
2636 } else {
2637 WireHelpers::copyPointer(segment, capTable, pointer,
2638 other.segment, other.capTable, other.pointer, other.nestingLimit,
2639 nullptr,
2640 canonical);
2641 }
2642}
2643
2644PointerReader PointerBuilder::asReader() const {
2645 return PointerReader(segment, capTable, pointer, kj::maxValue);
2646}
2647
2648BuilderArena* PointerBuilder::getArena() const {
2649 return segment->getArena();
2650}
2651
2652CapTableBuilder* PointerBuilder::getCapTable() {
2653 return capTable;
2654}
2655
2656PointerBuilder PointerBuilder::imbue(CapTableBuilder* capTable) {
2657 auto result = *this;
2658 result.capTable = capTable;
2659 return result;
2660}
2661
2662// =======================================================================================
2663// PointerReader
2664
2665PointerReader PointerReader::getRoot(SegmentReader* segment, CapTableReader* capTable,
2666 const word* location, int nestingLimit) {
2667 KJ_REQUIRE(WireHelpers::boundsCheck(segment, location, POINTER_SIZE_IN_WORDS),
2668 "Root location out-of-bounds.") {
2669 location = nullptr;
2670 }
2671
2672 return PointerReader(segment, capTable,
2673 reinterpret_cast<const WirePointer*>(location), nestingLimit);
2674}
2675
2676StructReader PointerReader::getStruct(const word* defaultValue) const {
2677 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2678 return WireHelpers::readStructPointer(segment, capTable, ref, defaultValue, nestingLimit);
2679}
2680
2681ListReader PointerReader::getList(ElementSize expectedElementSize, const word* defaultValue) const {
2682 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2683 return WireHelpers::readListPointer(
2684 segment, capTable, ref, defaultValue, expectedElementSize, nestingLimit);
2685}
2686
2687ListReader PointerReader::getListAnySize(const word* defaultValue) const {
2688 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2689 return WireHelpers::readListPointer(
2690 segment, capTable, ref, defaultValue, ElementSize::VOID /* dummy */, nestingLimit, false);
2691}
2692
2693template <>
2694Text::Reader PointerReader::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) const {
2695 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2696 return WireHelpers::readTextPointer(segment, ref, defaultValue, defaultSize);
2697}
2698
2699template <>
2700Data::Reader PointerReader::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) const {
2701 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2702 return WireHelpers::readDataPointer(segment, ref, defaultValue,
2703 assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
2704}
2705
2706#if !CAPNP_LITE
2707kj::Own<ClientHook> PointerReader::getCapability() const {
2708 const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
2709 return WireHelpers::readCapabilityPointer(segment, capTable, ref, nestingLimit);
2710}
2711#endif // !CAPNP_LITE
2712
2713const word* PointerReader::getUnchecked() const {
  KJ_REQUIRE(segment == nullptr, "getUnchecked() only allowed on unchecked messages.");
2715 return reinterpret_cast<const word*>(pointer);
2716}
2717
2718MessageSizeCounts PointerReader::targetSize() const {
2719 return pointer == nullptr ? MessageSizeCounts { ZERO * WORDS, 0 }
2720 : WireHelpers::totalSize(segment, pointer, nestingLimit);
2721}
2722
2723PointerType PointerReader::getPointerType() const {
2724 if(pointer == nullptr || pointer->isNull()) {
2725 return PointerType::NULL_;
2726 } else {
2727 const WirePointer* ptr = pointer;
2728 const word* refTarget = ptr->target(segment);
2729 SegmentReader* sgmt = segment;
2730 if (WireHelpers::followFars(ptr, refTarget, sgmt) == nullptr) return PointerType::NULL_;
2731 switch(ptr->kind()) {
2732 case WirePointer::FAR:
2733 KJ_FAIL_ASSERT("far pointer not followed?") { return PointerType::NULL_; }
2734 case WirePointer::STRUCT:
2735 return PointerType::STRUCT;
2736 case WirePointer::LIST:
2737 return PointerType::LIST;
2738 case WirePointer::OTHER:
2739 KJ_REQUIRE(ptr->isCapability(), "unknown pointer type") { return PointerType::NULL_; }
2740 return PointerType::CAPABILITY;
2741 }
2742 KJ_UNREACHABLE;
2743 }
2744}
2745
2746kj::Maybe<Arena&> PointerReader::getArena() const {
2747 return segment == nullptr ? nullptr : segment->getArena();
2748}
2749
2750CapTableReader* PointerReader::getCapTable() {
2751 return capTable;
2752}
2753
2754PointerReader PointerReader::imbue(CapTableReader* capTable) const {
2755 auto result = *this;
2756 result.capTable = capTable;
2757 return result;
2758}
2759
2760bool PointerReader::isCanonical(const word **readHead) {
2761 if (!this->pointer) {
2762 // The pointer is null, so we are canonical and do not read
2763 return true;
2764 }
2765
2766 if (!this->pointer->isPositional()) {
2767 // The pointer is a FAR or OTHER pointer, and is non-canonical
2768 return false;
2769 }
2770
2771 switch (this->getPointerType()) {
2772 case PointerType::NULL_:
2773 // The pointer is null, we are canonical and do not read
2774 return true;
2775 case PointerType::STRUCT: {
2776 bool dataTrunc, ptrTrunc;
2777 auto structReader = this->getStruct(nullptr);
2778 if (structReader.getDataSectionSize() == ZERO * BITS &&
2779 structReader.getPointerSectionSize() == ZERO * POINTERS) {
2780 return reinterpret_cast<const word*>(this->pointer) == structReader.getLocation();
2781 } else {
        return structReader.isCanonical(readHead, readHead, &dataTrunc, &ptrTrunc) &&
               dataTrunc && ptrTrunc;
2783 }
2784 }
2785 case PointerType::LIST:
2786 return this->getListAnySize(nullptr).isCanonical(readHead, pointer);
2787 case PointerType::CAPABILITY:
2788 KJ_FAIL_ASSERT("Capabilities are not positional");
2789 }
2790 KJ_UNREACHABLE;
2791}
2792
2793// =======================================================================================
2794// StructBuilder
2795
2796void StructBuilder::clearAll() {
2797 if (dataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, false);
2799 } else {
2800 WireHelpers::zeroMemory(reinterpret_cast<byte*>(data), dataSize / BITS_PER_BYTE);
2801 }
2802
2803 for (auto i: kj::zeroTo(pointerCount)) {
2804 WireHelpers::zeroObject(segment, capTable, pointers + i);
2805 }
2806 WireHelpers::zeroMemory(pointers, pointerCount);
2807}
2808
2809void StructBuilder::transferContentFrom(StructBuilder other) {
2810 // Determine the amount of data the builders have in common.
2811 auto sharedDataSize = kj::min(dataSize, other.dataSize);
2812
2813 if (dataSize > sharedDataSize) {
2814 // Since the target is larger than the source, make sure to zero out the extra bits that the
2815 // source doesn't have.
2816 if (dataSize == ONE * BITS) {
2817 setDataField<bool>(ZERO * ELEMENTS, false);
2818 } else {
2819 byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
2820 // Note: this subtraction can't fail due to the if() above
2821 WireHelpers::zeroMemory(unshared,
2822 subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
2823 }
2824 }
2825
2826 // Copy over the shared part.
2827 if (sharedDataSize == ONE * BITS) {
2828 setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
2829 } else {
2830 WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
2831 reinterpret_cast<byte*>(other.data),
2832 sharedDataSize / BITS_PER_BYTE);
2833 }
2834
2835 // Zero out all pointers in the target.
2836 for (auto i: kj::zeroTo(pointerCount)) {
2837 WireHelpers::zeroObject(segment, capTable, pointers + i);
2838 }
2839 WireHelpers::zeroMemory(pointers, pointerCount);
2840
2841 // Transfer the pointers.
2842 auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);
2843 for (auto i: kj::zeroTo(sharedPointerCount)) {
2844 WireHelpers::transferPointer(segment, pointers + i, other.segment, other.pointers + i);
2845 }
2846
2847 // Zero out the pointers that were transferred in the source because it no longer has ownership.
2848 // If the source had any extra pointers that the destination didn't have space for, we
2849 // intentionally leave them be, so that they'll be cleaned up later.
2850 WireHelpers::zeroMemory(other.pointers, sharedPointerCount);
2851}
2852
  // Determine the amount of data the target builder and the source reader have in common.
2854 // Determine the amount of data the builders have in common.
2855 auto sharedDataSize = kj::min(dataSize, other.dataSize);
2856 auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);
2857
2858 if ((sharedDataSize > ZERO * BITS && other.data == data) ||
2859 (sharedPointerCount > ZERO * POINTERS && other.pointers == pointers)) {
    // At least one of the section pointers is pointing to ourself. Verify that the other is,
    // too (but ignore empty sections).
2862 KJ_ASSERT((sharedDataSize == ZERO * BITS || other.data == data) &&
2863 (sharedPointerCount == ZERO * POINTERS || other.pointers == pointers));
    // So `other` appears to be a reader for this same struct. No copying is needed.
2865 return;
2866 }
2867
2868 if (dataSize > sharedDataSize) {
2869 // Since the target is larger than the source, make sure to zero out the extra bits that the
2870 // source doesn't have.
2871 if (dataSize == ONE * BITS) {
2872 setDataField<bool>(ZERO * ELEMENTS, false);
2873 } else {
2874 byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
2875 WireHelpers::zeroMemory(unshared,
2876 subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
2877 }
2878 }
2879
2880 // Copy over the shared part.
2881 if (sharedDataSize == ONE * BITS) {
2882 setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
2883 } else {
2884 WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
2885 reinterpret_cast<const byte*>(other.data),
2886 sharedDataSize / BITS_PER_BYTE);
2887 }
2888
2889 // Zero out all pointers in the target.
2890 for (auto i: kj::zeroTo(pointerCount)) {
2891 WireHelpers::zeroObject(segment, capTable, pointers + i);
2892 }
2893 WireHelpers::zeroMemory(pointers, pointerCount);
2894
2895 // Copy the pointers.
2896 for (auto i: kj::zeroTo(sharedPointerCount)) {
2897 WireHelpers::copyPointer(segment, capTable, pointers + i,
2898 other.segment, other.capTable, other.pointers + i, other.nestingLimit);
2899 }
2900}

StructReader StructBuilder::asReader() const {
  return StructReader(segment, capTable, data, pointers,
                      dataSize, pointerCount, kj::maxValue);
}

BuilderArena* StructBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* StructBuilder::getCapTable() {
  return capTable;
}

StructBuilder StructBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructReader

MessageSizeCounts StructReader::totalSize() const {
  MessageSizeCounts result = {
    WireHelpers::roundBitsUpToWords(dataSize) + pointerCount * WORDS_PER_POINTER, 0 };

  for (auto i: kj::zeroTo(pointerCount)) {
    result += WireHelpers::totalSize(segment, pointers + i, nestingLimit);
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}
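
// A plausible usage sketch for totalSize() (names illustrative): callers typically use the
// result to pre-size a destination before copying, e.g.:
//
//   MessageSizeCounts counts = reader.totalSize();
//   // counts.wordCount words of message content; counts.capCount capabilities referenced.
//
// Thanks to the unread() call above, this pre-scan is not charged against the read limit a
// second time when the object is subsequently traversed for the copy.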

kj::Array<word> StructReader::canonicalize() {
  auto size = totalSize().wordCount + POINTER_SIZE_IN_WORDS;
  kj::Array<word> backing = kj::heapArray<word>(unbound(size / WORDS));
  WireHelpers::zeroMemory(backing.asPtr());
  FlatMessageBuilder builder(backing);
  _::PointerHelpers<AnyPointer>::getInternalBuilder(builder.initRoot<AnyPointer>())
      .setStruct(*this, true);
  KJ_ASSERT(builder.isCanonical());
  auto output = builder.getSegmentsForOutput()[0];
  kj::Array<word> trunc = kj::heapArray<word>(output.size());
  WireHelpers::copyMemory(trunc.begin(), output);
  return trunc;
}
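
// A hedged usage sketch for canonicalize() (names illustrative): the returned words are a
// self-contained, single-segment canonical encoding, suitable for hashing or byte-wise
// comparison, e.g.:
//
//   kj::Array<word> a = reader1.canonicalize();
//   kj::Array<word> b = reader2.canonicalize();
//   bool equal = a.size() == b.size() &&
//       memcmp(a.begin(), b.begin(), a.size() * sizeof(word)) == 0;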

CapTableReader* StructReader::getCapTable() {
  return capTable;
}

StructReader StructReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

bool StructReader::isCanonical(const word **readHead,
                               const word **ptrHead,
                               bool *dataTrunc,
                               bool *ptrTrunc) {
  if (this->getLocation() != *readHead) {
    // Our target area is not at the read head, so the preorder invariant fails.
    return false;
  }

  if (this->getDataSectionSize() % BITS_PER_WORD != ZERO * BITS) {
    // Legacy non-word-sized structs are not canonical; reject.
    return false;
  }
  auto dataSize = this->getDataSectionSize() / BITS_PER_WORD;

  // Mark whether the struct is properly truncated.
  KJ_IF_MAYBE(diff, trySubtract(dataSize, ONE * WORDS)) {
    *dataTrunc = this->getDataField<uint64_t>(*diff / WORDS * ELEMENTS) != 0;
  } else {
    // Data segment empty.
    *dataTrunc = true;
  }

  KJ_IF_MAYBE(diff, trySubtract(this->pointerCount, ONE * POINTERS)) {
    *ptrTrunc = !this->getPointerField(*diff).isNull();
  } else {
    *ptrTrunc = true;
  }

  // Advance the read head.
  *readHead += (dataSize + (this->pointerCount * WORDS_PER_POINTER));

  // Check each pointer field for canonicity.
  for (auto ptrIndex: kj::zeroTo(this->pointerCount)) {
    if (!this->getPointerField(ptrIndex).isCanonical(ptrHead)) {
      return false;
    }
  }

  return true;
}
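
// For reference, a hedged summary of the contract above: on entry, *readHead must point at the
// word where this struct's data section is required to begin (preorder), while *ptrHead points
// at the word where its first child object must land. On success, *readHead has advanced past
// the data and pointer sections. *dataTrunc / *ptrTrunc report whether the final data word and
// final pointer are non-default, since a canonical encoder must drop trailing zero-valued
// words; for list elements these flags only need to hold for the list as a whole, which is why
// ListReader::isCanonical() below aggregates them across elements.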

// =======================================================================================
// ListBuilder

Text::Builder ListBuilder::asText() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Builder();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  char* cptr = reinterpret_cast<char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  return Text::Builder(cptr, size);
}
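
// On the wire, a Text value of logical length n is encoded as a BYTE list of n + 1 elements
// whose final element is '\0'. For example, "hi" occupies three list elements: 'h', 'i', '\0';
// asText() above strips the terminator, returning a builder of size 2.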

Data::Builder ListBuilder::asData() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Builder();
  }

  return Data::Builder(reinterpret_cast<byte*>(ptr), unbound(elementCount / ELEMENTS));
}

StructBuilder ListBuilder::getStructElement(ElementCount index) {
  auto indexBit = upgradeBound<uint64_t>(index) * step;
  byte* structData = ptr + indexBit / BITS_PER_BYTE;
  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructBuilder(segment, capTable, structData,
      reinterpret_cast<WirePointer*>(structData + structDataSize / BITS_PER_BYTE),
      structDataSize, structPointerCount);
}

ListReader ListBuilder::asReader() const {
  return ListReader(segment, capTable, ptr, elementCount, step, structDataSize, structPointerCount,
                    elementSize, kj::maxValue);
}

BuilderArena* ListBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* ListBuilder::getCapTable() {
  return capTable;
}

ListBuilder ListBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListReader

Text::Reader ListReader::asText() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Reader();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  const char* cptr = reinterpret_cast<const char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  return Text::Reader(cptr, size);
}

Data::Reader ListReader::asData() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Reader();
  }

  return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(elementCount / ELEMENTS));
}

kj::ArrayPtr<const byte> ListReader::asRawBytes() const {
  KJ_REQUIRE(structPointerCount == ZERO * POINTERS,
             "Expected data only, got pointers.") {
    return kj::ArrayPtr<const byte>();
  }

  return arrayPtr(reinterpret_cast<const byte*>(ptr),
      WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(elementCount) * (structDataSize / ELEMENTS)));
}
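
// Worked example for asRawBytes(): a list of 10 BIT elements has structDataSize == 1 bit per
// element, so the returned blob spans roundBitsUpToBytes(10 bits) == 2 bytes, with the unused
// high bits of the second byte included as-is.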

StructReader ListReader::getStructElement(ElementCount index) const {
  KJ_REQUIRE(nestingLimit > 0,
             "Message is too deeply-nested or contains cycles. See capnp::ReaderOptions.") {
    return StructReader();
  }

  auto indexBit = upgradeBound<uint64_t>(index) * step;
  const byte* structData = ptr + indexBit / BITS_PER_BYTE;
  const WirePointer* structPointers =
      reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);

  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructReader(
      segment, capTable, structData, structPointers,
      structDataSize, structPointerCount,
      nestingLimit - 1);
}

MessageSizeCounts ListReader::totalSize() const {
  // TODO(cleanup): This is kind of a lot of logic duplicated from WireHelpers::totalSize(), but
  //   it's unclear how to share it effectively.

  MessageSizeCounts result = { ZERO * WORDS, 0 };

  switch (elementSize) {
    case ElementSize::VOID:
      // Nothing.
      break;
    case ElementSize::BIT:
    case ElementSize::BYTE:
    case ElementSize::TWO_BYTES:
    case ElementSize::FOUR_BYTES:
    case ElementSize::EIGHT_BYTES:
      result.addWords(WireHelpers::roundBitsUpToWords(
          upgradeBound<uint64_t>(elementCount) * dataBitsPerElement(elementSize)));
      break;
    case ElementSize::POINTER: {
      auto count = elementCount * (POINTERS / ELEMENTS);
      result.addWords(count * WORDS_PER_POINTER);

      for (auto i: kj::zeroTo(count)) {
        result += WireHelpers::totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                         nestingLimit);
      }
      break;
    }
    case ElementSize::INLINE_COMPOSITE: {
      // Don't forget to count the tag word.
      auto wordSize = upgradeBound<uint64_t>(elementCount) * step / BITS_PER_WORD;
      result.addWords(wordSize + POINTER_SIZE_IN_WORDS);

      if (structPointerCount > ZERO * POINTERS) {
        const word* pos = reinterpret_cast<const word*>(ptr);
        for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
          pos += structDataSize / BITS_PER_WORD;

          for (auto j KJ_UNUSED: kj::zeroTo(structPointerCount)) {
            result += WireHelpers::totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                             nestingLimit);
            pos += POINTER_SIZE_IN_WORDS;
          }
        }
      }
      break;
    }
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}
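
// Worked example for the INLINE_COMPOSITE branch above: a list of 3 structs, each with one data
// word and one pointer, has step == 128 bits (two words) per element, so wordSize == 6 words;
// adding POINTER_SIZE_IN_WORDS == 1 for the tag word gives 7 words -- before adding whatever
// the three pointer fields themselves target.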

CapTableReader* ListReader::getCapTable() {
  return capTable;
}

ListReader ListReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

bool ListReader::isCanonical(const word **readHead, const WirePointer *ref) {
  switch (this->getElementSize()) {
    case ElementSize::INLINE_COMPOSITE: {
      *readHead += 1;
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        // An INLINE_COMPOSITE list's pointer targets its tag word, and the element data begins
        // one word later. We advanced the read head past the tag above, so `ptr` must now match
        // it exactly.
        return false;
      }
      if (this->structDataSize % BITS_PER_WORD != ZERO * BITS) {
        return false;
      }
      auto elementSize = StructSize(this->structDataSize / BITS_PER_WORD,
                                    this->structPointerCount).total() / ELEMENTS;
      auto totalSize = upgradeBound<uint64_t>(this->elementCount) * elementSize;
      if (totalSize != ref->listRef.inlineCompositeWordCount()) {
        return false;
      }
      if (elementSize == ZERO * WORDS / ELEMENTS) {
        return true;
      }
      auto listEnd = *readHead + totalSize;
      auto pointerHead = listEnd;
      bool listDataTrunc = false;
      bool listPtrTrunc = false;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        bool dataTrunc, ptrTrunc;
        if (!this->getStructElement(ec).isCanonical(readHead,
                                                    &pointerHead,
                                                    &dataTrunc,
                                                    &ptrTrunc)) {
          return false;
        }
        listDataTrunc |= dataTrunc;
        listPtrTrunc |= ptrTrunc;
      }
      KJ_REQUIRE(*readHead == listEnd, *readHead, listEnd);
      *readHead = pointerHead;
      return listDataTrunc && listPtrTrunc;
    }
    case ElementSize::POINTER: {
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }
      *readHead += this->elementCount * (POINTERS / ELEMENTS) * WORDS_PER_POINTER;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        if (!this->getPointerElement(ec).isCanonical(readHead)) {
          return false;
        }
      }
      return true;
    }
    default: {
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }

      auto bitSize = upgradeBound<uint64_t>(this->elementCount) *
          dataBitsPerElement(this->elementSize);
      auto truncatedByteSize = bitSize / BITS_PER_BYTE;
      auto byteReadHead = reinterpret_cast<const uint8_t*>(*readHead) + truncatedByteSize;
      auto readHeadEnd = *readHead + WireHelpers::roundBitsUpToWords(bitSize);

      auto leftoverBits = bitSize % BITS_PER_BYTE;
      if (leftoverBits > ZERO * BITS) {
        auto mask = ~((1 << unbound(leftoverBits / BITS)) - 1);

        if (mask & *byteReadHead) {
          return false;
        }
        byteReadHead += 1;
      }

      while (byteReadHead != reinterpret_cast<const uint8_t*>(readHeadEnd)) {
        if (*byteReadHead != 0) {
          return false;
        }
        byteReadHead += 1;
      }

      *readHead = readHeadEnd;
      return true;
    }
  }
  KJ_UNREACHABLE;
}
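
// Worked example for the default (plain data) case above: a canonical list of 3 BYTE elements
// occupies one word. bitSize == 24 bits, so truncatedByteSize == 3 and leftoverBits == 0; the
// final loop then requires the 5 padding bytes up to the word boundary to be zero, since a
// canonical encoder must zero all padding.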

// =======================================================================================
// OrphanBuilder

OrphanBuilder OrphanBuilder::initStruct(
    BuilderArena* arena, CapTableBuilder* capTable, StructSize size) {
  OrphanBuilder result;
  StructBuilder builder = WireHelpers::initStructPointer(
      result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, ElementSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initStructList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, StructSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initStructListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initText(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initTextPointer(result.tagAsPtr(), nullptr, capTable,
      assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow()), arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::initData(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initDataPointer(result.tagAsPtr(), nullptr, capTable,
      assertMaxBits<BLOB_SIZE_BITS>(size), arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setStructPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setListPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::copyPointer(
      nullptr, capTable, result.tagAsPtr(),
      copyFrom.segment, copyFrom.capTable, copyFrom.pointer, copyFrom.nestingLimit, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setTextPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setDataPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

#if !CAPNP_LITE
OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, kj::Own<ClientHook> copyFrom) {
  OrphanBuilder result;
  WireHelpers::setCapabilityPointer(nullptr, capTable, result.tagAsPtr(), kj::mv(copyFrom));
  result.segment = arena->getSegment(SegmentId(0));
  result.capTable = capTable;
  result.location = &result.tag;  // dummy to make location non-null
  return result;
}
#endif // !CAPNP_LITE

OrphanBuilder OrphanBuilder::concat(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementSize elementSize, StructSize structSize,
    kj::ArrayPtr<const ListReader> lists) {
  KJ_REQUIRE(lists.size() > 0, "Can't concat empty list.");

  // Find the overall element count and size.
  ListElementCount elementCount = ZERO * ELEMENTS;
  for (auto& list: lists) {
    elementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount + list.elementCount,
        []() { KJ_FAIL_REQUIRE("concatenated list exceeds list size limit"); });
    if (list.elementSize != elementSize) {
      // If element sizes don't all match, upgrade to struct list.
      KJ_REQUIRE(list.elementSize != ElementSize::BIT && elementSize != ElementSize::BIT,
                 "can't upgrade bit lists to struct lists");
      elementSize = ElementSize::INLINE_COMPOSITE;
    }
    structSize.data = kj::max(structSize.data,
        WireHelpers::roundBitsUpToWords(list.structDataSize));
    structSize.pointers = kj::max(structSize.pointers, list.structPointerCount);
  }

  // Allocate the list.
  OrphanBuilder result;
  ListBuilder builder = (elementSize == ElementSize::INLINE_COMPOSITE)
      ? WireHelpers::initStructListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, structSize, arena)
      : WireHelpers::initListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);

  // Copy elements.
  switch (elementSize) {
    case ElementSize::INLINE_COMPOSITE: {
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getStructElement(pos).copyContentFrom(list.getStructElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::POINTER: {
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getPointerElement(pos).copyFrom(list.getPointerElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::BIT: {
      // It's difficult to memcpy() bits since a list could start or end mid-byte. For now we
      // do a slow, naive loop. Probably no one will ever care.
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.setDataElement<bool>(pos, list.getDataElement<bool>(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    default: {
      // We know all the inputs are primitives with identical size because otherwise we would have
      // chosen INLINE_COMPOSITE. Therefore, we can safely use memcpy() here instead of copying
      // each element manually.
      byte* target = builder.ptr;
      auto step = builder.step / BITS_PER_BYTE;
      for (auto& list: lists) {
        auto count = step * upgradeBound<uint64_t>(list.size());
        WireHelpers::copyMemory(target, list.ptr, assumeBits<SEGMENT_WORD_COUNT_BITS>(count));
        target += count;
      }
      break;
    }
  }

  // Return orphan.
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
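
// Worked example for the upgrade rule above: concatenating a TWO_BYTES list with a FOUR_BYTES
// list forces elementSize to INLINE_COMPOSITE, with structSize.data == 1 word (the larger
// element size rounded up to whole words) and structSize.pointers == 0. The TWO_BYTES elements
// are then copied in as structs whose trailing data bytes are zero.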

OrphanBuilder OrphanBuilder::referenceExternalData(BuilderArena* arena, Data::Reader data) {
  // TODO(someday): We now allow unaligned segments on architectures that support them. Should we
  //   consider relaxing this check as well?
  KJ_REQUIRE(reinterpret_cast<uintptr_t>(data.begin()) % sizeof(void*) == 0,
             "Cannot referenceExternalData() that is not aligned.");

  auto checkedSize = assertMaxBits<BLOB_SIZE_BITS>(bounded(data.size()));
  auto wordCount = WireHelpers::roundBytesUpToWords(checkedSize * BYTES);
  kj::ArrayPtr<const word> words(reinterpret_cast<const word*>(data.begin()),
                                 unbound(wordCount / WORDS));

  OrphanBuilder result;
  result.tagAsPtr()->setKindForOrphan(WirePointer::LIST);
  result.tagAsPtr()->listRef.set(ElementSize::BYTE, checkedSize * ELEMENTS);
  result.segment = arena->addExternalSegment(words);

  // External data cannot possibly contain capabilities.
  result.capTable = nullptr;

  // const_cast OK here because we will check whether the segment is writable when we try to get
  // a builder.
  result.location = const_cast<word*>(words.begin());

  return result;
}
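
// A hedged usage sketch (identifiers illustrative; the caller must keep the bytes alive for the
// lifetime of the message, and the pointer must be aligned per the check above):
//
//   alignas(8) static const byte blob[16] = {0};
//   OrphanBuilder orphan = OrphanBuilder::referenceExternalData(
//       arena, Data::Reader(blob, sizeof(blob)));
//
// The resulting message points directly at `blob`; no bytes are copied unless a writable
// builder is later requested.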

StructBuilder OrphanBuilder::asStruct(StructSize size) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  StructBuilder result = WireHelpers::getWritableStructPointer(
      tagAsPtr(), location, segment, capTable, size, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = reinterpret_cast<word*>(result.data);

  return result;
}

ListBuilder OrphanBuilder::asList(ElementSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  // (Actually, currently this is not true for primitive lists, but let's not let that turn into
  // a bug if it changes!)
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asStructList(StructSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableStructListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asListAnySize() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointerAnySize(
      tagAsPtr(), location, segment, capTable, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

Text::Builder OrphanBuilder::asText() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableTextPointer(
      tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
}

Data::Builder OrphanBuilder::asData() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableDataPointer(
      tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
}

StructReader OrphanBuilder::asStructReader(StructSize size) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readStructPointer(
      segment, capTable, tagAsPtr(), location, nullptr, kj::maxValue);
}

ListReader OrphanBuilder::asListReader(ElementSize elementSize) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, elementSize, kj::maxValue);
}

ListReader OrphanBuilder::asListReaderAnySize() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, ElementSize::VOID /* dummy */,
      kj::maxValue);
}

#if !CAPNP_LITE
kj::Own<ClientHook> OrphanBuilder::asCapability() const {
  return WireHelpers::readCapabilityPointer(segment, capTable, tagAsPtr(), kj::maxValue);
}
#endif // !CAPNP_LITE

Text::Reader OrphanBuilder::asTextReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readTextPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
}

Data::Reader OrphanBuilder::asDataReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readDataPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
}

bool OrphanBuilder::truncate(ElementCount uncheckedSize, bool isText) {
  ListElementCount size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(uncheckedSize,
      []() { KJ_FAIL_REQUIRE("requested list size is too large"); });

  WirePointer* ref = tagAsPtr();
  SegmentBuilder* segment = this->segment;

  word* target = WireHelpers::followFars(ref, location, segment);

  if (ref->isNull()) {
    // We don't know the right element size, so we can't resize this list.
    return size == ZERO * ELEMENTS;
  }

  KJ_REQUIRE(ref->kind() == WirePointer::LIST, "Can't truncate non-list.") {
    return false;
  }

  if (isText) {
    // Add space for the NUL terminator.
    size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(size + ONE * ELEMENTS,
        []() { KJ_FAIL_REQUIRE("requested list size is too large"); });
  }

  auto elementSize = ref->listRef.elementSize();

  if (elementSize == ElementSize::INLINE_COMPOSITE) {
    auto oldWordCount = ref->listRef.inlineCompositeWordCount();

    WirePointer* tag = reinterpret_cast<WirePointer*>(target);
    ++target;
    KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
               "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
      return false;
    }
    StructSize structSize(tag->structRef.dataSize.get(), tag->structRef.ptrCount.get());
    auto elementStep = structSize.total() / ELEMENTS;

    auto oldSize = tag->inlineCompositeListElementCount();

    SegmentWordCount sizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(size) * elementStep,
        []() { KJ_FAIL_ASSERT("requested list size too large to fit in message segment"); });
    SegmentWordCount oldSizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(oldSize) * elementStep,
        []() { KJ_FAIL_ASSERT("prior to truncate, list is larger than max segment size?"); });

    word* newEndWord = target + sizeWords;
    word* oldEndWord = target + oldWordCount;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (auto i: kj::range(size, oldSize)) {
        // assumeBits() safe because we checked that both sizeWords and oldSizeWords are in-range
        // above.
        WireHelpers::zeroObject(segment, capTable, tag, target +
            assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep));
      }
      ref->listRef.setInlineComposite(sizeWords);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else if (newEndWord <= oldEndWord) {
      // Apparently the old list was over-allocated? The word count is more than needed to store
      // the elements. This is "valid" but shouldn't happen in practice unless someone is toying
      // with us.
      word* expectedEnd = target + oldSizeWords;
      KJ_ASSERT(newEndWord >= expectedEnd);
      WireHelpers::zeroMemory(expectedEnd,
          intervalLength(expectedEnd, newEndWord, MAX_SEGMENT_WORDS));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.setInlineComposite(sizeWords);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initStructList(segment->getArena(), capTable, size, structSize);

        ListBuilder newList = replacement.asStructList(structSize);
        for (auto i: kj::zeroTo(oldSize)) {
          // assumeBits() safe because we checked that both sizeWords and oldSizeWords are
          // in-range above.
          word* element = target +
              assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep);
          newList.getStructElement(i).transferContentFrom(
              StructBuilder(segment, capTable, element,
                            reinterpret_cast<WirePointer*>(element + structSize.data),
                            structSize.data * BITS_PER_WORD, structSize.pointers));
        }

        *this = kj::mv(replacement);
      }
    }
  } else if (elementSize == ElementSize::POINTER) {
    // TODO(cleanup): GCC won't let me declare this constexpr, claiming POINTERS is not
    //   constexpr, but it is?
    const auto POINTERS_PER_ELEMENT = ONE * POINTERS / ELEMENTS;

    auto oldSize = ref->listRef.elementCount();
    word* newEndWord = target + size * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;
    word* oldEndWord = target + oldSize * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (WirePointer* element = reinterpret_cast<WirePointer*>(newEndWord);
           element < reinterpret_cast<WirePointer*>(oldEndWord); ++element) {
        WireHelpers::zeroPointerAndFars(segment, element);
      }
      ref->listRef.set(ElementSize::POINTER, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.set(ElementSize::POINTER, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(
            segment->getArena(), capTable, size, ElementSize::POINTER);
        ListBuilder newList = replacement.asList(ElementSize::POINTER);
        WirePointer* oldPointers = reinterpret_cast<WirePointer*>(target);
        for (auto i: kj::zeroTo(oldSize)) {
          newList.getPointerElement(i).transferFrom(
              PointerBuilder(segment, capTable, oldPointers + i * POINTERS_PER_ELEMENT));
        }
        *this = kj::mv(replacement);
      }
    }
  } else {
    auto oldSize = ref->listRef.elementCount();
    auto step = dataBitsPerElement(elementSize);
    const auto MAX_STEP_BYTES = ONE * WORDS / ELEMENTS * BYTES_PER_WORD;
    word* newEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(size) * step);
    word* oldEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(oldSize) * step);

    if (size <= oldSize) {
      // When truncating text, we want to set the null terminator as well, so we'll do our zeroing
      // at the byte level.
      byte* begin = reinterpret_cast<byte*>(target);
      byte* newEndByte = begin + WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(size) * step) - isText;
      byte* oldEndByte = reinterpret_cast<byte*>(oldEndWord);

      WireHelpers::zeroMemory(newEndByte,
          intervalLength(newEndByte, oldEndByte, MAX_LIST_ELEMENTS * MAX_STEP_BYTES));
      ref->listRef.set(elementSize, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      // We're trying to extend, not truncate.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the memory is already zero'd.
        ref->listRef.set(elementSize, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(segment->getArena(), capTable, size, elementSize);
        ListBuilder newList = replacement.asList(elementSize);
        auto words = WireHelpers::roundBitsUpToWords(
            dataBitsPerElement(elementSize) * upgradeBound<uint64_t>(oldSize));
        WireHelpers::copyMemory(reinterpret_cast<word*>(newList.ptr), target, words);
        *this = kj::mv(replacement);
      }
    }
  }

  return true;
}
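
// A worked example of the text path in truncate() above: truncating a Text of 10 characters to
// 5 first bumps `size` to 6 elements (to account for the NUL), then zeroes from byte 5 -- the
// `- isText` adjustment pulls the zeroing start back one byte so the new final element becomes
// the NUL terminator -- through the end of the old word-aligned allocation.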

void OrphanBuilder::truncate(ElementCount size, ElementSize elementSize) {
  if (!truncate(size, false)) {
    // assumeBits() safe since it's checked inside truncate()
    *this = initList(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
  }
}

void OrphanBuilder::truncate(ElementCount size, StructSize elementSize) {
  if (!truncate(size, false)) {
    // assumeBits() safe since it's checked inside truncate()
    *this = initStructList(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
  }
}

void OrphanBuilder::truncateText(ElementCount size) {
  if (!truncate(size, true)) {
    // assumeBits() safe since it's checked inside truncate()
    *this = initText(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size) * (ONE * BYTES / ELEMENTS));
  }
}

void OrphanBuilder::euthanize() {
  // Carefully catch any exceptions and rethrow them as recoverable exceptions since we may be in
  // a destructor.
  auto exception = kj::runCatchingExceptions([&]() {
    if (tagAsPtr()->isPositional()) {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr(), location);
    } else {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr());
    }

    WireHelpers::zeroMemory(&tag, ONE * WORDS);
    segment = nullptr;
    location = nullptr;
  });

  KJ_IF_MAYBE(e, exception) {
    kj::getExceptionCallback().onRecoverableException(kj::mv(*e));
  }
}

} // namespace _ (private)
} // namespace capnp
