// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#define CAPNP_PRIVATE
#include "layout.h"
#include <kj/debug.h>
#include "arena.h"
#include <string.h>
#include <stdlib.h>

#if !CAPNP_LITE
#include "capability.h"
#endif  // !CAPNP_LITE

namespace capnp {
namespace _ {  // private

#if !CAPNP_LITE
static BrokenCapFactory* brokenCapFactory = nullptr;
// Horrible hack:  We need to be able to construct broken caps without any capability context,
// but we can't have a link-time dependency on libcapnp-rpc.

void setGlobalBrokenCapFactoryForLayoutCpp(BrokenCapFactory& factory) {
  // Called from capability.c++ when the capability API is used, to make sure that layout.c++
  // is ready for it.  May be called multiple times but always with the same value.
#if __GNUC__
  __atomic_store_n(&brokenCapFactory, &factory, __ATOMIC_RELAXED);
#elif _MSC_VER
  *static_cast<BrokenCapFactory* volatile*>(&brokenCapFactory) = &factory;
#else
#error "Platform not supported"
#endif
}
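
// Note: the relaxed ordering above is sufficient because every caller stores the same factory
// pointer, so a reader only ever needs to observe null or one fully-written value -- no
// ordering with other memory is implied.  A roughly equivalent formulation using <atomic>
// (illustrative sketch only, not how this file is written):
//
//     static std::atomic<BrokenCapFactory*> brokenCapFactory{nullptr};
//     brokenCapFactory.store(&factory, std::memory_order_relaxed);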

}  // namespace _ (private)

const uint ClientHook::NULL_CAPABILITY_BRAND = 0;
// Defined here rather than capability.c++ so that we can safely call isNull() in this file.

namespace _ {  // private

#endif  // !CAPNP_LITE

#if CAPNP_DEBUG_TYPES
#define G(n) bounded<n>()
#else
#define G(n) n
#endif

// =======================================================================================

struct WirePointer {
  // A pointer, in exactly the format in which it appears on the wire.

  // Copying and moving is not allowed because the offset would become wrong.
  WirePointer(const WirePointer& other) = delete;
  WirePointer(WirePointer&& other) = delete;
  WirePointer& operator=(const WirePointer& other) = delete;
  WirePointer& operator=(WirePointer&& other) = delete;

  // -----------------------------------------------------------------
  // Common part of all pointers:  kind + offset
  //
  // Actually this is not terribly common.  The "offset" could actually be different things
  // depending on the context:
  // - For a regular (e.g. struct/list) pointer, a signed word offset from the word immediately
  //   following the pointer.  (The off-by-one means the offset is more often zero, saving
  //   bytes on the wire when packed.)
  // - For an inline composite list tag (not really a pointer, but structured similarly), an
  //   element count.
  // - For a FAR pointer, an unsigned offset into the target segment.
  // - For a FAR landing pad, zero indicates that the target value immediately follows the pad while
  //   1 indicates that the pad is followed by another FAR pointer that actually points at the
  //   value.

  enum Kind {
    STRUCT = 0,
    // Reference points at / describes a struct.

    LIST = 1,
    // Reference points at / describes a list.

    FAR = 2,
    // Reference is a "far pointer", which points at data located in a different segment.  The
    // eventual target is one of the other kinds.

    OTHER = 3
    // Reference has type "other".  If the next 30 bits are all zero (i.e. the lower 32 bits contain
    // only the kind OTHER) then the pointer is a capability.  All other values are reserved.
  };

  WireValue<uint32_t> offsetAndKind;

  KJ_ALWAYS_INLINE(Kind kind() const) {
    return static_cast<Kind>(offsetAndKind.get() & 3);
  }
  KJ_ALWAYS_INLINE(bool isPositional() const) {
    return (offsetAndKind.get() & 2) == 0;  // match STRUCT and LIST but not FAR or OTHER
  }
  KJ_ALWAYS_INLINE(bool isCapability() const) {
    return offsetAndKind.get() == OTHER;
  }
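
  // Worked example of the encoding above (illustrative): a STRUCT pointer whose target begins
  // two words after the word following the pointer stores offsetAndKind = (2 << 2) | STRUCT =
  // 0x00000008.  kind() recovers the low two bits (0 == STRUCT), isPositional() sees bit 1
  // clear, and target() shifts the signed offset back out with `>> 2`.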

  KJ_ALWAYS_INLINE(word* target()) {
    return reinterpret_cast<word*>(this) + 1 + (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(const word* target(SegmentReader* segment) const) {
    if (segment == nullptr) {
      return reinterpret_cast<const word*>(this + 1) +
          (static_cast<int32_t>(offsetAndKind.get()) >> 2);
    } else {
      return segment->checkOffset(reinterpret_cast<const word*>(this + 1),
                                  static_cast<int32_t>(offsetAndKind.get()) >> 2);
    }
  }
  KJ_ALWAYS_INLINE(void setKindAndTarget(Kind kind, word* target, SegmentBuilder* segment)) {
    // Check that the target is really in the same segment, otherwise subtracting pointers is
    // undefined behavior.  As it turns out, it's undefined behavior that actually produces
    // unexpected results in a real-world situation that actually happened:  At one time,
    // OrphanBuilder's "tag" (a WirePointer) was allowed to be initialized as if it lived in
    // a particular segment when in fact it does not.  On 32-bit systems, where words might
    // only be 32-bit aligned, it's possible that the difference between `this` and `target` is
    // not a whole number of words.  But clang optimizes:
    //     (target - (word*)this - 1) << 2
    // to:
    //     (((ptrdiff_t)target - (ptrdiff_t)this - 8) >> 1)
    // So now when the pointers are not aligned the same, we can end up corrupting the bottom
    // two bits, where `kind` is stored.  For example, this turns a struct into a far pointer.
    // Ouch!
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) >=
                reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(this) <
                reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) >=
                reinterpret_cast<uintptr_t>(segment->getStartPtr()));
    KJ_DREQUIRE(reinterpret_cast<uintptr_t>(target) <=
                reinterpret_cast<uintptr_t>(segment->getStartPtr() + segment->getSize()));
    offsetAndKind.set(((target - reinterpret_cast<word*>(this) - 1) << 2) | kind);
  }
  KJ_ALWAYS_INLINE(void setKindWithZeroOffset(Kind kind)) {
    offsetAndKind.set(kind);
  }
  KJ_ALWAYS_INLINE(void setKindAndTargetForEmptyStruct()) {
    // This pointer points at an empty struct.  Assuming the WirePointer itself is in-bounds, we
    // can set the target to point either at the WirePointer itself or immediately after it.  The
    // latter would cause the WirePointer to be "null" (since for an empty struct the upper 32
    // bits are going to be zero).  So we set an offset of -1, as if the struct were allocated
    // immediately before this pointer, to distinguish it from null.
    offsetAndKind.set(0xfffffffc);
  }
  KJ_ALWAYS_INLINE(void setKindForOrphan(Kind kind)) {
    // OrphanBuilder contains a WirePointer, but since it isn't located in a segment, it should
    // not have a valid offset (unless it is a FAR or OTHER pointer).  We set its offset to -1
    // because setting it to zero would mean a pointer to an empty struct would appear to be a null
    // pointer.
    KJ_DREQUIRE(isPositional());
    offsetAndKind.set(kind | 0xfffffffc);
  }

  KJ_ALWAYS_INLINE(ListElementCount inlineCompositeListElementCount() const) {
    return ((bounded(offsetAndKind.get()) >> G(2))
            & G(kj::maxValueForBits<LIST_ELEMENT_COUNT_BITS>())) * ELEMENTS;
  }
  KJ_ALWAYS_INLINE(void setKindAndInlineCompositeListElementCount(
      Kind kind, ListElementCount elementCount)) {
    offsetAndKind.set(unboundAs<uint32_t>((elementCount / ELEMENTS) << G(2)) | kind);
  }

  KJ_ALWAYS_INLINE(const word* farTarget(SegmentReader* segment) const) {
    KJ_DREQUIRE(kind() == FAR,
        "farTarget() should only be called on FAR pointers.");
    return segment->checkOffset(segment->getStartPtr(), offsetAndKind.get() >> 3);
  }
  KJ_ALWAYS_INLINE(word* farTarget(SegmentBuilder* segment) const) {
    KJ_DREQUIRE(kind() == FAR,
        "farTarget() should only be called on FAR pointers.");
    return segment->getPtrUnchecked((bounded(offsetAndKind.get()) >> G(3)) * WORDS);
  }
  KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
    KJ_DREQUIRE(kind() == FAR,
        "isDoubleFar() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 2) & 1;
  }
  KJ_ALWAYS_INLINE(void setFar(bool isDoubleFar, WordCountN<29> pos)) {
    offsetAndKind.set(unboundAs<uint32_t>((pos / WORDS) << G(3)) |
                      (static_cast<uint32_t>(isDoubleFar) << 2) |
                      static_cast<uint32_t>(Kind::FAR));
  }
  KJ_ALWAYS_INLINE(void setCap(uint index)) {
    offsetAndKind.set(static_cast<uint32_t>(Kind::OTHER));
    capRef.index.set(index);
  }
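
  // Illustrative far-pointer encoding, following the accessors above: setFar(false, 5 * WORDS)
  // stores offsetAndKind = (5 << 3) | (0 << 2) | FAR = 0x2a, with the target segment ID kept in
  // the upper 32 bits (farRef).  Setting the double-far bit instead gives
  // (5 << 3) | (1 << 2) | FAR = 0x2e.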

  // -----------------------------------------------------------------
  // Part of pointer that depends on the kind.

  // Note:  Originally StructRef, ListRef, and FarRef were unnamed types, but this somehow
  //   tickled a bug in GCC:
  //     http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58192
  struct StructRef {
    WireValue<WordCount16> dataSize;
    WireValue<WirePointerCount16> ptrCount;

    inline WordCountN<17> wordSize() const {
      return upgradeBound<uint32_t>(dataSize.get()) + ptrCount.get() * WORDS_PER_POINTER;
    }

    KJ_ALWAYS_INLINE(void set(WordCount16 ds, WirePointerCount16 rc)) {
      dataSize.set(ds);
      ptrCount.set(rc);
    }
    KJ_ALWAYS_INLINE(void set(StructSize size)) {
      dataSize.set(size.data);
      ptrCount.set(size.pointers);
    }
  };

  struct ListRef {
    WireValue<uint32_t> elementSizeAndCount;

    KJ_ALWAYS_INLINE(ElementSize elementSize() const) {
      return static_cast<ElementSize>(elementSizeAndCount.get() & 7);
    }
    KJ_ALWAYS_INLINE(ElementCountN<29> elementCount() const) {
      return (bounded(elementSizeAndCount.get()) >> G(3)) * ELEMENTS;
    }
    KJ_ALWAYS_INLINE(WordCountN<29> inlineCompositeWordCount() const) {
      return elementCount() * (ONE * WORDS / ELEMENTS);
    }

    KJ_ALWAYS_INLINE(void set(ElementSize es, ElementCountN<29> ec)) {
      elementSizeAndCount.set(unboundAs<uint32_t>((ec / ELEMENTS) << G(3)) |
                              static_cast<int>(es));
    }

    KJ_ALWAYS_INLINE(void setInlineComposite(WordCountN<29> wc)) {
      elementSizeAndCount.set(unboundAs<uint32_t>((wc / WORDS) << G(3)) |
                              static_cast<int>(ElementSize::INLINE_COMPOSITE));
    }
  };

  struct FarRef {
    WireValue<SegmentId> segmentId;

    KJ_ALWAYS_INLINE(void set(SegmentId si)) {
      segmentId.set(si);
    }
  };

  struct CapRef {
    WireValue<uint32_t> index;
    // Index into the message's capability table.
  };

  union {
    uint32_t upper32Bits;

    StructRef structRef;

    ListRef listRef;

    FarRef farRef;

    CapRef capRef;
  };

  KJ_ALWAYS_INLINE(bool isNull() const) {
    // If the upper 32 bits are zero, this is a pointer to an empty struct.  We consider that to be
    // our "null" value.
    return (offsetAndKind.get() == 0) & (upper32Bits == 0);
  }
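
  // Example wire bytes (illustrative): a null pointer is a word of all zeros -- offsetAndKind
  // == 0 and upper32Bits == 0, which reads as "empty struct at offset 0".  A deliberately
  // non-null empty struct instead stores offsetAndKind == 0xfffffffc (offset -1), as written by
  // setKindAndTargetForEmptyStruct() above.  Note that the bitwise `&` in isNull() evaluates
  // both cheap comparisons without a short-circuit branch.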

};
static_assert(sizeof(WirePointer) == sizeof(word),
    "capnp::WirePointer is not exactly one word.  This will probably break everything.");
static_assert(unboundAs<size_t>(POINTERS * WORDS_PER_POINTER * BYTES_PER_WORD / BYTES) ==
              sizeof(WirePointer),
    "WORDS_PER_POINTER is wrong.");
static_assert(unboundAs<size_t>(POINTERS * BYTES_PER_POINTER / BYTES) == sizeof(WirePointer),
    "BYTES_PER_POINTER is wrong.");
static_assert(unboundAs<size_t>(POINTERS * BITS_PER_POINTER / BITS_PER_BYTE / BYTES) ==
              sizeof(WirePointer),
    "BITS_PER_POINTER is wrong.");

namespace {

static const union {
  AlignedData<unbound(POINTER_SIZE_IN_WORDS / WORDS)> word;
  WirePointer pointer;
} zero = {{{0}}};

}  // namespace

// =======================================================================================

namespace {

template <typename T>
struct SegmentAnd {
  SegmentBuilder* segment;
  T value;
};

}  // namespace

struct WireHelpers {
#if CAPNP_DEBUG_TYPES
  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, word> roundBytesUpToWords(
          kj::Quantity<kj::Bounded<maxN, T>, byte> bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
  }

  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 7) / 8, T>, byte> roundBitsUpToBytes(
          kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }

  template <uint64_t maxN, typename T>
  static KJ_ALWAYS_INLINE(
      kj::Quantity<kj::Bounded<(maxN + 63) / 64, T>, word> roundBitsUpToWords(
          kj::Quantity<kj::Bounded<maxN, T>, BitLabel> bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + G(63) * BITS) / BITS_PER_WORD;
  }
#else
  static KJ_ALWAYS_INLINE(WordCount roundBytesUpToWords(ByteCount bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + G(7) * BYTES) / BYTES_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount roundBitsUpToBytes(BitCount bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }

  static KJ_ALWAYS_INLINE(WordCount64 roundBitsUpToWords(BitCount64 bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + G(63) * BITS) / BITS_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount64 roundBitsUpToBytes(BitCount64 bits)) {
    return (bits + G(7) * BITS) / BITS_PER_BYTE;
  }
#endif
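
  // Worked example of the rounding helpers above (assuming 64-bit words): 17 bytes round up to
  // (17 + 7) / 8 = 3 words, 65 bits round up to (65 + 7) / 8 = 9 bytes, and the same 65 bits
  // round up to (65 + 63) / 64 = 2 words.  Adding (divisor - 1) before dividing is plain
  // ceiling division on these bounded quantities.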

  static KJ_ALWAYS_INLINE(void zeroMemory(byte* ptr, ByteCount32 count)) {
    memset(ptr, 0, unbound(count / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(word* ptr, WordCountN<29> count)) {
    memset(ptr, 0, unbound(count * BYTES_PER_WORD / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr, WirePointerCountN<29> count)) {
    memset(ptr, 0, unbound(count * BYTES_PER_POINTER / BYTES));
  }

  static KJ_ALWAYS_INLINE(void zeroMemory(WirePointer* ptr)) {
    memset(ptr, 0, sizeof(*ptr));
  }

  template <typename T>
  static inline void zeroMemory(kj::ArrayPtr<T> array) {
    memset(array.begin(), 0, array.size() * sizeof(array[0]));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(byte* to, const byte* from, ByteCount32 count)) {
    memcpy(to, from, unbound(count / BYTES));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(word* to, const word* from, WordCountN<29> count)) {
    memcpy(to, from, unbound(count * BYTES_PER_WORD / BYTES));
  }

  static KJ_ALWAYS_INLINE(void copyMemory(WirePointer* to, const WirePointer* from,
                                          WirePointerCountN<29> count)) {
    memcpy(to, from, unbound(count * BYTES_PER_POINTER / BYTES));
  }

  template <typename T>
  static inline void copyMemory(T* to, const T* from) {
    memcpy(to, from, sizeof(*from));
  }

  // TODO(cleanup): Turn these into a .copyTo() method of ArrayPtr?
  template <typename T>
  static inline void copyMemory(T* to, kj::ArrayPtr<T> from) {
    memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }
  template <typename T>
  static inline void copyMemory(T* to, kj::ArrayPtr<const T> from) {
    memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }
  static KJ_ALWAYS_INLINE(void copyMemory(char* to, kj::StringPtr from)) {
    memcpy(to, from.begin(), from.size() * sizeof(from[0]));
  }

  static KJ_ALWAYS_INLINE(bool boundsCheck(
      SegmentReader* segment, const word* start, WordCountN<31> size)) {
    // If segment is null, this is an unchecked message, so we don't do bounds checks.
    return segment == nullptr || segment->checkObject(start, size);
  }

  static KJ_ALWAYS_INLINE(bool amplifiedRead(SegmentReader* segment, WordCount virtualAmount)) {
    // If segment is null, this is an unchecked message, so we don't do read limiter checks.
    return segment == nullptr || segment->amplifiedRead(virtualAmount);
  }

  static KJ_ALWAYS_INLINE(word* allocate(
      WirePointer*& ref, SegmentBuilder*& segment, CapTableBuilder* capTable,
      SegmentWordCount amount, WirePointer::Kind kind, BuilderArena* orphanArena)) {
    // Allocate space in the message for a new object, creating far pointers if necessary.
    //
    // * `ref` starts out being a reference to the pointer which shall be assigned to point at the
    //   new object.  On return, `ref` points to a pointer which needs to be initialized with
    //   the object's type information.  Normally this is the same pointer, but it can change if
    //   a far pointer was allocated -- in this case, `ref` will end up pointing to the far
    //   pointer's tag.  Either way, `allocate()` takes care of making sure that the original
    //   pointer ends up leading to the new object.  On return, only the upper 32 bits of `*ref`
    //   need to be filled in by the caller.
    // * `segment` starts out pointing to the segment containing `ref`.  On return, it points to
    //   the segment containing the allocated object, which is usually the same segment but could
    //   be a different one if the original segment was out of space.
    // * `amount` is the number of words to allocate.
    // * `kind` is the kind of object to allocate.  It is used to initialize the pointer.  It
    //   cannot be `FAR` -- far pointers are allocated automatically as needed.
    // * `orphanArena` is usually null.  If it is non-null, then we're allocating an orphan object.
    //   In this case, `segment` starts out null; the allocation takes place in an arbitrary
    //   segment belonging to the arena.  `ref` will be initialized as a non-far pointer, but its
    //   target offset will be set to zero.

    if (orphanArena == nullptr) {
      if (!ref->isNull()) zeroObject(segment, capTable, ref);

      if (amount == ZERO * WORDS && kind == WirePointer::STRUCT) {
        // Note that the check for kind == WirePointer::STRUCT will hopefully cause this whole
        // branch to be optimized away from all the call sites that are allocating non-structs.
        ref->setKindAndTargetForEmptyStruct();
        return reinterpret_cast<word*>(ref);
      }

      word* ptr = segment->allocate(amount);

      if (ptr == nullptr) {

        // Need to allocate in a new segment.  We'll need to allocate an extra pointer worth of
        // space to act as the landing pad for a far pointer.

        WordCount amountPlusRef = amount + POINTER_SIZE_IN_WORDS;
        auto allocation = segment->getArena()->allocate(
            assertMaxBits<SEGMENT_WORD_COUNT_BITS>(amountPlusRef, []() {
              KJ_FAIL_REQUIRE("requested object size exceeds maximum segment size");
            }));
        segment = allocation.segment;
        ptr = allocation.words;

        // Set up the original pointer to be a far pointer to the new segment.
        ref->setFar(false, segment->getOffsetTo(ptr));
        ref->farRef.set(segment->getSegmentId());

        // Initialize the landing pad to indicate that the data immediately follows the pad.
        ref = reinterpret_cast<WirePointer*>(ptr);
        ref->setKindAndTarget(kind, ptr + POINTER_SIZE_IN_WORDS, segment);

        // Allocated space follows new pointer.
        return ptr + POINTER_SIZE_IN_WORDS;
      } else {
        ref->setKindAndTarget(kind, ptr, segment);
        return ptr;
      }
    } else {
      // orphanArena is non-null.  Allocate an orphan.
      KJ_DASSERT(ref->isNull());
      auto allocation = orphanArena->allocate(amount);
      segment = allocation.segment;
      ref->setKindForOrphan(kind);
      return allocation.words;
    }
  }
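
  // Rough sketch of the far-pointer case in allocate() above (not to scale).  If `ref` lives in
  // segment 0 but the object had to go in segment 1, the result is laid out as:
  //
  //     segment 0: ... [ref: FAR -> segment 1, offset 0] ...
  //     segment 1: [landing pad: kind + offset to object] [object words ...]
  //
  // The landing pad now plays the role the original pointer would have played, which is why
  // `ref` is redirected to it before returning -- the caller fills in its upper 32 bits just as
  // it would for a direct pointer.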

  static KJ_ALWAYS_INLINE(word* followFarsNoWritableCheck(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    // If `ref` is a far pointer, follow it.  On return, `ref` will have been updated to point at
    // a WirePointer that contains the type information about the target object, and a pointer to
    // the object contents is returned.  The caller must NOT use `ref->target()` as this may or may
    // not actually return a valid pointer.  `segment` is also updated to point at the segment which
    // actually contains the object.
    //
    // If `ref` is not a far pointer, this simply returns `refTarget`.  Usually, `refTarget` should
    // be the same as `ref->target()`, but may not be in cases where `ref` is only a tag.

    if (ref->kind() == WirePointer::FAR) {
      segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
      return pad->farTarget(segment);
    } else {
      return refTarget;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFars(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    auto result = followFarsNoWritableCheck(ref, refTarget, segment);
    segment->checkWritable();
    return result;
  }

  static KJ_ALWAYS_INLINE(kj::Maybe<const word&> followFars(
      const WirePointer*& ref, const word* refTarget, SegmentReader*& segment))
      KJ_WARN_UNUSED_RESULT {
    // Like the other followFars() but operates on readers.

    // If the segment is null, this is an unchecked message, so there are no FAR pointers.
    if (segment != nullptr && ref->kind() == WirePointer::FAR) {
      // Look up the segment containing the landing pad.
      segment = segment->getArena()->tryGetSegment(ref->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains far pointer to unknown segment.") {
        return nullptr;
      }

      // Find the landing pad and check that it is within bounds.
      const word* ptr = ref->farTarget(segment);
      auto padWords = (ONE + bounded(ref->isDoubleFar())) * POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(boundsCheck(segment, ptr, padWords),
                 "Message contains out-of-bounds far pointer.") {
        return nullptr;
      }

      const WirePointer* pad = reinterpret_cast<const WirePointer*>(ptr);

      // If this is not a double-far then the landing pad is our final pointer.
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target(segment);
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      SegmentReader* newSegment = segment->getArena()->tryGetSegment(pad->farRef.segmentId.get());
      KJ_REQUIRE(newSegment != nullptr,
          "Message contains double-far pointer to unknown segment.") {
        return nullptr;
      }
      KJ_REQUIRE(pad->kind() == WirePointer::FAR,
          "Second word of double-far pad must be far pointer.") {
        return nullptr;
      }

      segment = newSegment;
      return pad->farTarget(segment);
    } else {
      KJ_DASSERT(refTarget != nullptr);
      return refTarget;
    }
  }
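
  // Illustrative double-far walk for the reader path above: `ref` is a FAR pointer with the
  // double-far bit set, naming segment A.  Segment A holds a two-word pad: pad[0] is a FAR
  // pointer giving the object's segment B and offset, and pad[1] is a tag carrying the object's
  // kind and size with a zero offset.  On success, `ref` ends up pointing at pad[1] and the
  // returned word pointer addresses the object's content in segment B.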

  // -----------------------------------------------------------------

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref) {
    // Zero out the pointed-to object.  Use when the pointer is about to be overwritten, making
    // the target object no longer reachable.

    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (ref->kind()) {
      case WirePointer::STRUCT:
      case WirePointer::LIST:
        zeroObject(segment, capTable, ref, ref->target());
        break;
      case WirePointer::FAR: {
        segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
        if (segment->isWritable()) {  // Don't zero external data.
          WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(segment));

          if (ref->isDoubleFar()) {
            segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
            if (segment->isWritable()) {
              zeroObject(segment, capTable, pad + 1, pad->farTarget(segment));
            }
            zeroMemory(pad, G(2) * POINTERS);
          } else {
            zeroObject(segment, capTable, pad);
            zeroMemory(pad);
          }
        }
        break;
      }
      case WirePointer::OTHER:
        if (ref->isCapability()) {
#if CAPNP_LITE
          KJ_FAIL_ASSERT("Capability encountered in builder in lite mode?") { break; }
#else  // CAPNP_LITE
          capTable->dropCap(ref->capRef.index.get());
#endif  // CAPNP_LITE, else
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }
  }

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable,
                         WirePointer* tag, word* ptr) {
    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (tag->kind()) {
      case WirePointer::STRUCT: {
        WirePointer* pointerSection =
            reinterpret_cast<WirePointer*>(ptr + tag->structRef.dataSize.get());
        for (auto i: kj::zeroTo(tag->structRef.ptrCount.get())) {
          zeroObject(segment, capTable, pointerSection + i);
        }
        zeroMemory(ptr, tag->structRef.wordSize());
        break;
      }
      case WirePointer::LIST: {
        switch (tag->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            zeroMemory(ptr, roundBitsUpToWords(
                upgradeBound<uint64_t>(tag->listRef.elementCount()) *
                dataBitsPerElement(tag->listRef.elementSize())));
            break;
          }
          case ElementSize::POINTER: {
            WirePointer* typedPtr = reinterpret_cast<WirePointer*>(ptr);
            auto count = tag->listRef.elementCount() * (ONE * POINTERS / ELEMENTS);
            for (auto i: kj::zeroTo(count)) {
              zeroObject(segment, capTable, typedPtr + i);
            }
            zeroMemory(typedPtr, count);
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);

            KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
                  "Don't know how to handle non-STRUCT inline composite.");
            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            auto count = elementTag->inlineCompositeListElementCount();
            if (pointerCount > ZERO * POINTERS) {
              word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(pos));
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }

            auto wordsPerElement = elementTag->structRef.wordSize() / ELEMENTS;
            zeroMemory(ptr, assertMaxBits<SEGMENT_WORD_COUNT_BITS>(POINTER_SIZE_IN_WORDS +
                upgradeBound<uint64_t>(count) * wordsPerElement, []() {
                  KJ_FAIL_ASSERT("encountered list pointer in builder which is too large to "
                      "possibly fit in a segment. Bug in builder code?");
                }));
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        KJ_FAIL_ASSERT("Unexpected OTHER pointer.") {
          break;
        }
        break;
    }
  }

  static KJ_ALWAYS_INLINE(
      void zeroPointerAndFars(SegmentBuilder* segment, WirePointer* ref)) {
    // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, but
    // do not zero the object body.  Used when upgrading.

    if (ref->kind() == WirePointer::FAR) {
      SegmentBuilder* padSegment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      if (padSegment->isWritable()) {  // Don't zero external data.
        WirePointer* pad = reinterpret_cast<WirePointer*>(ref->farTarget(padSegment));
        if (ref->isDoubleFar()) {
          zeroMemory(pad, G(2) * POINTERS);
        } else {
          zeroMemory(pad);
        }
      }
    }

    zeroMemory(ref);
  }


  // -----------------------------------------------------------------

  static MessageSizeCounts totalSize(
      SegmentReader* segment, const WirePointer* ref, int nestingLimit) {
    // Compute the total size of the object pointed to, not counting far pointer overhead.

    MessageSizeCounts result = { ZERO * WORDS, 0 };

    if (ref->isNull()) {
      return result;
    }

    KJ_REQUIRE(nestingLimit > 0, "Message is too deeply-nested.") {
      return result;
    }
    --nestingLimit;

    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, ref->target(segment), segment)) {
      ptr = p;
    } else {
      return result;
    }

    switch (ref->kind()) {
      case WirePointer::STRUCT: {
        KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          return result;
        }
        result.addWords(ref->structRef.wordSize());

        const WirePointer* pointerSection =
            reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get());
        for (auto i: kj::zeroTo(ref->structRef.ptrCount.get())) {
          result += totalSize(segment, pointerSection + i, nestingLimit);
        }
        break;
      }
      case WirePointer::LIST: {
        switch (ref->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            auto totalWords = roundBitsUpToWords(
                upgradeBound<uint64_t>(ref->listRef.elementCount()) *
                dataBitsPerElement(ref->listRef.elementSize()));
            KJ_REQUIRE(boundsCheck(segment, ptr, totalWords),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }
            result.addWords(totalWords);
            break;
          }
          case ElementSize::POINTER: {
            auto count = ref->listRef.elementCount() * (POINTERS / ELEMENTS);

            KJ_REQUIRE(boundsCheck(segment, ptr, count * WORDS_PER_POINTER),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            result.addWords(count * WORDS_PER_POINTER);

            for (auto i: kj::zeroTo(count)) {
              result += totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                  nestingLimit);
            }
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            auto wordCount = ref->listRef.inlineCompositeWordCount();
            KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            const WirePointer* elementTag = reinterpret_cast<const WirePointer*>(ptr);
            auto count = elementTag->inlineCompositeListElementCount();

            KJ_REQUIRE(elementTag->kind() == WirePointer::STRUCT,
                       "Don't know how to handle non-STRUCT inline composite.") {
              return result;
            }

            auto actualSize = elementTag->structRef.wordSize() / ELEMENTS *
                              upgradeBound<uint64_t>(count);
            KJ_REQUIRE(actualSize <= wordCount,
                       "Struct list pointer's elements overran size.") {
              return result;
            }

            // We count the claimed word count (plus the tag word) rather than the computed
            // element size because that's what we'll end up with if we make a copy.
            result.addWords(wordCount + POINTER_SIZE_IN_WORDS);

            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            if (pointerCount > ZERO * POINTERS) {
              const word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (auto i KJ_UNUSED: kj::zeroTo(count)) {
                pos += dataSize;

                for (auto j KJ_UNUSED: kj::zeroTo(pointerCount)) {
                  result += totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                      nestingLimit);
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        if (ref->isCapability()) {
          result.capCount++;
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }

    return result;
  }
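
  // Quick arithmetic check for the INLINE_COMPOSITE case above (illustrative): a list claiming
  // wordCount = 8 whose tag declares 3 elements of wordSize 3 has actualSize = 9 > 8 and is
  // rejected rather than over-read; a tag declaring 2 such elements (actualSize = 6 <= 8)
  // passes, and the list contributes wordCount + 1 tag word = 9 words to the total.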

  // -----------------------------------------------------------------
  // Copy from an unchecked message.

  static KJ_ALWAYS_INLINE(
      void copyStruct(SegmentBuilder* segment, CapTableBuilder* capTable,
                      word* dst, const word* src,
                      StructDataWordCount dataSize, StructPointerCount pointerCount)) {
    copyMemory(dst, src, dataSize);

    const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src + dataSize);
    WirePointer* dstRefs = reinterpret_cast<WirePointer*>(dst + dataSize);

    for (auto i: kj::zeroTo(pointerCount)) {
      SegmentBuilder* subSegment = segment;
      WirePointer* dstRef = dstRefs + i;
      copyMessage(subSegment, capTable, dstRef, srcRefs + i);
    }
  }

  static word* copyMessage(
      SegmentBuilder*& segment, CapTableBuilder* capTable,
      WirePointer*& dst, const WirePointer* src) {
    // Not always-inline because it's recursive.

    switch (src->kind()) {
      case WirePointer::STRUCT: {
        if (src->isNull()) {
          zeroMemory(dst);
          return nullptr;
        } else {
          const word* srcPtr = src->target(nullptr);
          word* dstPtr = allocate(
              dst, segment, capTable, src->structRef.wordSize(), WirePointer::STRUCT, nullptr);

          copyStruct(segment, capTable, dstPtr, srcPtr, src->structRef.dataSize.get(),
                     src->structRef.ptrCount.get());

          dst->structRef.set(src->structRef.dataSize.get(), src->structRef.ptrCount.get());
          return dstPtr;
        }
      }
      case WirePointer::LIST: {
        switch (src->listRef.elementSize()) {
          case ElementSize::VOID:
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            auto wordCount = roundBitsUpToWords(
                upgradeBound<uint64_t>(src->listRef.elementCount()) *
                dataBitsPerElement(src->listRef.elementSize()));
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable, wordCount, WirePointer::LIST, nullptr);
            copyMemory(dstPtr, srcPtr, wordCount);

            dst->listRef.set(src->listRef.elementSize(), src->listRef.elementCount());
            return dstPtr;
          }

          case ElementSize::POINTER: {
            const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src->target(nullptr));
            WirePointer* dstRefs = reinterpret_cast<WirePointer*>(
                allocate(dst, segment, capTable, src->listRef.elementCount() *
                    (ONE * POINTERS / ELEMENTS) * WORDS_PER_POINTER,
                    WirePointer::LIST, nullptr));

            for (auto i: kj::zeroTo(src->listRef.elementCount() * (ONE * POINTERS / ELEMENTS))) {
              SegmentBuilder* subSegment = segment;
              WirePointer* dstRef = dstRefs + i;
              copyMessage(subSegment, capTable, dstRef, srcRefs + i);
            }

            dst->listRef.set(ElementSize::POINTER, src->listRef.elementCount());
            return reinterpret_cast<word*>(dstRefs);
          }

          case ElementSize::INLINE_COMPOSITE: {
            const word* srcPtr = src->target(nullptr);
            word* dstPtr = allocate(dst, segment, capTable,
                assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
                    src->listRef.inlineCompositeWordCount() + POINTER_SIZE_IN_WORDS,
                    []() { KJ_FAIL_ASSERT("list too big to fit in a segment"); }),
                WirePointer::LIST, nullptr);

            dst->listRef.setInlineComposite(src->listRef.inlineCompositeWordCount());

            const WirePointer* srcTag = reinterpret_cast<const WirePointer*>(srcPtr);
            copyMemory(reinterpret_cast<WirePointer*>(dstPtr), srcTag);

            const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
            word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;

            KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
                "INLINE_COMPOSITE of lists is not yet supported.");

            for (auto i KJ_UNUSED: kj::zeroTo(srcTag->inlineCompositeListElementCount())) {
              copyStruct(segment, capTable, dstElement, srcElement,
                  srcTag->structRef.dataSize.get(), srcTag->structRef.ptrCount.get());
              srcElement += srcTag->structRef.wordSize();
              dstElement += srcTag->structRef.wordSize();
            }
            return dstPtr;
          }
        }
        break;
      }
      case WirePointer::OTHER:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain OTHER pointers (e.g. capabilities).");
        break;
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain far pointers.");
        break;
    }

    return nullptr;
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, WirePointer* src) {
    // Make *dst point to the same object as *src.  Both must reside in the same message, but can
    // be in different segments.  Not always-inline because this is rarely used.
    //
    // Caller MUST zero out the source pointer after calling this, to make sure no later code
    // mistakenly thinks the source location still owns the object.  transferPointer() doesn't do
    // this zeroing itself because many callers transfer several pointers in a loop then zero out
    // the whole section.

    KJ_DASSERT(dst->isNull());
    // We expect the caller to ensure the target is already null so won't leak.

    if (src->isNull()) {
      zeroMemory(dst);
    } else if (src->isPositional()) {
      transferPointer(dstSegment, dst, srcSegment, src, src->target());
    } else {
      // Far and other pointers are position-independent, so we can just copy.
      copyMemory(dst, src);
    }
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, const WirePointer* srcTag,
                              word* srcPtr) {
    // Like the other overload, but splits src into a tag and a target.  Particularly useful for
    // OrphanBuilder.

    if (dstSegment == srcSegment) {
      // Same segment, so create a direct pointer.

      if (srcTag->kind() == WirePointer::STRUCT && srcTag->structRef.wordSize() == ZERO * WORDS) {
        dst->setKindAndTargetForEmptyStruct();
      } else {
        dst->setKindAndTarget(srcTag->kind(), srcPtr, dstSegment);
      }

      // We can just copy the upper 32 bits.  (Use memcpy() to comply with aliasing rules.)
      copyMemory(&dst->upper32Bits, &srcTag->upper32Bits);
    } else {
      // Need to create a far pointer.  Try to allocate it in the same segment as the source, so
      // that it doesn't need to be a double-far.

      WirePointer* landingPad =
          reinterpret_cast<WirePointer*>(srcSegment->allocate(G(1) * WORDS));
      if (landingPad == nullptr) {
        // Darn, need a double-far.
        auto allocation = srcSegment->getArena()->allocate(G(2) * WORDS);
        SegmentBuilder* farSegment = allocation.segment;
        landingPad = reinterpret_cast<WirePointer*>(allocation.words);

        landingPad[0].setFar(false, srcSegment->getOffsetTo(srcPtr));
        landingPad[0].farRef.segmentId.set(srcSegment->getSegmentId());

        landingPad[1].setKindWithZeroOffset(srcTag->kind());
        copyMemory(&landingPad[1].upper32Bits, &srcTag->upper32Bits);

        dst->setFar(true, farSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(farSegment->getSegmentId());
      } else {
        // Simple landing pad is just a pointer.
        landingPad->setKindAndTarget(srcTag->kind(), srcPtr, srcSegment);
        copyMemory(&landingPad->upper32Bits, &srcTag->upper32Bits);

        dst->setFar(false, srcSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(srcSegment->getSegmentId());
      }
    }
  }

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructBuilder initStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate space for the new struct.  Newly-allocated space is automatically zeroed.
    word* ptr = allocate(ref, segment, capTable, size.total(), WirePointer::STRUCT, orphanArena);

    // Initialize the pointer.
    ref->structRef.set(size);

    // Build the StructBuilder.
    return StructBuilder(segment, capTable, ptr, reinterpret_cast<WirePointer*>(ptr + size.data),
                         size.data * BITS_PER_WORD, size.pointers);
  }
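
  // Minimal usage sketch (hypothetical caller; in practice the PointerBuilder methods defined
  // later in this file are the real entry points):
  //
  //     // Initialize a struct of 2 data words and 1 pointer behind `ref`:
  //     StructBuilder b = WireHelpers::initStructPointer(
  //         ref, segment, capTable, StructSize(2 * WORDS, 1 * POINTERS));
  //
  // Because allocate() returns zeroed space, the new struct starts out with all-default fields.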

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      const word* defaultValue)) {
    return getWritableStructPointer(ref, ref->target(), segment, capTable, size, defaultValue);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      StructSize size, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return initStructPointer(ref, segment, capTable, size, orphanArena);
      }
      refTarget = copyMessage(segment, capTable, ref,
          reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* oldRef = ref;
    SegmentBuilder* oldSegment = segment;
    word* oldPtr = followFars(oldRef, refTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::STRUCT,
        "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    auto oldDataSize = oldRef->structRef.dataSize.get();
    auto oldPointerCount = oldRef->structRef.ptrCount.get();
    WirePointer* oldPointerSection =
        reinterpret_cast<WirePointer*>(oldPtr + oldDataSize);

    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
      // The space allocated for this struct is too small.  Unlike with readers, we can't just
      // run with it and do bounds checks at access time, because how would we handle writes?
      // Instead, we have to copy the struct to a new space now.

      auto newDataSize = kj::max(oldDataSize, size.data);
      auto newPointerCount = kj::max(oldPointerCount, size.pointers);
      auto totalSize = newDataSize + newPointerCount * WORDS_PER_POINTER;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(segment, ref);

      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
      ref->structRef.set(newDataSize, newPointerCount);

      // Copy data section.
      copyMemory(ptr, oldPtr, oldDataSize);

      // Copy pointer section.
      WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(ptr + newDataSize);
      for (auto i: kj::zeroTo(oldPointerCount)) {
        transferPointer(segment, newPointerSection + i, oldSegment, oldPointerSection + i);
      }

      // Zero out old location.  This has two purposes:
      // 1) We don't want to leak the original contents of the struct when the message is written
      //    out as it may contain secrets that the caller intends to remove from the new copy.
      // 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
      //    hits the wire.
      zeroMemory(oldPtr, oldDataSize + oldPointerCount * WORDS_PER_POINTER);

      return StructBuilder(segment, capTable, ptr, newPointerSection, newDataSize * BITS_PER_WORD,
                           newPointerCount);
    } else {
      return StructBuilder(oldSegment, capTable, oldPtr, oldPointerSection,
                           oldDataSize * BITS_PER_WORD, oldPointerCount);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, ElementSize elementSize, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
        "Should have called initStructListPointer() instead.");

    auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
        []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });

    auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
    auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
    auto step = bitsPerElementIncludingPointers(elementSize);
    KJ_DASSERT(step * ELEMENTS == (dataSize + pointerCount * BITS_PER_POINTER));

    // Calculate size of the list.
    auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(checkedElementCount) * step);

    // Allocate the list.
    word* ptr = allocate(ref, segment, capTable, wordCount, WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(elementSize, checkedElementCount);

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, step, checkedElementCount,
                       dataSize, pointerCount, elementSize);
  }

  static KJ_ALWAYS_INLINE(ListBuilder initStructListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, StructSize elementSize, BuilderArena* orphanArena = nullptr)) {
    auto checkedElementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount,
        []() { KJ_FAIL_REQUIRE("tried to allocate list with too many elements"); });

    WordsPerElementN<17> wordsPerElement = elementSize.total() / ELEMENTS;

    // Allocate the list, prefixed by a single WirePointer.
    auto wordCount = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
        upgradeBound<uint64_t>(checkedElementCount) * wordsPerElement,
        []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });
    word* ptr = allocate(ref, segment, capTable, POINTER_SIZE_IN_WORDS + wordCount,
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    // INLINE_COMPOSITE lists replace the element count with the word count.
    ref->listRef.setInlineComposite(wordCount);

    // Initialize the list tag.
    reinterpret_cast<WirePointer*>(ptr)->setKindAndInlineCompositeListElementCount(
        WirePointer::STRUCT, checkedElementCount);
    reinterpret_cast<WirePointer*>(ptr)->structRef.set(elementSize);
    ptr += POINTER_SIZE_IN_WORDS;

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, wordsPerElement * BITS_PER_WORD, checkedElementCount,
                       elementSize.data * BITS_PER_WORD, elementSize.pointers,
                       ElementSize::INLINE_COMPOSITE);
  }
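
  // Resulting wire layout (illustrative): for checkedElementCount = 2 and an elementSize of
  // {1 data word, 1 pointer}, the allocation is
  //
  //     [tag: STRUCT kind, elementCount = 2, structRef = {1 word, 1 pointer}]
  //     [elem 0 data][elem 0 pointer]
  //     [elem 1 data][elem 1 pointer]
  //
  // and the list pointer's listRef records the element word count (2 * 2 = 4, excluding the
  // tag word) rather than the element count, which lives in the tag.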

1217
  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
1218 1219 1220
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      ElementSize elementSize, const word* defaultValue)) {
    return getWritableListPointer(origRef, origRef->target(), origSegment, capTable, elementSize,
1221 1222 1223 1224
                                  defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
1225 1226
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable, ElementSize elementSize,
1227
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
1228
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
1229
             "Use getWritableStructListPointer() for struct lists.");
1230

1231 1232
    if (origRef->isNull()) {
    useDefault:
Kenton Varda's avatar
Kenton Varda committed
1233
      if (defaultValue == nullptr ||
1234
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
1235
        return ListBuilder(elementSize);
1236
      }
1237
      origRefTarget = copyMessage(
1238
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
1239 1240
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    // We must verify that the pointer has the right size.  Unlike in
    // getWritableStructListPointer(), we never need to "upgrade" the data, because this
    // method is called only for non-struct lists, and there is no allowed upgrade path *to*
    // a non-struct list, only *from* them.
    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);
    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getWritableListPointer() but existing pointer is not a list.") {
      goto useDefault;
    }
    ElementSize oldSize = ref->listRef.elementSize();
    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // The existing element size is INLINE_COMPOSITE, though we expected a list of primitives.
      // The existing data must have been written with a newer version of the protocol.  We
      // therefore never need to upgrade the data in this case, but we do need to validate that it
      // is a valid upgrade from what we expected.
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;
      auto dataSize = tag->structRef.dataSize.get();
      auto pointerCount = tag->structRef.ptrCount.get();
      switch (elementSize) {
        case ElementSize::VOID:
          // Anything is a valid upgrade from Void.
          break;
        case ElementSize::BIT:
          KJ_FAIL_REQUIRE(
              "Found struct list where bit list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
          break;

        case ElementSize::BYTE:
        case ElementSize::TWO_BYTES:
        case ElementSize::FOUR_BYTES:
        case ElementSize::EIGHT_BYTES:
          KJ_REQUIRE(dataSize >= ONE * WORDS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          break;
        case ElementSize::POINTER:
          KJ_REQUIRE(pointerCount >= ONE * POINTERS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          // Adjust the pointer to point at the pointer section.
          ptr += dataSize;
          break;
        case ElementSize::INLINE_COMPOSITE:
          KJ_UNREACHABLE;
      }
      // OK, looks valid.
      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         dataSize * BITS_PER_WORD, pointerCount, ElementSize::INLINE_COMPOSITE);
    } else {
      auto dataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      auto pointerCount = pointersPerElement(oldSize) * ELEMENTS;
      if (elementSize == ElementSize::BIT) {
        KJ_REQUIRE(oldSize == ElementSize::BIT,
            "Found non-bit list where bit list was expected.") {
          goto useDefault;
        }
      } else {
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where non-bit list was expected.") {
          goto useDefault;
        }
        KJ_REQUIRE(dataSize >= dataBitsPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
        KJ_REQUIRE(pointerCount >= pointersPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
      }
      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, oldSize);
    }
  }
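
  // Worked example (illustrative): if the existing value is an INLINE_COMPOSITE
  // list of structs with a 2-word data section and 1 pointer, and the caller
  // expects ElementSize::POINTER, the code above advances `ptr` past the 2-word
  // data section so that element 0's pointer section lines up with `ptr`, while
  // the step stays at the full 3 words/element; the struct list can then be
  // accessed as if it were a plain pointer list.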

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue)) {
    return getWritableListPointerAnySize(origRef, origRef->target(), origSegment,
                                         capTable, defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::VOID);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getWritableListPointerAnySize() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE);
    } else {
      auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
      auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, elementSize);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue)) {
    return getWritableStructListPointer(origRef, origRef->target(), origSegment, capTable,
                                        elementSize, defaultValue);
  }
  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::INLINE_COMPOSITE);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    // We must verify that the pointer has the right size and potentially upgrade it if not.
    WirePointer* oldRef = origRef;
    SegmentBuilder* oldSegment = origSegment;
    word* oldPtr = followFars(oldRef, origRefTarget, oldSegment);
    KJ_REQUIRE(oldRef->kind() == WirePointer::LIST,
               "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = oldRef->listRef.elementSize();
    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.
      WirePointer* oldTag = reinterpret_cast<WirePointer*>(oldPtr);
      oldPtr += POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(oldTag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE list with non-STRUCT elements not supported.") {
        goto useDefault;
      }

      auto oldDataSize = oldTag->structRef.dataSize.get();
      auto oldPointerCount = oldTag->structRef.ptrCount.get();
      auto oldStep = (oldDataSize + oldPointerCount * WORDS_PER_POINTER) / ELEMENTS;

      auto elementCount = oldTag->inlineCompositeListElementCount();
      if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
        // Old size is at least as large as we need.  Ship it.
        return ListBuilder(oldSegment, capTable, oldPtr, oldStep * BITS_PER_WORD, elementCount,
                           oldDataSize * BITS_PER_WORD, oldPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
      // The structs in this list are smaller than expected, probably written using an older
      // version of the protocol.  We need to make a copy and expand them.
      auto newDataSize = kj::max(oldDataSize, elementSize.data);
      auto newPointerCount = kj::max(oldPointerCount, elementSize.pointers);
      auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;

      auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
            newStep * upgradeBound<uint64_t>(elementCount),
            []() { KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size"); });
      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(origSegment, origRef);
      word* newPtr = allocate(origRef, origSegment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                              WirePointer::LIST, orphanArena);
      origRef->listRef.setInlineComposite(totalSize);
      WirePointer* newTag = reinterpret_cast<WirePointer*>(newPtr);
      newTag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
      newTag->structRef.set(newDataSize, newPointerCount);
      newPtr += POINTER_SIZE_IN_WORDS;
      word* src = oldPtr;
      word* dst = newPtr;
      for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
        // Copy data section.
        copyMemory(dst, src, oldDataSize);
        // Copy pointer section.
        WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(dst + newDataSize);
        WirePointer* oldPointerSection = reinterpret_cast<WirePointer*>(src + oldDataSize);
        for (auto j: kj::zeroTo(oldPointerCount)) {
          transferPointer(origSegment, newPointerSection + j, oldSegment, oldPointerSection + j);
        }

        dst += newStep * (ONE * ELEMENTS);
        src += oldStep * (ONE * ELEMENTS);
      }
      auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
            oldStep * upgradeBound<uint64_t>(elementCount),
            []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });

      // Zero out old location.  See explanation in getWritableStructPointer().
      // Make sure to include the tag word.
      zeroMemory(oldPtr - POINTER_SIZE_IN_WORDS, oldSize + POINTER_SIZE_IN_WORDS);
      return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                         newDataSize * BITS_PER_WORD, newPointerCount,
                         ElementSize::INLINE_COMPOSITE);
    } else {
      // We're upgrading from a non-struct list.
      auto oldDataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      auto oldPointerCount = pointersPerElement(oldSize) * ELEMENTS;
      auto oldStep = (oldDataSize + oldPointerCount * BITS_PER_POINTER) / ELEMENTS;
      auto elementCount = oldRef->listRef.elementCount();
      if (oldSize == ElementSize::VOID) {
        // Nothing to copy, just allocate a new list.
        return initStructListPointer(origRef, origSegment, capTable, elementCount, elementSize);
      } else {
        // Upgrading to an inline composite list.
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where struct list was expected; upgrading boolean lists to structs "
            "is no longer supported.") {
          goto useDefault;
        }

        auto newDataSize = elementSize.data;
        auto newPointerCount = elementSize.pointers;
        if (oldSize == ElementSize::POINTER) {
          newPointerCount = kj::max(newPointerCount, ONE * POINTERS);
        } else {
          // Old list contains data elements, so we need at least 1 word of data.
          newDataSize = kj::max(newDataSize, ONE * WORDS);
        }
        auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
        auto totalWords = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
              newStep * upgradeBound<uint64_t>(elementCount),
              []() {KJ_FAIL_REQUIRE("total size of struct list is larger than max segment size");});
        // Don't let allocate() zero out the object just yet.
        zeroPointerAndFars(origSegment, origRef);
        word* newPtr = allocate(origRef, origSegment, capTable, totalWords + POINTER_SIZE_IN_WORDS,
                                WirePointer::LIST, orphanArena);
        origRef->listRef.setInlineComposite(totalWords);
        WirePointer* tag = reinterpret_cast<WirePointer*>(newPtr);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
        tag->structRef.set(newDataSize, newPointerCount);
        newPtr += POINTER_SIZE_IN_WORDS;
        if (oldSize == ElementSize::POINTER) {
          WirePointer* dst = reinterpret_cast<WirePointer*>(newPtr + newDataSize);
          WirePointer* src = reinterpret_cast<WirePointer*>(oldPtr);
          for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
            transferPointer(origSegment, dst, oldSegment, src);
            dst += newStep / WORDS_PER_POINTER * (ONE * ELEMENTS);
            ++src;
          }
        } else {
          byte* dst = reinterpret_cast<byte*>(newPtr);
          byte* src = reinterpret_cast<byte*>(oldPtr);
          auto newByteStep = newStep * (ONE * ELEMENTS) * BYTES_PER_WORD;
          auto oldByteStep = oldDataSize / BITS_PER_BYTE;
          for (auto i KJ_UNUSED: kj::zeroTo(elementCount)) {
            copyMemory(dst, src, oldByteStep);
            src += oldByteStep;
            dst += newByteStep;
          }
        }
        auto oldSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
              roundBitsUpToWords(oldStep * upgradeBound<uint64_t>(elementCount)),
              []() { KJ_FAIL_ASSERT("old size overflows but new size doesn't?"); });

        // Zero out old location.  See explanation in getWritableStructPointer().
        zeroMemory(oldPtr, oldSize);
        return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                           newDataSize * BITS_PER_WORD, newPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
    }
  }
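
  // Upgrade example (illustrative): a list written as struct {1 data word,
  // 0 pointers} but accessed as struct {2 data words, 1 pointer} is
  // reallocated with newStep = 3 words/element; each element's single data
  // word is copied, there are no old pointers to transfer, and the old body
  // plus its tag word are zeroed out (see getWritableStructPointer()).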

  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> initTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, TextSize size,
      BuilderArena* orphanArena = nullptr)) {
    // The byte list must include a NUL terminator.
    auto byteSize = size + ONE * BYTES;

    // Allocate the space.
    word* ptr = allocate(
        ref, segment, capTable, roundBytesUpToWords(byteSize), WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, byteSize * (ONE * ELEMENTS / BYTES));

    // Build the Text::Builder.  This will initialize the NUL terminator.
    return { segment, Text::Builder(reinterpret_cast<char*>(ptr), unbound(size / BYTES)) };
  }
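
  // Size arithmetic example (illustrative): initTextPointer() with size = 5
  // bytes allocates roundBytesUpToWords(6 bytes) = 1 word and records a
  // 6-element BYTE list, the extra byte being the NUL terminator; the
  // returned Text::Builder exposes only the first 5 bytes.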

  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> setTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Text::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    TextSize size = assertMax<MAX_TEXT_SIZE>(bounded(value.size()),
        []() { KJ_FAIL_REQUIRE("text blob too big"); }) * BYTES;

    auto allocation = initTextPointer(ref, segment, capTable, size, orphanArena);
    copyMemory(allocation.value.begin(), value);
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, TextSize defaultSize)) {
    return getWritableTextPointer(ref, ref->target(), segment, capTable,
                                  defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, TextSize defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == ZERO * BYTES) {
        return nullptr;
      } else {
        Text::Builder builder = initTextPointer(ref, segment, capTable, defaultSize).value;
        copyMemory(builder.asBytes().begin(), reinterpret_cast<const byte*>(defaultValue),
                   defaultSize);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      byte* bptr = reinterpret_cast<byte*>(ptr);
      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getText{Field,Element}() but existing pointer is not a list.") {
        goto useDefault;
      }
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getText{Field,Element}() but existing list pointer is not byte-sized.") {
        goto useDefault;
      }

      auto maybeSize = trySubtract(ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS),
                                   ONE * BYTES);
      KJ_IF_MAYBE(size, maybeSize) {
        KJ_REQUIRE(*(bptr + *size) == '\0', "Text blob missing NUL terminator.") {
          goto useDefault;
        }
        return Text::Builder(reinterpret_cast<char*>(bptr), unbound(*size / BYTES));
      } else {
        KJ_FAIL_REQUIRE("zero-size blob can't be text (need NUL terminator)") {
          goto useDefault;
        };
      }
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> initDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, BlobSize size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate the space.
    word* ptr = allocate(ref, segment, capTable, roundBytesUpToWords(size),
                         WirePointer::LIST, orphanArena);
    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, size * (ONE * ELEMENTS / BYTES));

    // Build the Data::Builder.
    return { segment, Data::Builder(reinterpret_cast<byte*>(ptr), unbound(size / BYTES)) };
  }
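
  // Note (illustrative): unlike Text, Data carries no NUL terminator, so a
  // 5-byte blob is recorded as exactly a 5-element BYTE list (still rounded
  // up to one word of allocation).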

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> setDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Data::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    BlobSize size = assertMaxBits<BLOB_SIZE_BITS>(bounded(value.size()),
        []() { KJ_FAIL_REQUIRE("text blob too big"); }) * BYTES;

    auto allocation = initDataPointer(ref, segment, capTable, size, orphanArena);
    copyMemory(allocation.value.begin(), value);
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, BlobSize defaultSize)) {
    return getWritableDataPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, BlobSize defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == ZERO * BYTES) {
        return nullptr;
      } else {
        Data::Builder builder = initDataPointer(ref, segment, capTable, defaultSize).value;
        copyMemory(builder.begin(), reinterpret_cast<const byte*>(defaultValue), defaultSize);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getData{Field,Element}() but existing pointer is not a list.") {
        goto useDefault;
      }
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getData{Field,Element}() but existing list pointer is not byte-sized.") {
        goto useDefault;
      }

      return Data::Builder(reinterpret_cast<byte*>(ptr),
          unbound(ref->listRef.elementCount() / ELEMENTS));
    }
  }

  static SegmentAnd<word*> setStructPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, StructReader value,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    auto dataSize = roundBitsUpToBytes(value.dataSize);
    auto ptrCount = value.pointerCount;

    if (canonical) {
      // StructReaders should not have data sizes other than 1 bit or a whole
      // number of bytes, but let's be safe.
      KJ_REQUIRE((value.dataSize == ONE * BITS)
                 || (value.dataSize % BITS_PER_BYTE == ZERO * BITS));
      if (value.dataSize == ONE * BITS) {
        // Handle the truncation case where it's a false in a 1-bit struct
        if (!value.getDataField<bool>(ZERO * ELEMENTS)) {
          dataSize = ZERO * BYTES;
        }
      } else {
        // Truncate the data section
        auto data = value.getDataSectionAsBlob();
        auto end = data.end();
        while (end > data.begin() && end[-1] == 0) --end;
        dataSize = intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD);
      }
      // Truncate pointer section
      const WirePointer* ptr = value.pointers + ptrCount;
      while (ptr > value.pointers && ptr[-1].isNull()) --ptr;
      ptrCount = intervalLength(value.pointers, ptr, MAX_STRUCT_POINTER_COUNT);
    }

    auto dataWords = roundBytesUpToWords(dataSize);
    auto totalSize = dataWords + ptrCount * WORDS_PER_POINTER;
    word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
    ref->structRef.set(dataWords, ptrCount);
    if (value.dataSize == ONE * BITS) {
      // Data size could be made 0 by truncation
      if (dataSize != ZERO * BYTES) {
        *reinterpret_cast<char*>(ptr) = value.getDataField<bool>(ZERO * ELEMENTS);
      }
    } else {
      copyMemory(reinterpret_cast<byte*>(ptr),
                 reinterpret_cast<const byte*>(value.data),
                 dataSize);
    }

    WirePointer* pointerSection = reinterpret_cast<WirePointer*>(ptr + dataWords);
    for (auto i: kj::zeroTo(ptrCount)) {
      copyPointer(segment, capTable, pointerSection + i,
                  value.segment, value.capTable, value.pointers + i,
                  value.nestingLimit, nullptr, canonical);
    }
    return { segment, ptr };
  }
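
  // Canonicalization example (illustrative): a struct with two data words
  // whose second word is all zeroes has its trailing zero bytes stripped
  // above, so only one data word is written; trailing null pointers are
  // likewise dropped from ptrCount.  Equal values thus canonicalize to
  // byte-identical encodings.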

#if !CAPNP_LITE
  static void setCapabilityPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref,
      kj::Own<ClientHook>&& cap) {
    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }
    if (cap->isNull()) {
      zeroMemory(ref);
    } else {
      ref->setCap(capTable->injectCap(kj::mv(cap)));
    }
  }
#endif  // !CAPNP_LITE
  static SegmentAnd<word*> setListPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, ListReader value,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    auto totalSize = assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(
        roundBitsUpToWords(upgradeBound<uint64_t>(value.elementCount) * value.step),
        []() { KJ_FAIL_ASSERT("encountered impossibly long struct list ListReader"); });
    if (value.elementSize != ElementSize::INLINE_COMPOSITE) {
      // List of non-structs.
      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::LIST, orphanArena);
      if (value.elementSize == ElementSize::POINTER) {
        // List of pointers.
        ref->listRef.set(ElementSize::POINTER, value.elementCount);
        for (auto i: kj::zeroTo(value.elementCount * (ONE * POINTERS / ELEMENTS))) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i,
                      value.segment, value.capTable,
                      reinterpret_cast<const WirePointer*>(value.ptr) + i,
                      value.nestingLimit, nullptr, canonical);
        }
      } else {
        // List of data.
        ref->listRef.set(value.elementSize, value.elementCount);
        auto wholeByteSize =
          assertMax(MAX_SEGMENT_WORDS * BYTES_PER_WORD,
            upgradeBound<uint64_t>(value.elementCount) * value.step / BITS_PER_BYTE,
            []() { KJ_FAIL_ASSERT("encountered impossibly long data ListReader"); });
        copyMemory(reinterpret_cast<byte*>(ptr), value.ptr, wholeByteSize);
        auto leftoverBits =
          (upgradeBound<uint64_t>(value.elementCount) * value.step) % BITS_PER_BYTE;
        if (leftoverBits > ZERO * BITS) {
          // We need to copy a partial byte.
          uint8_t mask = (1 << unbound(leftoverBits / BITS)) - 1;
          *((reinterpret_cast<byte*>(ptr)) + wholeByteSize) = mask & *(value.ptr + wholeByteSize);
        }
      }
      return { segment, ptr };
    } else {
      // List of structs.
      StructDataWordCount declDataSize = value.structDataSize / BITS_PER_WORD;
      StructPointerCount declPointerCount = value.structPointerCount;
      StructDataWordCount dataSize = ZERO * WORDS;
      StructPointerCount ptrCount = ZERO * POINTERS;

      if (canonical) {
        for (auto i: kj::zeroTo(value.elementCount)) {
          auto element = value.getStructElement(i);

          // Truncate the data section
          auto data = element.getDataSectionAsBlob();
          auto end = data.end();
          while (end > data.begin() && end[-1] == 0) --end;
          dataSize = kj::max(dataSize, roundBytesUpToWords(
              intervalLength(data.begin(), end, MAX_STUCT_DATA_WORDS * BYTES_PER_WORD)));

          // Truncate pointer section
          const WirePointer* ptr = element.pointers + element.pointerCount;
          while (ptr > element.pointers && ptr[-1].isNull()) --ptr;
          ptrCount = kj::max(ptrCount,
              intervalLength(element.pointers, ptr, MAX_STRUCT_POINTER_COUNT));
        }
        auto newTotalSize = (dataSize + upgradeBound<uint64_t>(ptrCount) * WORDS_PER_POINTER)
            / ELEMENTS * value.elementCount;
        KJ_ASSERT(newTotalSize <= totalSize);  // we've only removed data!
        totalSize = assumeMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS>() - 1>(newTotalSize);
      } else {
        dataSize = declDataSize;
        ptrCount = declPointerCount;
      }

      KJ_DASSERT(value.structDataSize % BITS_PER_WORD == ZERO * BITS);
      word* ptr = allocate(ref, segment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                           WirePointer::LIST, orphanArena);
      ref->listRef.setInlineComposite(totalSize);

      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, value.elementCount);
      tag->structRef.set(dataSize, ptrCount);
      word* dst = ptr + POINTER_SIZE_IN_WORDS;

      const word* src = reinterpret_cast<const word*>(value.ptr);
      for (auto i KJ_UNUSED: kj::zeroTo(value.elementCount)) {
        copyMemory(dst, src, dataSize);
        dst += dataSize;
        src += declDataSize;
        for (auto j: kj::zeroTo(ptrCount)) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(dst) + j,
              value.segment, value.capTable, reinterpret_cast<const WirePointer*>(src) + j,
              value.nestingLimit, nullptr, canonical);
        }
        dst += ptrCount * WORDS_PER_POINTER;
        src += declPointerCount * WORDS_PER_POINTER;
      }
      return { segment, ptr };
    }
  }
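
  // Canonical struct lists (illustrative): the truncated data size and
  // pointer count are the maximum over *all* elements, so a single element
  // with a non-null second pointer forces ptrCount = 2 for every element,
  // and all elements are re-packed at that common (possibly smaller) stride.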

  static KJ_ALWAYS_INLINE(SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      int nestingLimit, BuilderArena* orphanArena = nullptr,
      bool canonical = false)) {
    return copyPointer(dstSegment, dstCapTable, dst,
                       srcSegment, srcCapTable, src, src->target(srcSegment),
                       nestingLimit, orphanArena, canonical);
  }

  static SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      const word* srcTarget, int nestingLimit,
      BuilderArena* orphanArena = nullptr, bool canonical = false) {
    // Deep-copy the object pointed to by src into dst.  It turns out we can't reuse
    // readStructPointer(), etc. because they do type checking whereas here we want to accept any
    // valid pointer.

    if (src->isNull()) {
    useDefault:
      if (!dst->isNull()) {
        zeroObject(dstSegment, dstCapTable, dst);
        zeroMemory(dst);
      }
      return { dstSegment, nullptr };
    }

    const word* ptr;
    KJ_IF_MAYBE(p, WireHelpers::followFars(src, srcTarget, srcSegment)) {
      ptr = p;
    } else {
      goto useDefault;
    }

    switch (src->kind()) {
      case WirePointer::STRUCT:
        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        KJ_REQUIRE(boundsCheck(srcSegment, ptr, src->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          goto useDefault;
        }
        return setStructPointer(dstSegment, dstCapTable, dst,
            StructReader(srcSegment, srcCapTable, ptr,
                         reinterpret_cast<const WirePointer*>(ptr + src->structRef.dataSize.get()),
                         src->structRef.dataSize.get() * BITS_PER_WORD,
                         src->structRef.ptrCount.get(),
                         nestingLimit - 1),
            orphanArena, canonical);

      case WirePointer::LIST: {
        ElementSize elementSize = src->listRef.elementSize();

        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        if (elementSize == ElementSize::INLINE_COMPOSITE) {
          auto wordCount = src->listRef.inlineCompositeWordCount();
          const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);

          KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          ptr += POINTER_SIZE_IN_WORDS;

          KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                     "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
            goto useDefault;
          }

          auto elementCount = tag->inlineCompositeListElementCount();
          auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

          KJ_REQUIRE(wordsPerElement * upgradeBound<uint64_t>(elementCount) <= wordCount,
                     "INLINE_COMPOSITE list's elements overrun its word count.") {
            goto useDefault;
          }

          if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
            // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
            // without having sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr,
                         elementCount, wordsPerElement * BITS_PER_WORD,
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
                         nestingLimit - 1),
              orphanArena, canonical);
        } else {
          auto dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
          auto pointerCount = pointersPerElement(elementSize) * ELEMENTS;
          auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
          auto elementCount = src->listRef.elementCount();
          auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);
          KJ_REQUIRE(boundsCheck(srcSegment, ptr, wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          if (elementSize == ElementSize::VOID) {
            // Watch out for lists of void, which can claim to be arbitrarily large without having
            // sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (ONE * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr, elementCount, step, dataSize, pointerCount,
                         elementSize, nestingLimit - 1),
              orphanArena, canonical);
        }
      }

      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unexpected FAR pointer.") {
          goto useDefault;
        }

      case WirePointer::OTHER: {
        KJ_REQUIRE(src->isCapability(), "Unknown pointer type.") {
          goto useDefault;
        }

        if (canonical) {
          KJ_FAIL_REQUIRE("Cannot create a canonical message with a capability") {
            break;
          }
        }
#if !CAPNP_LITE
        KJ_IF_MAYBE(cap, srcCapTable->extractCap(src->capRef.index.get())) {
          setCapabilityPointer(dstSegment, dstCapTable, dst, kj::mv(*cap));
          // Return dummy non-null pointer so OrphanBuilder doesn't end up null.
          return { dstSegment, reinterpret_cast<word*>(1) };
        } else {
#endif  // !CAPNP_LITE
          KJ_FAIL_REQUIRE("Message contained invalid capability pointer.") {
            goto useDefault;
          }
#if !CAPNP_LITE
        }
#endif  // !CAPNP_LITE
      }
    }

    KJ_UNREACHABLE;
  }
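
  // Amplification note (illustrative): a malicious message can declare a
  // list of zero-sized elements (VOID, or zero-length structs) with ~2^29
  // elements while occupying almost no wire bytes.  The amplifiedRead()
  // checks above charge such lists against the read limit as if they took
  // one word per element, so copying them cannot consume unbounded CPU.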

  static void adopt(SegmentBuilder* segment, CapTableBuilder* capTable,
                    WirePointer* ref, OrphanBuilder&& value) {
    KJ_REQUIRE(value.segment == nullptr || value.segment->getArena() == segment->getArena(),
               "Adopted object must live in the same message.");

    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }

    if (value == nullptr) {
      // Set null.
      zeroMemory(ref);
    } else if (value.tagAsPtr()->isPositional()) {
      WireHelpers::transferPointer(segment, ref, value.segment, value.tagAsPtr(), value.location);
    } else {
      // FAR and OTHER pointers are position-independent, so we can just copy.
      copyMemory(ref, value.tagAsPtr());
    }

    // Take ownership away from the OrphanBuilder.
    zeroMemory(value.tagAsPtr());
    value.location = nullptr;
    value.segment = nullptr;
  }

  static OrphanBuilder disown(SegmentBuilder* segment, CapTableBuilder* capTable,
                              WirePointer* ref) {
    word* location;

    if (ref->isNull()) {
      location = nullptr;
    } else if (ref->kind() == WirePointer::OTHER) {
      KJ_REQUIRE(ref->isCapability(), "Unknown pointer type.") { break; }
      location = reinterpret_cast<word*>(1);  // dummy so that it is non-null
    } else {
      WirePointer* refCopy = ref;
      location = followFarsNoWritableCheck(refCopy, ref->target(), segment);
    }

    OrphanBuilder result(ref, segment, capTable, location);
    if (!ref->isNull() && ref->isPositional()) {
      result.tagAsPtr()->setKindForOrphan(ref->kind());
    }

    // Zero out the pointer that was disowned.
    zeroMemory(ref);

    return result;
  }

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      int nestingLimit)) {
    return readStructPointer(segment, capTable, ref, ref->target(segment),
                             defaultValue, nestingLimit);
  }

  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, int nestingLimit)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return StructReader();
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target(segment);
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }
    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
      ptr = p;
    } else {
      goto useDefault;
    }
    KJ_REQUIRE(ref->kind() == WirePointer::STRUCT,
               "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    KJ_REQUIRE(boundsCheck(segment, ptr, ref->structRef.wordSize()),
               "Message contained out-of-bounds struct pointer.") {
      goto useDefault;
    }
    return StructReader(
        segment, capTable,
        ptr, reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get()),
        ref->structRef.dataSize.get() * BITS_PER_WORD,
        ref->structRef.ptrCount.get(),
        nestingLimit - 1);
  }
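
  // Note (illustrative): nestingLimit is decremented on every pointer hop, so
  // a maliciously crafted message containing a pointer cycle bottoms out at
  // the limit and falls back to the default value instead of recursing
  // forever.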

#if !CAPNP_LITE
  static KJ_ALWAYS_INLINE(kj::Own<ClientHook> readCapabilityPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, int nestingLimit)) {
    kj::Maybe<kj::Own<ClientHook>> maybeCap;

    KJ_REQUIRE(brokenCapFactory != nullptr,
               "Trying to read capabilities without ever having created a capability context.  "
               "To read capabilities from a message, you must imbue it with CapReaderContext, or "
               "use the Cap'n Proto RPC system.");

    if (ref->isNull()) {
      return brokenCapFactory->newNullCap();
    } else if (!ref->isCapability()) {
      KJ_FAIL_REQUIRE(
          "Message contains non-capability pointer where capability pointer was expected.") {
        break;
      }
      return brokenCapFactory->newBrokenCap(
          "Calling capability extracted from a non-capability pointer.");
    } else KJ_IF_MAYBE(cap, capTable->extractCap(ref->capRef.index.get())) {
      return kj::mv(*cap);
    } else {
      KJ_FAIL_REQUIRE("Message contains invalid capability pointer.") {
        break;
      }
      return brokenCapFactory->newBrokenCap("Calling invalid capability pointer.");
    }
  }
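
  // Note on the KJ_REQUIRE / KJ_FAIL_REQUIRE blocks used throughout this
  // file (illustrative): the braced block following the macro is a recovery
  // path that runs only when the requirement fails and the error handler
  // does not throw; `break` exits that block, so the code above degrades to
  // returning a broken capability rather than crashing.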
#endif  // !CAPNP_LITE
  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      ElementSize expectedElementSize, int nestingLimit, bool checkElementSize = true)) {
    return readListPointer(segment, capTable, ref, ref->target(segment), defaultValue,
                           expectedElementSize, nestingLimit, checkElementSize);
  }

  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, ElementSize expectedElementSize, int nestingLimit,
      bool checkElementSize = true)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListReader(expectedElementSize);
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target(segment);
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }
    const word* ptr;
    KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
      ptr = p;
    } else {
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Message contains non-list pointer where list pointer was expected.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      auto wordCount = ref->listRef.inlineCompositeWordCount();
      // An INLINE_COMPOSITE list points to a tag, which is formatted like a pointer.
      const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
      KJ_REQUIRE(boundsCheck(segment, ptr, wordCount + POINTER_SIZE_IN_WORDS),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }
      ptr += POINTER_SIZE_IN_WORDS;

      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
        goto useDefault;
      }
      auto size = tag->inlineCompositeListElementCount();
      auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;
      KJ_REQUIRE(upgradeBound<uint64_t>(size) * wordsPerElement <= wordCount,
                 "INLINE_COMPOSITE list's elements overrun its word count.") {
        goto useDefault;
      }
      if (wordsPerElement * (ONE * ELEMENTS) == ZERO * WORDS) {
        // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
        // without having sent actual data.
        KJ_REQUIRE(amplifiedRead(segment, size * (ONE * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        // If a struct list was not expected, then presumably a non-struct list was upgraded to a
        // struct list. We need to manipulate the pointer to point at the first field of the
        // struct. Together with the `step` field, this will allow the struct list to be accessed
        // as if it were a primitive list without branching.
        // Check whether the size is compatible.
        switch (expectedElementSize) {
          case ElementSize::VOID:
            break;
          case ElementSize::BIT:
            KJ_FAIL_REQUIRE(
                "Found struct list where bit list was expected; upgrading boolean lists to structs "
                "is no longer supported.") {
              goto useDefault;
            }
            break;
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            KJ_REQUIRE(tag->structRef.dataSize.get() > ZERO * WORDS,
                       "Expected a primitive list, but got a list of pointer-only structs.") {
              goto useDefault;
            }
            break;
          case ElementSize::POINTER:
            // We expected a list of pointers but got a list of structs.  Assuming the first field
            // in the struct is the pointer we were looking for, we want to munge the pointer to
            // point at the first element's pointer section.
            ptr += tag->structRef.dataSize.get();
            KJ_REQUIRE(tag->structRef.ptrCount.get() > ZERO * POINTERS,
                       "Expected a pointer list, but got a list of data-only structs.") {
              goto useDefault;
            }
            break;
          case ElementSize::INLINE_COMPOSITE:
            break;
        }
      }

      return ListReader(
          segment, capTable, ptr, size, wordsPerElement * BITS_PER_WORD,
          tag->structRef.dataSize.get() * BITS_PER_WORD,
          tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
          nestingLimit - 1);

    } else {
      // This is a primitive or pointer list, but all such lists can also be interpreted as struct
      // lists.  We need to compute the data size and pointer count for such structs.
      auto dataSize = dataBitsPerElement(ref->listRef.elementSize()) * ELEMENTS;
      auto pointerCount = pointersPerElement(ref->listRef.elementSize()) * ELEMENTS;
      auto elementCount = ref->listRef.elementCount();
      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      auto wordCount = roundBitsUpToWords(upgradeBound<uint64_t>(elementCount) * step);
      KJ_REQUIRE(boundsCheck(segment, ptr, wordCount),
            "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      if (elementSize == ElementSize::VOID) {
        // Watch out for lists of void, which can claim to be arbitrarily large without having sent
        // actual data.
        KJ_REQUIRE(amplifiedRead(segment, elementCount * (ONE * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        if (elementSize == ElementSize::BIT && expectedElementSize != ElementSize::BIT) {
          KJ_FAIL_REQUIRE(
              "Found bit list where struct list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
        }

        // Verify that the elements are at least as large as the expected type.  Note that if we
        // expected INLINE_COMPOSITE, the expected sizes here will be zero, because bounds checking
        // will be performed at field access time.  So this check here is for the case where we
        // expected a list of some primitive or pointer type.
        BitCount expectedDataBitsPerElement =
            dataBitsPerElement(expectedElementSize) * ELEMENTS;
        WirePointerCount expectedPointersPerElement =
            pointersPerElement(expectedElementSize) * ELEMENTS;
        KJ_REQUIRE(expectedDataBitsPerElement <= dataSize,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
        KJ_REQUIRE(expectedPointersPerElement <= pointerCount,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
      }
      return ListReader(segment, capTable, ptr, elementCount, step,
                        dataSize, pointerCount, elementSize, nestingLimit - 1);
    }
  }
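
  // Compatibility example (illustrative): the element-size checks above only
  // require the wire elements to be at least as wide as expected, so a list
  // written as TWO_BYTES can be read where a BYTE list is expected, but a
  // BYTE list read where TWO_BYTES is expected fails with "incompatible
  // element type" and falls back to the default.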
  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, ByteCount defaultSize)) {
    return readTextPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr) defaultValue = "";
      return Text::Reader(reinterpret_cast<const char*>(defaultValue),
          unbound(defaultSize / BYTES));
    } else {
      const word* ptr;
      KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
        ptr = p;
      } else {
Kenton Varda's avatar
Kenton Varda committed
2407 2408 2409
        goto useDefault;
      }

2410
      auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);
2411

2412 2413
      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where text was expected.") {
Kenton Varda's avatar
Kenton Varda committed
2414 2415 2416
        goto useDefault;
      }

2417
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
2418
                 "Message contains list pointer of non-bytes where text was expected.") {
Kenton Varda's avatar
Kenton Varda committed
2419 2420 2421
        goto useDefault;
      }

2422
      KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
2423
                 "Message contained out-of-bounds text pointer.") {
Kenton Varda's avatar
Kenton Varda committed
2424 2425 2426
        goto useDefault;
      }

2427
      KJ_REQUIRE(size > ZERO * BYTES, "Message contains text that is not NUL-terminated.") {
2428 2429 2430
        goto useDefault;
      }

Kenton Varda's avatar
Kenton Varda committed
2431
      const char* cptr = reinterpret_cast<const char*>(ptr);
2432
      uint unboundedSize = unbound(size / BYTES) - 1;
Kenton Varda's avatar
Kenton Varda committed
2433

2434
      KJ_REQUIRE(cptr[unboundedSize] == '\0', "Message contains text that is not NUL-terminated.") {
Kenton Varda's avatar
Kenton Varda committed
2435 2436 2437
        goto useDefault;
      }

2438
      return Text::Reader(cptr, unboundedSize);
Kenton Varda's avatar
Kenton Varda committed
2439 2440 2441
    }
  }
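
  // For reference (illustrative sketch, not normative): on the wire, Text is just a BYTE
  // list whose final element is a NUL terminator, padded to a word boundary.  The string
  // "hi" therefore occupies one content word following its list pointer:
  //
  //   68 69 00 00 00 00 00 00   // 'h', 'i', '\0', then zero padding
  //
  // which is exactly what the checks above verify: elementSize == BYTE, a nonzero size,
  // and a trailing '\0'.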

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, BlobSize defaultSize)) {
    return readDataPointer(segment, ref, ref->target(segment), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, BlobSize defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      return Data::Reader(reinterpret_cast<const byte*>(defaultValue),
          unbound(defaultSize / BYTES));
    } else {
      const word* ptr;
      KJ_IF_MAYBE(p, followFars(ref, refTarget, segment)) {
        ptr = p;
      } else {
        goto useDefault;
      }

      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      auto size = ref->listRef.elementCount() * (ONE * BYTES / ELEMENTS);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, roundBytesUpToWords(size)),
                 "Message contains out-of-bounds data pointer.") {
        goto useDefault;
      }

      return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(size / BYTES));
    }
  }
};

// =======================================================================================
// PointerBuilder

StructBuilder PointerBuilder::initStruct(StructSize size) {
  return WireHelpers::initStructPointer(pointer, segment, capTable, size);
}

StructBuilder PointerBuilder::getStruct(StructSize size, const word* defaultValue) {
  return WireHelpers::getWritableStructPointer(pointer, segment, capTable, size, defaultValue);
}

ListBuilder PointerBuilder::initList(ElementSize elementSize, ElementCount elementCount) {
  return WireHelpers::initListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::initStructList(ElementCount elementCount, StructSize elementSize) {
  return WireHelpers::initStructListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::getList(ElementSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableListPointer(pointer, segment, capTable, elementSize, defaultValue);
}

ListBuilder PointerBuilder::getStructList(StructSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableStructListPointer(
      pointer, segment, capTable, elementSize, defaultValue);
}

ListBuilder PointerBuilder::getListAnySize(const word* defaultValue) {
  return WireHelpers::getWritableListPointerAnySize(pointer, segment, capTable, defaultValue);
}

template <>
Text::Builder PointerBuilder::initBlob<Text>(ByteCount size) {
  return WireHelpers::initTextPointer(pointer, segment, capTable,
      assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow())).value;
}
template <>
void PointerBuilder::setBlob<Text>(Text::Reader value) {
  WireHelpers::setTextPointer(pointer, segment, capTable, value);
}
template <>
Text::Builder PointerBuilder::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableTextPointer(pointer, segment, capTable, defaultValue,
      assertMax<MAX_TEXT_SIZE>(defaultSize, ThrowOverflow()));
}

template <>
Data::Builder PointerBuilder::initBlob<Data>(ByteCount size) {
  return WireHelpers::initDataPointer(pointer, segment, capTable,
      assertMaxBits<BLOB_SIZE_BITS>(size, ThrowOverflow())).value;
}
template <>
void PointerBuilder::setBlob<Data>(Data::Reader value) {
  WireHelpers::setDataPointer(pointer, segment, capTable, value);
}
template <>
Data::Builder PointerBuilder::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableDataPointer(pointer, segment, capTable, defaultValue,
      assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
}
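
// Note (hedged): these explicit specializations back the generated-code accessors for
// Text and Data fields.  A call such as initBlob<Text>(size) reserves the requested
// content bytes plus the NUL terminator that the text wire format requires, while the
// Data variants allocate exactly the requested byte count.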

void PointerBuilder::setStruct(const StructReader& value, bool canonical) {
  WireHelpers::setStructPointer(segment, capTable, pointer, value, nullptr, canonical);
}

void PointerBuilder::setList(const ListReader& value, bool canonical) {
  WireHelpers::setListPointer(segment, capTable, pointer, value, nullptr, canonical);
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerBuilder::getCapability() {
  return WireHelpers::readCapabilityPointer(
      segment, capTable, pointer, kj::maxValue);
}

void PointerBuilder::setCapability(kj::Own<ClientHook>&& cap) {
  WireHelpers::setCapabilityPointer(segment, capTable, pointer, kj::mv(cap));
}
#endif  // !CAPNP_LITE

void PointerBuilder::adopt(OrphanBuilder&& value) {
  WireHelpers::adopt(segment, capTable, pointer, kj::mv(value));
}

OrphanBuilder PointerBuilder::disown() {
  return WireHelpers::disown(segment, capTable, pointer);
}

void PointerBuilder::clear() {
  WireHelpers::zeroObject(segment, capTable, pointer);
  WireHelpers::zeroMemory(pointer);
}

PointerType PointerBuilder::getPointerType() const {
  if (pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    WirePointer* ptr = pointer;
    SegmentBuilder* sgmt = segment;
    WireHelpers::followFars(ptr, ptr->target(), sgmt);
    switch (ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?");
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}

void PointerBuilder::transferFrom(PointerBuilder other) {
  if (!pointer->isNull()) {
    WireHelpers::zeroObject(segment, capTable, pointer);
    WireHelpers::zeroMemory(pointer);
  }
  WireHelpers::transferPointer(segment, pointer, other.segment, other.pointer);
  WireHelpers::zeroMemory(other.pointer);
}

void PointerBuilder::copyFrom(PointerReader other, bool canonical) {
  if (other.pointer == nullptr) {
    if (!pointer->isNull()) {
      WireHelpers::zeroObject(segment, capTable, pointer);
      WireHelpers::zeroMemory(pointer);
    }
  } else {
    WireHelpers::copyPointer(segment, capTable, pointer,
                             other.segment, other.capTable, other.pointer, other.nestingLimit,
                             nullptr,
                             canonical);
  }
}

PointerReader PointerBuilder::asReader() const {
  return PointerReader(segment, capTable, pointer, kj::maxValue);
}

BuilderArena* PointerBuilder::getArena() const {
  return segment->getArena();
}

CapTableBuilder* PointerBuilder::getCapTable() {
  return capTable;
}

PointerBuilder PointerBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// PointerReader

PointerReader PointerReader::getRoot(SegmentReader* segment, CapTableReader* capTable,
                                     const word* location, int nestingLimit) {
  KJ_REQUIRE(WireHelpers::boundsCheck(segment, location, POINTER_SIZE_IN_WORDS),
             "Root location out-of-bounds.") {
    location = nullptr;
  }

  return PointerReader(segment, capTable,
      reinterpret_cast<const WirePointer*>(location), nestingLimit);
}

StructReader PointerReader::getStruct(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readStructPointer(segment, capTable, ref, defaultValue, nestingLimit);
}

ListReader PointerReader::getList(ElementSize expectedElementSize, const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, expectedElementSize, nestingLimit);
}

ListReader PointerReader::getListAnySize(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, ElementSize::VOID /* dummy */, nestingLimit, false);
}

template <>
Text::Reader PointerReader::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readTextPointer(segment, ref, defaultValue, defaultSize);
}

template <>
Data::Reader PointerReader::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readDataPointer(segment, ref, defaultValue,
      assertMaxBits<BLOB_SIZE_BITS>(defaultSize, ThrowOverflow()));
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerReader::getCapability() const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readCapabilityPointer(segment, capTable, ref, nestingLimit);
}
#endif  // !CAPNP_LITE

const word* PointerReader::getUnchecked() const {
  KJ_REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
  return reinterpret_cast<const word*>(pointer);
}

MessageSizeCounts PointerReader::targetSize() const {
  return pointer == nullptr ? MessageSizeCounts { ZERO * WORDS, 0 }
                            : WireHelpers::totalSize(segment, pointer, nestingLimit);
}

PointerType PointerReader::getPointerType() const {
  if (pointer == nullptr || pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    const WirePointer* ptr = pointer;
    const word* refTarget = ptr->target(segment);
    SegmentReader* sgmt = segment;
    if (WireHelpers::followFars(ptr, refTarget, sgmt) == nullptr) return PointerType::NULL_;
    switch (ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?") { return PointerType::NULL_; }
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type") { return PointerType::NULL_; }
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}

kj::Maybe<Arena&> PointerReader::getArena() const {
  return segment == nullptr ? nullptr : segment->getArena();
}

CapTableReader* PointerReader::getCapTable() {
  return capTable;
}

PointerReader PointerReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}
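
// A brief orientation for the isCanonical() family that follows (informal summary, not a
// spec): a canonical message encodes objects in preorder of pointer traversal with no far
// pointers and no dead space, and structs (including struct list elements) must be
// "truncated", i.e. trailing zero data words and trailing null pointers are not encoded.
// The readHead threaded through these checks tracks where the next object must begin.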

bool PointerReader::isCanonical(const word **readHead) {
  if (!this->pointer) {
    // The pointer is null, so we are canonical and do not read
    return true;
  }

  if (!this->pointer->isPositional()) {
    // The pointer is a FAR or OTHER pointer, and is non-canonical
    return false;
  }

  switch (this->getPointerType()) {
    case PointerType::NULL_:
      // The pointer is null, we are canonical and do not read
      return true;
    case PointerType::STRUCT: {
      bool dataTrunc, ptrTrunc;
      auto structReader = this->getStruct(nullptr);
      if (structReader.getDataSectionSize() == ZERO * BITS &&
          structReader.getPointerSectionSize() == ZERO * POINTERS) {
        return reinterpret_cast<const word*>(this->pointer) == structReader.getLocation();
      } else {
        return structReader.isCanonical(readHead, readHead, &dataTrunc, &ptrTrunc) &&
               dataTrunc && ptrTrunc;
      }
    }
    case PointerType::LIST:
      return this->getListAnySize(nullptr).isCanonical(readHead, pointer);
    case PointerType::CAPABILITY:
      KJ_FAIL_ASSERT("Capabilities are not positional");
  }
  KJ_UNREACHABLE;
}

// =======================================================================================
// StructBuilder

void StructBuilder::clearAll() {
  if (dataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, false);
  } else {
    WireHelpers::zeroMemory(reinterpret_cast<byte*>(data), dataSize / BITS_PER_BYTE);
  }

  for (auto i: kj::zeroTo(pointerCount)) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  WireHelpers::zeroMemory(pointers, pointerCount);
}

void StructBuilder::transferContentFrom(StructBuilder other) {
  // Determine the amount of data the builders have in common.
  auto sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == ONE * BITS) {
      setDataField<bool>(ZERO * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
      // Note: this subtraction can't fail due to the if() above.
      WireHelpers::zeroMemory(unshared,
          subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
  } else {
    WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
                            reinterpret_cast<byte*>(other.data),
                            sharedDataSize / BITS_PER_BYTE);
  }

  // Zero out all pointers in the target.
  for (auto i: kj::zeroTo(pointerCount)) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  WireHelpers::zeroMemory(pointers, pointerCount);

  // Transfer the pointers.
  auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (auto i: kj::zeroTo(sharedPointerCount)) {
    WireHelpers::transferPointer(segment, pointers + i, other.segment, other.pointers + i);
  }

  // Zero out the pointers that were transferred in the source because it no longer has ownership.
  // If the source had any extra pointers that the destination didn't have space for, we
  // intentionally leave them be, so that they'll be cleaned up later.
  WireHelpers::zeroMemory(other.pointers, sharedPointerCount);
}

void StructBuilder::copyContentFrom(StructReader other) {
  // Determine the amount of data the builders have in common.
  auto sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == ONE * BITS) {
      setDataField<bool>(ZERO * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE;
      WireHelpers::zeroMemory(unshared,
          subtractChecked(dataSize, sharedDataSize, []() {}) / BITS_PER_BYTE);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == ONE * BITS) {
    setDataField<bool>(ZERO * ELEMENTS, other.getDataField<bool>(ZERO * ELEMENTS));
  } else {
    WireHelpers::copyMemory(reinterpret_cast<byte*>(data),
                            reinterpret_cast<const byte*>(other.data),
                            sharedDataSize / BITS_PER_BYTE);
  }

  // Zero out all pointers in the target.
  for (auto i: kj::zeroTo(pointerCount)) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  WireHelpers::zeroMemory(pointers, pointerCount);

  // Copy the pointers.
  auto sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (auto i: kj::zeroTo(sharedPointerCount)) {
    WireHelpers::copyPointer(segment, capTable, pointers + i,
        other.segment, other.capTable, other.pointers + i, other.nestingLimit);
  }
}

StructReader StructBuilder::asReader() const {
  return StructReader(segment, capTable, data, pointers,
      dataSize, pointerCount, kj::maxValue);
}

BuilderArena* StructBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* StructBuilder::getCapTable() {
  return capTable;
}

StructBuilder StructBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructReader

MessageSizeCounts StructReader::totalSize() const {
  MessageSizeCounts result = {
    WireHelpers::roundBitsUpToWords(dataSize) + pointerCount * WORDS_PER_POINTER, 0 };

  for (auto i: kj::zeroTo(pointerCount)) {
    result += WireHelpers::totalSize(segment, pointers + i, nestingLimit);
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}

kj::Array<word> StructReader::canonicalize() {
  auto size = totalSize().wordCount + POINTER_SIZE_IN_WORDS;
  kj::Array<word> backing = kj::heapArray<word>(unbound(size / WORDS));
  WireHelpers::zeroMemory(backing.asPtr());
  FlatMessageBuilder builder(backing);
  _::PointerHelpers<AnyPointer>::getInternalBuilder(builder.initRoot<AnyPointer>())
      .setStruct(*this, true);
  KJ_ASSERT(builder.isCanonical());
  auto output = builder.getSegmentsForOutput()[0];
  kj::Array<word> trunc = kj::heapArray<word>(output.size());
  WireHelpers::copyMemory(trunc.begin(), output);
  return trunc;
}
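
// Usage sketch (hedged; `reader1` and `reader2` are illustrative StructReader names):
// canonicalize() produces a deterministic single-segment encoding with the root pointer
// in the first word, so two structs with equal content yield byte-identical arrays,
// suitable for hashing or memcmp()-style comparison:
//
//   kj::Array<word> a = reader1.canonicalize();
//   kj::Array<word> b = reader2.canonicalize();
//   bool same = a.size() == b.size() &&
//               memcmp(a.begin(), b.begin(), a.size() * sizeof(word)) == 0;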

CapTableReader* StructReader::getCapTable() {
  return capTable;
}

StructReader StructReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}
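
// Truncation example (informal): canonical form forbids trailing zeroes, so a struct whose
// two-word data section holds {0x05, 0x00} must instead be encoded with a one-word data
// section.  isCanonical() below checks exactly this via dataTrunc / ptrTrunc: the last
// data word and the last pointer, when present, must be non-zero / non-null.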

bool StructReader::isCanonical(const word **readHead,
                               const word **ptrHead,
                               bool *dataTrunc,
                               bool *ptrTrunc) {
  if (this->getLocation() != *readHead) {
    // Our target area is not at the readHead, preorder fails
    return false;
  }

  if (this->getDataSectionSize() % BITS_PER_WORD != ZERO * BITS) {
    // Using legacy non-word-size structs, reject
    return false;
  }
  auto dataSize = this->getDataSectionSize() / BITS_PER_WORD;

  // Mark whether the struct is properly truncated
  KJ_IF_MAYBE(diff, trySubtract(dataSize, ONE * WORDS)) {
    *dataTrunc = this->getDataField<uint64_t>(*diff / WORDS * ELEMENTS) != 0;
  } else {
    // Data segment empty.
    *dataTrunc = true;
  }

  KJ_IF_MAYBE(diff, trySubtract(this->pointerCount, ONE * POINTERS)) {
    *ptrTrunc = !this->getPointerField(*diff).isNull();
  } else {
    *ptrTrunc = true;
  }

  // Advance the read head
  *readHead += (dataSize + (this->pointerCount * WORDS_PER_POINTER));

  // Check each pointer field for canonicity
  for (auto ptrIndex: kj::zeroTo(this->pointerCount)) {
    if (!this->getPointerField(ptrIndex).isCanonical(ptrHead)) {
      return false;
    }
  }

  return true;
}

// =======================================================================================
// ListBuilder

Text::Builder ListBuilder::asText() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Builder();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  char* cptr = reinterpret_cast<char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  return Text::Builder(cptr, size);
}

Data::Builder ListBuilder::asData() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Builder();
  }

  return Data::Builder(reinterpret_cast<byte*>(ptr), unbound(elementCount / ELEMENTS));
}

StructBuilder ListBuilder::getStructElement(ElementCount index) {
  auto indexBit = upgradeBound<uint64_t>(index) * step;
  byte* structData = ptr + indexBit / BITS_PER_BYTE;
  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructBuilder(segment, capTable, structData,
      reinterpret_cast<WirePointer*>(structData + structDataSize / BITS_PER_BYTE),
      structDataSize, structPointerCount);
}

ListReader ListBuilder::asReader() const {
  return ListReader(segment, capTable, ptr, elementCount, step, structDataSize, structPointerCount,
                    elementSize, kj::maxValue);
}

BuilderArena* ListBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* ListBuilder::getCapTable() {
  return capTable;
}

ListBuilder ListBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListReader

Text::Reader ListReader::asText() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Reader();
  }

  size_t size = unbound(elementCount / ELEMENTS);

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  const char* cptr = reinterpret_cast<const char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  return Text::Reader(cptr, size);
}

Data::Reader ListReader::asData() {
  KJ_REQUIRE(structDataSize == G(8) * BITS && structPointerCount == ZERO * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Reader();
  }

  return Data::Reader(reinterpret_cast<const byte*>(ptr), unbound(elementCount / ELEMENTS));
}

kj::ArrayPtr<const byte> ListReader::asRawBytes() {
  KJ_REQUIRE(structPointerCount == ZERO * POINTERS,
             "Expected data only, got pointers.") {
    return kj::ArrayPtr<const byte>();
  }

  return arrayPtr(reinterpret_cast<const byte*>(ptr),
      WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(elementCount) * (structDataSize / ELEMENTS)));
}

StructReader ListReader::getStructElement(ElementCount index) const {
  KJ_REQUIRE(nestingLimit > 0,
             "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
    return StructReader();
  }

  auto indexBit = upgradeBound<uint64_t>(index) * step;
  const byte* structData = ptr + indexBit / BITS_PER_BYTE;
  const WirePointer* structPointers =
      reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);

  // This check should pass if there are no bugs in the list pointer validation code.
  KJ_DASSERT(structPointerCount == ZERO * POINTERS ||
         (uintptr_t)structPointers % sizeof(void*) == 0,
         "Pointer section of struct list element not aligned.");

  KJ_DASSERT(indexBit % BITS_PER_BYTE == ZERO * BITS);
  return StructReader(
      segment, capTable, structData, structPointers,
      structDataSize, structPointerCount,
      nestingLimit - 1);
}

CapTableReader* ListReader::getCapTable() {
  return capTable;
}

ListReader ListReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

bool ListReader::isCanonical(const word **readHead, const WirePointer *ref) {
  switch (this->getElementSize()) {
    case ElementSize::INLINE_COMPOSITE: {
      *readHead += 1;
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        // The next word to read is the tag word, but the pointer is in
        // front of it, so our check is slightly different
        return false;
      }
      if (this->structDataSize % BITS_PER_WORD != ZERO * BITS) {
        return false;
      }
      auto elementSize = StructSize(this->structDataSize / BITS_PER_WORD,
                                    this->structPointerCount).total() / ELEMENTS;
      auto totalSize = upgradeBound<uint64_t>(this->elementCount) * elementSize;
      if (totalSize != ref->listRef.inlineCompositeWordCount()) {
        return false;
      }
      if (elementSize == ZERO * WORDS / ELEMENTS) {
        return true;
      }
      auto listEnd = *readHead + totalSize;
      auto pointerHead = listEnd;
      bool listDataTrunc = false;
      bool listPtrTrunc = false;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        bool dataTrunc, ptrTrunc;
        if (!this->getStructElement(ec).isCanonical(readHead,
                                                    &pointerHead,
                                                    &dataTrunc,
                                                    &ptrTrunc)) {
          return false;
        }
        listDataTrunc |= dataTrunc;
        listPtrTrunc  |= ptrTrunc;
      }
      KJ_REQUIRE(*readHead == listEnd, *readHead, listEnd);
      *readHead = pointerHead;
      return listDataTrunc && listPtrTrunc;
    }
    case ElementSize::POINTER: {
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }
      *readHead += this->elementCount * (POINTERS / ELEMENTS) * WORDS_PER_POINTER;
      for (auto ec: kj::zeroTo(this->elementCount)) {
        if (!this->getPointerElement(ec).isCanonical(readHead)) {
          return false;
        }
      }
      return true;
    }
    default: {
      if (reinterpret_cast<const word*>(this->ptr) != *readHead) {
        return false;
      }

      auto bitSize = upgradeBound<uint64_t>(this->elementCount) *
                     dataBitsPerElement(this->elementSize);
      auto truncatedByteSize = bitSize / BITS_PER_BYTE;
      auto byteReadHead = reinterpret_cast<const uint8_t*>(*readHead) + truncatedByteSize;
      auto readHeadEnd = *readHead + WireHelpers::roundBitsUpToWords(bitSize);

      auto leftoverBits = bitSize % BITS_PER_BYTE;
      if (leftoverBits > ZERO * BITS) {
        auto mask = ~((1 << unbound(leftoverBits / BITS)) - 1);

        if (mask & *byteReadHead) {
          return false;
        }
        byteReadHead += 1;
      }

      while (byteReadHead != reinterpret_cast<const uint8_t*>(readHeadEnd)) {
        if (*byteReadHead != 0) {
          return false;
        }
        byteReadHead += 1;
      }

      *readHead = readHeadEnd;
      return true;
    }
  }
  KJ_UNREACHABLE;
}
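
// Note on the default case above (informal): canonical primitive lists must zero their
// padding.  For example, a List(Bool) of 3 elements occupies one word; bits 3..63 of that
// word are padding and must all be zero, which is what the mask check and the trailing
// byte scan enforce before the read head advances to the next word boundary.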

// =======================================================================================
// OrphanBuilder

OrphanBuilder OrphanBuilder::initStruct(
    BuilderArena* arena, CapTableBuilder* capTable, StructSize size) {
  OrphanBuilder result;
  StructBuilder builder = WireHelpers::initStructPointer(
      result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, ElementSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initStructList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, StructSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initStructListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initText(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initTextPointer(result.tagAsPtr(), nullptr, capTable,
      assertMax<MAX_TEXT_SIZE>(size, ThrowOverflow()), arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::initData(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initDataPointer(result.tagAsPtr(), nullptr, capTable,
      assertMaxBits<BLOB_SIZE_BITS>(size), arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setStructPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setListPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::copyPointer(
      nullptr, capTable, result.tagAsPtr(),
      copyFrom.segment, copyFrom.capTable, copyFrom.pointer, copyFrom.nestingLimit, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setTextPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setDataPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

#if !CAPNP_LITE
OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, kj::Own<ClientHook> copyFrom) {
  OrphanBuilder result;
  WireHelpers::setCapabilityPointer(nullptr, capTable, result.tagAsPtr(), kj::mv(copyFrom));
  result.segment = arena->getSegment(SegmentId(0));
  result.capTable = capTable;
  result.location = &result.tag;  // dummy to make location non-null
  return result;
}
#endif  // !CAPNP_LITE

OrphanBuilder OrphanBuilder::concat(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementSize elementSize, StructSize structSize,
    kj::ArrayPtr<const ListReader> lists) {
  KJ_REQUIRE(lists.size() > 0, "Can't concat empty list.");

  // Find the overall element count and size.
  ListElementCount elementCount = ZERO * ELEMENTS;
  for (auto& list: lists) {
    elementCount = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(elementCount + list.elementCount,
        []() { KJ_FAIL_REQUIRE("concatenated list exceeds list size limit"); });
    if (list.elementSize != elementSize) {
      // If element sizes don't all match, upgrade to struct list.
      KJ_REQUIRE(list.elementSize != ElementSize::BIT && elementSize != ElementSize::BIT,
                 "can't upgrade bit lists to struct lists");
      elementSize = ElementSize::INLINE_COMPOSITE;
    }
    structSize.data = kj::max(structSize.data,
        WireHelpers::roundBitsUpToWords(list.structDataSize));
    structSize.pointers = kj::max(structSize.pointers, list.structPointerCount);
  }

  // Allocate the list.
  OrphanBuilder result;
  ListBuilder builder = (elementSize == ElementSize::INLINE_COMPOSITE)
      ? WireHelpers::initStructListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, structSize, arena)
      : WireHelpers::initListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);

  // Copy elements.
  switch (elementSize) {
    case ElementSize::INLINE_COMPOSITE: {
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getStructElement(pos).copyContentFrom(list.getStructElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::POINTER: {
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.getPointerElement(pos).copyFrom(list.getPointerElement(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    case ElementSize::BIT: {
      // It's difficult to memcpy() bits since a list could start or end mid-byte. For now we
      // do a slow, naive loop. Probably no one will ever care.
      ListElementCount pos = ZERO * ELEMENTS;
      for (auto& list: lists) {
        for (auto i: kj::zeroTo(list.size())) {
          builder.setDataElement<bool>(pos, list.getDataElement<bool>(i));
          // assumeBits() safe because we checked total size earlier.
          pos = assumeBits<LIST_ELEMENT_COUNT_BITS>(pos + ONE * ELEMENTS);
        }
      }
      break;
    }
    default: {
      // We know all the inputs are primitives with identical size because otherwise we would have
      // chosen INLINE_COMPOSITE. Therefore, we can safely use memcpy() here instead of copying
      // each element manually.
      byte* target = builder.ptr;
      auto step = builder.step / BITS_PER_BYTE;
      for (auto& list: lists) {
        auto count = step * upgradeBound<uint64_t>(list.size());
        WireHelpers::copyMemory(target, list.ptr, assumeBits<SEGMENT_WORD_COUNT_BITS>(count));
        target += count;
      }
      break;
    }
  }

  // Return orphan.
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
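
// Worked example (hypothetical inputs): concatenating a FOUR_BYTES list holding two
// UInt32 values with an EIGHT_BYTES list holding one UInt64 takes the upgrade path
// above -- the element sizes disagree, so elementSize becomes INLINE_COMPOSITE and the
// result is a list of three single-data-word structs filled in via copyContentFrom().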

OrphanBuilder OrphanBuilder::referenceExternalData(BuilderArena* arena, Data::Reader data) {
  KJ_REQUIRE(reinterpret_cast<uintptr_t>(data.begin()) % sizeof(void*) == 0,
             "Cannot referenceExternalData() that is not aligned.");

  auto checkedSize = assertMaxBits<BLOB_SIZE_BITS>(bounded(data.size()));
  auto wordCount = WireHelpers::roundBytesUpToWords(checkedSize * BYTES);
  kj::ArrayPtr<const word> words(reinterpret_cast<const word*>(data.begin()),
                                 unbound(wordCount / WORDS));

  OrphanBuilder result;
  result.tagAsPtr()->setKindForOrphan(WirePointer::LIST);
  result.tagAsPtr()->listRef.set(ElementSize::BYTE, checkedSize * ELEMENTS);
  result.segment = arena->addExternalSegment(words);

  // External data cannot possibly contain capabilities.
  result.capTable = nullptr;

  // const_cast OK here because we will check whether the segment is writable when we try to get
  // a builder.
  result.location = const_cast<word*>(words.begin());

  return result;
}
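
// Usage sketch (illustrative; `arena` and `externalBytes` are assumed names, and the
// buffer must be word-aligned and must outlive the message):
//
//   OrphanBuilder orphan = OrphanBuilder::referenceExternalData(arena, externalBytes);
//
// The message then refers to the caller's buffer directly; nothing is copied unless a
// writable view of the external segment is later requested.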

StructBuilder OrphanBuilder::asStruct(StructSize size) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  StructBuilder result = WireHelpers::getWritableStructPointer(
      tagAsPtr(), location, segment, capTable, size, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = reinterpret_cast<word*>(result.data);

  return result;
}

ListBuilder OrphanBuilder::asList(ElementSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  // (Actually, currently this is not true for primitive lists, but let's not turn into a bug if
  // it changes!)
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asStructList(StructSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableStructListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asListAnySize() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointerAnySize(
      tagAsPtr(), location, segment, capTable, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

Text::Builder OrphanBuilder::asText() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableTextPointer(
      tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
}

Data::Builder OrphanBuilder::asData() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableDataPointer(
      tagAsPtr(), location, segment, capTable, nullptr, ZERO * BYTES);
}

StructReader OrphanBuilder::asStructReader(StructSize size) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readStructPointer(
      segment, capTable, tagAsPtr(), location, nullptr, kj::maxValue);
}

ListReader OrphanBuilder::asListReader(ElementSize elementSize) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, elementSize, kj::maxValue);
}

ListReader OrphanBuilder::asListReaderAnySize() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, ElementSize::VOID /* dummy */,
      kj::maxValue);
}

#if !CAPNP_LITE
kj::Own<ClientHook> OrphanBuilder::asCapability() const {
  return WireHelpers::readCapabilityPointer(segment, capTable, tagAsPtr(), kj::maxValue);
}
#endif  // !CAPNP_LITE

Text::Reader OrphanBuilder::asTextReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readTextPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
}

Data::Reader OrphanBuilder::asDataReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readDataPointer(segment, tagAsPtr(), location, nullptr, ZERO * BYTES);
}

bool OrphanBuilder::truncate(ElementCount uncheckedSize, bool isText) {
  ListElementCount size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(uncheckedSize,
      []() { KJ_FAIL_REQUIRE("requested list size is too large"); });

  WirePointer* ref = tagAsPtr();
  SegmentBuilder* segment = this->segment;

  word* target = WireHelpers::followFars(ref, location, segment);

  if (ref->isNull()) {
    // We don't know the right element size, so we can't resize this list.
    return size == ZERO * ELEMENTS;
  }

  KJ_REQUIRE(ref->kind() == WirePointer::LIST, "Can't truncate non-list.") {
    return false;
  }

  if (isText) {
    // Add space for the NUL terminator.
    size = assertMaxBits<LIST_ELEMENT_COUNT_BITS>(size + ONE * ELEMENTS,
        []() { KJ_FAIL_REQUIRE("requested list size is too large"); });
  }
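
  // From here there are three outcomes for each element-size class (a summary of the code
  // below, not additional behavior): shrink in place by zeroing the tail and asking the
  // segment to give back the words (tryTruncate); grow in place when the list sits at the
  // end of its segment (tryExtend); or, failing that, allocate a fresh list and transfer
  // the old elements into it, replacing *this.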

  auto elementSize = ref->listRef.elementSize();

  if (elementSize == ElementSize::INLINE_COMPOSITE) {
    auto oldWordCount = ref->listRef.inlineCompositeWordCount();

    WirePointer* tag = reinterpret_cast<WirePointer*>(target);
    ++target;
    KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
               "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
      return false;
    }
    StructSize structSize(tag->structRef.dataSize.get(), tag->structRef.ptrCount.get());
    auto elementStep = structSize.total() / ELEMENTS;

    auto oldSize = tag->inlineCompositeListElementCount();

    SegmentWordCount sizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(size) * elementStep,
        []() { KJ_FAIL_ASSERT("requested list size too large to fit in message segment"); });
    SegmentWordCount oldSizeWords = assertMaxBits<SEGMENT_WORD_COUNT_BITS>(
        upgradeBound<uint64_t>(oldSize) * elementStep,
        []() { KJ_FAIL_ASSERT("prior to truncate, list is larger than max segment size?"); });

    word* newEndWord = target + sizeWords;
    word* oldEndWord = target + oldWordCount;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (auto i: kj::range(size, oldSize)) {
        // assumeBits() safe because we checked that both sizeWords and oldSizeWords are in-range
        // above.
        WireHelpers::zeroObject(segment, capTable, tag, target +
            assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep));
      }
      ref->listRef.setInlineComposite(sizeWords);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else if (newEndWord <= oldEndWord) {
      // Apparently the old list was over-allocated? The word count is more than needed to store
      // the elements. This is "valid" but shouldn't happen in practice unless someone is toying
      // with us.
      word* expectedEnd = target + oldSizeWords;
      KJ_ASSERT(newEndWord >= expectedEnd);
      WireHelpers::zeroMemory(expectedEnd,
          intervalLength(expectedEnd, newEndWord, MAX_SEGMENT_WORDS));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.setInlineComposite(sizeWords);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initStructList(segment->getArena(), capTable, size, structSize);

        ListBuilder newList = replacement.asStructList(structSize);
        for (auto i: kj::zeroTo(oldSize)) {
          // assumeBits() safe because we checked that both sizeWords and oldSizeWords are in-range
          // above.
          word* element = target +
              assumeBits<SEGMENT_WORD_COUNT_BITS>(upgradeBound<uint64_t>(i) * elementStep);
          newList.getStructElement(i).transferContentFrom(
              StructBuilder(segment, capTable, element,
                            reinterpret_cast<WirePointer*>(element + structSize.data),
                            structSize.data * BITS_PER_WORD, structSize.pointers));
        }

        *this = kj::mv(replacement);
      }
    }
  } else if (elementSize == ElementSize::POINTER) {
    // TODO(cleanup): GCC won't let me declare this constexpr, claiming POINTERS is not constexpr,
    //   but it is?
    const auto POINTERS_PER_ELEMENT = ONE * POINTERS / ELEMENTS;

    auto oldSize = ref->listRef.elementCount();
    word* newEndWord = target + size * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;
    word* oldEndWord = target + oldSize * POINTERS_PER_ELEMENT * WORDS_PER_POINTER;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (WirePointer* element = reinterpret_cast<WirePointer*>(newEndWord);
           element < reinterpret_cast<WirePointer*>(oldEndWord); ++element) {
        WireHelpers::zeroPointerAndFars(segment, element);
      }
      ref->listRef.set(ElementSize::POINTER, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.set(ElementSize::POINTER, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(
            segment->getArena(), capTable, size, ElementSize::POINTER);
        ListBuilder newList = replacement.asList(ElementSize::POINTER);
        WirePointer* oldPointers = reinterpret_cast<WirePointer*>(target);
        for (auto i: kj::zeroTo(oldSize)) {
          newList.getPointerElement(i).transferFrom(
              PointerBuilder(segment, capTable, oldPointers + i * POINTERS_PER_ELEMENT));
        }
        *this = kj::mv(replacement);
      }
    }
  } else {
    auto oldSize = ref->listRef.elementCount();
    auto step = dataBitsPerElement(elementSize);
    const auto MAX_STEP_BYTES = ONE * WORDS / ELEMENTS * BYTES_PER_WORD;
    word* newEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(size) * step);
    word* oldEndWord = target + WireHelpers::roundBitsUpToWords(
        upgradeBound<uint64_t>(oldSize) * step);

    if (size <= oldSize) {
      // When truncating text, we want to set the null terminator as well, so we'll do our zeroing
      // at the byte level.
      byte* begin = reinterpret_cast<byte*>(target);
      byte* newEndByte = begin + WireHelpers::roundBitsUpToBytes(
          upgradeBound<uint64_t>(size) * step) - isText;
      byte* oldEndByte = reinterpret_cast<byte*>(oldEndWord);

      WireHelpers::zeroMemory(newEndByte,
          intervalLength(newEndByte, oldEndByte, MAX_LIST_ELEMENTS * MAX_STEP_BYTES));
      ref->listRef.set(elementSize, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      // We're trying to extend, not truncate.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the memory is already zero'd.
        ref->listRef.set(elementSize, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(segment->getArena(), capTable, size, elementSize);
        ListBuilder newList = replacement.asList(elementSize);
        auto words = WireHelpers::roundBitsUpToWords(
            dataBitsPerElement(elementSize) * upgradeBound<uint64_t>(oldSize));
        WireHelpers::copyMemory(reinterpret_cast<word*>(newList.ptr), target, words);
        *this = kj::mv(replacement);
      }
    }
  }
  return true;
}

void OrphanBuilder::truncate(ElementCount size, ElementSize elementSize) {
  if (!truncate(size, false)) {
3721 3722 3723
    // assumeBits() safe since it's checked inside truncate()
    *this = initList(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
3724 3725 3726 3727 3728
  }
}

void OrphanBuilder::truncate(ElementCount size, StructSize elementSize) {
  if (!truncate(size, false)) {
3729 3730 3731
    // assumeBits() safe since it's checked inside truncate()
    *this = initStructList(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size), elementSize);
3732 3733 3734 3735 3736
  }
}

void OrphanBuilder::truncateText(ElementCount size) {
  if (!truncate(size, true)) {
3737 3738 3739
    // assumeBits() safe since it's checked inside truncate()
    *this = initText(segment->getArena(), capTable,
        assumeBits<LIST_ELEMENT_COUNT_BITS>(size) * (ONE * BYTES / ELEMENTS));
3740
  }
3741 3742
}

void OrphanBuilder::euthanize() {
  // Carefully catch any exceptions and rethrow them as recoverable exceptions since we may be in
  // a destructor.
  auto exception = kj::runCatchingExceptions([&]() {
    if (tagAsPtr()->isPositional()) {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr(), location);
    } else {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr());
    }

    WireHelpers::zeroMemory(&tag, ONE * WORDS);
    segment = nullptr;
    location = nullptr;
  });

  KJ_IF_MAYBE(e, exception) {
    kj::getExceptionCallback().onRecoverableException(kj::mv(*e));
  }
}

}  // namespace _ (private)
}  // namespace capnp