// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#define CAPNP_PRIVATE
#include "layout.h"
#include <kj/debug.h>
#include "arena.h"
#include <string.h>
#include <stdlib.h>

#if !CAPNP_LITE
#include "capability.h"
#endif  // !CAPNP_LITE

namespace capnp {
namespace _ {  // private

#if !CAPNP_LITE
static BrokenCapFactory* brokenCapFactory = nullptr;
// Horrible hack:  We need to be able to construct broken caps without any capability context,
// but we can't have a link-time dependency on libcapnp-rpc.

void setGlobalBrokenCapFactoryForLayoutCpp(BrokenCapFactory& factory) {
  // Called from capability.c++ when the capability API is used, to make sure that layout.c++
  // is ready for it.  May be called multiple times but always with the same value.
  __atomic_store_n(&brokenCapFactory, &factory, __ATOMIC_RELAXED);
}

}  // namespace _ (private)

const uint ClientHook::NULL_CAPABILITY_BRAND = 0;
// Defined here rather than capability.c++ so that we can safely call isNull() in this file.

namespace _ {  // private

#endif  // !CAPNP_LITE

// =======================================================================================

struct WirePointer {
  // A pointer, in exactly the format in which it appears on the wire.

  // Copying and moving are not allowed because the offset would become wrong.
  WirePointer(const WirePointer& other) = delete;
  WirePointer(WirePointer&& other) = delete;
  WirePointer& operator=(const WirePointer& other) = delete;
  WirePointer& operator=(WirePointer&& other) = delete;

  // -----------------------------------------------------------------
  // Common part of all pointers:  kind + offset
  //
  // Actually this is not terribly common.  The "offset" could actually be different things
  // depending on the context:
  // - For a regular (e.g. struct/list) pointer, a signed word offset from the word immediately
  //   following the pointer.  (The off-by-one means the offset is more often zero, saving
  //   bytes on the wire when packed.)
  // - For an inline composite list tag (not really a pointer, but structured similarly), an
  //   element count.
  // - For a FAR pointer, an unsigned offset into the target segment.
  // - For a FAR landing pad, zero indicates that the target value immediately follows the pad while
  //   1 indicates that the pad is followed by another FAR pointer that actually points at the
  //   value.
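  //
  // For example (see setKindAndTarget() below), a STRUCT pointer whose target is the word
  // immediately following the pointer itself has offset 0, so its lower 32 bits are just
  // (0 << 2) | STRUCT == 0; if the target is two words further along, they become
  // (2 << 2) | STRUCT == 0x00000008.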

  enum Kind {
    STRUCT = 0,
    // Reference points at / describes a struct.

    LIST = 1,
    // Reference points at / describes a list.

    FAR = 2,
    // Reference is a "far pointer", which points at data located in a different segment.  The
    // eventual target is one of the other kinds.

    OTHER = 3
    // Reference has type "other".  If the next 30 bits are all zero (i.e. the lower 32 bits contain
    // only the kind OTHER) then the pointer is a capability.  All other values are reserved.
  };

  WireValue<uint32_t> offsetAndKind;

  KJ_ALWAYS_INLINE(Kind kind() const) {
    return static_cast<Kind>(offsetAndKind.get() & 3);
  }
  KJ_ALWAYS_INLINE(bool isPositional() const) {
    return (offsetAndKind.get() & 2) == 0;  // match STRUCT and LIST but not FAR or OTHER
  }
  KJ_ALWAYS_INLINE(bool isCapability() const) {
    return offsetAndKind.get() == OTHER;
  }

  KJ_ALWAYS_INLINE(word* target()) {
    return reinterpret_cast<word*>(this) + 1 + (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(const word* target() const) {
    return reinterpret_cast<const word*>(this) + 1 +
        (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(void setKindAndTarget(Kind kind, word* target, SegmentBuilder* segment)) {
    // Check that the target is really in the same segment, otherwise subtracting pointers is
    // undefined behavior.  As it turns out, it's undefined behavior that actually produces
    // unexpected results in a real-world situation that actually happened:  At one time,
    // OrphanBuilder's "tag" (a WirePointer) was allowed to be initialized as if it lived in
    // a particular segment when in fact it does not.  On 32-bit systems, where words might
    // only be 32-bit aligned, it's possible that the difference between `this` and `target` is
    // not a whole number of words.  But clang optimizes:
    //     (target - (word*)this - 1) << 2
    // to:
    //     (((ptrdiff_t)target - (ptrdiff_t)this - 8) >> 1)
    // So now when the pointers are not aligned the same, we can end up corrupting the bottom
    // two bits, where `kind` is stored.  For example, this turns a struct into a far pointer.
    // Ouch!
    KJ_DREQUIRE(segment->containsInterval(
        reinterpret_cast<word*>(this), reinterpret_cast<word*>(this + 1)));
    KJ_DREQUIRE(segment->containsInterval(target, target));
    offsetAndKind.set(((target - reinterpret_cast<word*>(this) - 1) << 2) | kind);
  }
  KJ_ALWAYS_INLINE(void setKindWithZeroOffset(Kind kind)) {
    offsetAndKind.set(kind);
  }
  KJ_ALWAYS_INLINE(void setKindAndTargetForEmptyStruct()) {
    // This pointer points at an empty struct.  Assuming the WirePointer itself is in-bounds, we
    // can set the target to point either at the WirePointer itself or immediately after it.  The
    // latter would cause the WirePointer to be "null" (since for an empty struct the upper 32
    // bits are going to be zero).  So we set an offset of -1, as if the struct were allocated
    // immediately before this pointer, to distinguish it from null.
    offsetAndKind.set(0xfffffffc);
  }
  KJ_ALWAYS_INLINE(void setKindForOrphan(Kind kind)) {
    // OrphanBuilder contains a WirePointer, but since it isn't located in a segment, it should
    // not have a valid offset (unless it is a FAR or OTHER pointer).  We set its offset to -1
    // because setting it to zero would mean a pointer to an empty struct would appear to be a null
    // pointer.
    KJ_DREQUIRE(isPositional());
    offsetAndKind.set(kind | 0xfffffffc);
  }

  KJ_ALWAYS_INLINE(ElementCount inlineCompositeListElementCount() const) {
    return (offsetAndKind.get() >> 2) * ELEMENTS;
  }
  KJ_ALWAYS_INLINE(void setKindAndInlineCompositeListElementCount(
      Kind kind, ElementCount elementCount)) {
    offsetAndKind.set(((elementCount / ELEMENTS) << 2) | kind);
  }

  KJ_ALWAYS_INLINE(WordCount farPositionInSegment() const) {
    KJ_DREQUIRE(kind() == FAR,
        "farPositionInSegment() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 3) * WORDS;
  }
  KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
    KJ_DREQUIRE(kind() == FAR,
        "isDoubleFar() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 2) & 1;
  }
  KJ_ALWAYS_INLINE(void setFar(bool isDoubleFar, WordCount pos)) {
    offsetAndKind.set(((pos / WORDS) << 3) | (static_cast<uint32_t>(isDoubleFar) << 2) |
                      static_cast<uint32_t>(Kind::FAR));
  }
  KJ_ALWAYS_INLINE(void setCap(uint index)) {
    offsetAndKind.set(static_cast<uint32_t>(Kind::OTHER));
    capRef.index.set(index);
  }

  // -----------------------------------------------------------------
  // Part of pointer that depends on the kind.

  // Note:  Originally StructRef, ListRef, and FarRef were unnamed types, but this somehow
  //   tickled a bug in GCC:
  //     http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58192
  struct StructRef {
    WireValue<WordCount16> dataSize;
    WireValue<WirePointerCount16> ptrCount;

    inline WordCount wordSize() const {
      return dataSize.get() + ptrCount.get() * WORDS_PER_POINTER;
    }

    KJ_ALWAYS_INLINE(void set(WordCount ds, WirePointerCount rc)) {
      dataSize.set(ds);
      ptrCount.set(rc);
    }
    KJ_ALWAYS_INLINE(void set(StructSize size)) {
      dataSize.set(size.data);
      ptrCount.set(size.pointers);
    }
  };

  struct ListRef {
    WireValue<uint32_t> elementSizeAndCount;

    KJ_ALWAYS_INLINE(ElementSize elementSize() const) {
      return static_cast<ElementSize>(elementSizeAndCount.get() & 7);
    }
    KJ_ALWAYS_INLINE(ElementCount elementCount() const) {
      return (elementSizeAndCount.get() >> 3) * ELEMENTS;
    }
    KJ_ALWAYS_INLINE(WordCount inlineCompositeWordCount() const) {
      return elementCount() * (1 * WORDS / ELEMENTS);
    }

    KJ_ALWAYS_INLINE(void set(ElementSize es, ElementCount ec)) {
      KJ_DREQUIRE(ec < (1 << 29) * ELEMENTS, "Lists are limited to 2**29 elements.");
      elementSizeAndCount.set(((ec / ELEMENTS) << 3) | static_cast<int>(es));
    }

    KJ_ALWAYS_INLINE(void setInlineComposite(WordCount wc)) {
      KJ_DREQUIRE(wc < (1 << 29) * WORDS, "Inline composite lists are limited to 2**29 words.");
      elementSizeAndCount.set(((wc / WORDS) << 3) |
                              static_cast<int>(ElementSize::INLINE_COMPOSITE));
    }
  };

  struct FarRef {
    WireValue<SegmentId> segmentId;

    KJ_ALWAYS_INLINE(void set(SegmentId si)) {
      segmentId.set(si);
    }
  };

  struct CapRef {
    WireValue<uint32_t> index;
    // Index into the message's capability table.
  };

  union {
    uint32_t upper32Bits;

    StructRef structRef;

    ListRef listRef;

    FarRef farRef;

    CapRef capRef;
  };

  KJ_ALWAYS_INLINE(bool isNull() const) {
    // If the upper 32 bits are zero, this is a pointer to an empty struct.  We consider that to be
    // our "null" value.
    return (offsetAndKind.get() == 0) & (upper32Bits == 0);
  }

};
static_assert(sizeof(WirePointer) == sizeof(word),
    "capnp::WirePointer is not exactly one word.  This will probably break everything.");
static_assert(POINTERS * WORDS_PER_POINTER * BYTES_PER_WORD / BYTES == sizeof(WirePointer),
    "WORDS_PER_POINTER is wrong.");
static_assert(POINTERS * BYTES_PER_POINTER / BYTES == sizeof(WirePointer),
    "BYTES_PER_POINTER is wrong.");
static_assert(POINTERS * BITS_PER_POINTER / BITS_PER_BYTE / BYTES == sizeof(WirePointer),
    "BITS_PER_POINTER is wrong.");

namespace {

static const union {
  AlignedData<POINTER_SIZE_IN_WORDS / WORDS> word;
  WirePointer pointer;
} zero = {{{0}}};

}  // namespace

// =======================================================================================

namespace {

template <typename T>
struct SegmentAnd {
  SegmentBuilder* segment;
  T value;
};

}  // namespace

struct WireHelpers {
  static KJ_ALWAYS_INLINE(WordCount roundBytesUpToWords(ByteCount bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + 7 * BYTES) / BYTES_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount roundBitsUpToBytes(BitCount bits)) {
    return (bits + 7 * BITS) / BITS_PER_BYTE;
  }

  static KJ_ALWAYS_INLINE(WordCount64 roundBitsUpToWords(BitCount64 bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + 63 * BITS) / BITS_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount64 roundBitsUpToBytes(BitCount64 bits)) {
    return (bits + 7 * BITS) / BITS_PER_BYTE;
  }
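  // For example, roundBitsUpToWords(1 * BITS) yields 1 * WORDS while roundBitsUpToWords(65 * BITS)
  // yields 2 * WORDS: partial words always round up to a whole word.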

  static KJ_ALWAYS_INLINE(bool boundsCheck(
      SegmentReader* segment, const word* start, const word* end)) {
    // If segment is null, this is an unchecked message, so we don't do bounds checks.
    return segment == nullptr || segment->containsInterval(start, end);
  }

  static KJ_ALWAYS_INLINE(bool amplifiedRead(SegmentReader* segment, WordCount virtualAmount)) {
    // If segment is null, this is an unchecked message, so we don't do read limiter checks.
    return segment == nullptr || segment->amplifiedRead(virtualAmount);
  }

  static KJ_ALWAYS_INLINE(word* allocate(
      WirePointer*& ref, SegmentBuilder*& segment, CapTableBuilder* capTable, WordCount amount,
      WirePointer::Kind kind, BuilderArena* orphanArena)) {
    // Allocate space in the message for a new object, creating far pointers if necessary.
    //
    // * `ref` starts out being a reference to the pointer which shall be assigned to point at the
    //   new object.  On return, `ref` points to a pointer which needs to be initialized with
    //   the object's type information.  Normally this is the same pointer, but it can change if
    //   a far pointer was allocated -- in this case, `ref` will end up pointing to the far
    //   pointer's tag.  Either way, `allocate()` takes care of making sure that the original
    //   pointer ends up leading to the new object.  On return, only the upper 32 bits of `*ref`
    //   need to be filled in by the caller.
    // * `segment` starts out pointing to the segment containing `ref`.  On return, it points to
    //   the segment containing the allocated object, which is usually the same segment but could
    //   be a different one if the original segment was out of space.
    // * `amount` is the number of words to allocate.
    // * `kind` is the kind of object to allocate.  It is used to initialize the pointer.  It
    //   cannot be `FAR` -- far pointers are allocated automatically as needed.
    // * `orphanArena` is usually null.  If it is non-null, then we're allocating an orphan object.
    //   In this case, `segment` starts out null; the allocation takes place in an arbitrary
    //   segment belonging to the arena.  `ref` will be initialized as a non-far pointer, but its
    //   target offset will be set to zero.
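    //
    // For example, when the current segment has no room left, the code below allocates
    // `amount + POINTER_SIZE_IN_WORDS` words in another segment, writes a far pointer into the
    // original `ref` slot, and returns with `ref` pointing at the landing pad in the new segment;
    // the caller then fills in the landing pad's upper 32 bits exactly as it would have filled in
    // the original pointer.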

    if (orphanArena == nullptr) {
      if (!ref->isNull()) zeroObject(segment, capTable, ref);

      if (amount == 0 * WORDS && kind == WirePointer::STRUCT) {
        // Note that the check for kind == WirePointer::STRUCT will hopefully cause this whole
        // branch to be optimized away from all the call sites that are allocating non-structs.
        ref->setKindAndTargetForEmptyStruct();
        return reinterpret_cast<word*>(ref);
      }

      word* ptr = segment->allocate(amount);

      if (ptr == nullptr) {
        // Need to allocate in a new segment.  We'll need to allocate an extra pointer worth of
        // space to act as the landing pad for a far pointer.

        WordCount amountPlusRef = amount + POINTER_SIZE_IN_WORDS;
        auto allocation = segment->getArena()->allocate(amountPlusRef);
        segment = allocation.segment;
        ptr = allocation.words;

        // Set up the original pointer to be a far pointer to the new segment.
        ref->setFar(false, segment->getOffsetTo(ptr));
        ref->farRef.set(segment->getSegmentId());

        // Initialize the landing pad to indicate that the data immediately follows the pad.
        ref = reinterpret_cast<WirePointer*>(ptr);
        ref->setKindAndTarget(kind, ptr + POINTER_SIZE_IN_WORDS, segment);

        // Allocated space follows new pointer.
        return ptr + POINTER_SIZE_IN_WORDS;
      } else {
        ref->setKindAndTarget(kind, ptr, segment);
        return ptr;
      }
    } else {
      // orphanArena is non-null.  Allocate an orphan.
      KJ_DASSERT(ref->isNull());
      auto allocation = orphanArena->allocate(amount);
      segment = allocation.segment;
      ref->setKindForOrphan(kind);
      return allocation.words;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFarsNoWritableCheck(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    // If `ref` is a far pointer, follow it.  On return, `ref` will have been updated to point at
    // a WirePointer that contains the type information about the target object, and a pointer to
    // the object contents is returned.  The caller must NOT use `ref->target()` as this may or may
    // not actually return a valid pointer.  `segment` is also updated to point at the segment which
    // actually contains the object.
    //
    // If `ref` is not a far pointer, this simply returns `refTarget`.  Usually, `refTarget` should
    // be the same as `ref->target()`, but may not be in cases where `ref` is only a tag.
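    //
    // For a double-far, the landing pad is two words: a far pointer giving the object's segment
    // and offset, followed by a tag word that holds the object's kind and size information; the
    // tag's own offset is not used.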

    if (ref->kind() == WirePointer::FAR) {
      segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      WirePointer* pad =
          reinterpret_cast<WirePointer*>(segment->getPtrUnchecked(ref->farPositionInSegment()));
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
      return segment->getPtrUnchecked(pad->farPositionInSegment());
    } else {
      return refTarget;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFars(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    auto result = followFarsNoWritableCheck(ref, refTarget, segment);
    segment->checkWritable();
    return result;
  }

  static KJ_ALWAYS_INLINE(const word* followFars(
      const WirePointer*& ref, const word* refTarget, SegmentReader*& segment)) {
    // Like the other followFars() but operates on readers.

    // If the segment is null, this is an unchecked message, so there are no FAR pointers.
    if (segment != nullptr && ref->kind() == WirePointer::FAR) {
      // Look up the segment containing the landing pad.
      segment = segment->getArena()->tryGetSegment(ref->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains far pointer to unknown segment.") {
        return nullptr;
      }

      // Find the landing pad and check that it is within bounds.
      const word* ptr = segment->getStartPtr() + ref->farPositionInSegment();
      WordCount padWords = (1 + ref->isDoubleFar()) * POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(boundsCheck(segment, ptr, ptr + padWords),
                 "Message contains out-of-bounds far pointer.") {
        return nullptr;
      }

      const WirePointer* pad = reinterpret_cast<const WirePointer*>(ptr);

      // If this is not a double-far then the landing pad is our final pointer.
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->tryGetSegment(pad->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains double-far pointer to unknown segment.") {
        return nullptr;
      }

      return segment->getStartPtr() + pad->farPositionInSegment();
    } else {
      return refTarget;
    }
  }

  // -----------------------------------------------------------------

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref) {
    // Zero out the pointed-to object.  Use when the pointer is about to be overwritten making the
    // target object no longer reachable.

    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (ref->kind()) {
      case WirePointer::STRUCT:
      case WirePointer::LIST:
        zeroObject(segment, capTable, ref, ref->target());
        break;
      case WirePointer::FAR: {
        segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
        if (segment->isWritable()) {  // Don't zero external data.
          WirePointer* pad =
              reinterpret_cast<WirePointer*>(segment->getPtrUnchecked(ref->farPositionInSegment()));

          if (ref->isDoubleFar()) {
            segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
            if (segment->isWritable()) {
              zeroObject(segment, capTable,
                         pad + 1, segment->getPtrUnchecked(pad->farPositionInSegment()));
            }
            memset(pad, 0, sizeof(WirePointer) * 2);
          } else {
            zeroObject(segment, capTable, pad);
            memset(pad, 0, sizeof(WirePointer));
          }
        }
        break;
      }
      case WirePointer::OTHER:
        if (ref->isCapability()) {
#if CAPNP_LITE
          KJ_FAIL_ASSERT("Capability encountered in builder in lite mode?") { break; }
#else  // CAPNP_LITE
          capTable->dropCap(ref->capRef.index.get());
#endif  // CAPNP_LITE, else
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }
  }

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable,
                         WirePointer* tag, word* ptr) {
    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (tag->kind()) {
      case WirePointer::STRUCT: {
        WirePointer* pointerSection =
            reinterpret_cast<WirePointer*>(ptr + tag->structRef.dataSize.get());
        uint count = tag->structRef.ptrCount.get() / POINTERS;
        for (uint i = 0; i < count; i++) {
          zeroObject(segment, capTable, pointerSection + i);
        }
        memset(ptr, 0, tag->structRef.wordSize() * BYTES_PER_WORD / BYTES);
        break;
      }
      case WirePointer::LIST: {
        switch (tag->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            memset(ptr, 0,
                roundBitsUpToWords(ElementCount64(tag->listRef.elementCount()) *
                                   dataBitsPerElement(tag->listRef.elementSize()))
                    * BYTES_PER_WORD / BYTES);
            break;
          case ElementSize::POINTER: {
            uint count = tag->listRef.elementCount() / ELEMENTS;
            for (uint i = 0; i < count; i++) {
              zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i);
            }
            memset(ptr, 0, POINTER_SIZE_IN_WORDS * count * BYTES_PER_WORD / BYTES);
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);

            KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
                  "Don't know how to handle non-STRUCT inline composite.");
            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            uint count = elementTag->inlineCompositeListElementCount() / ELEMENTS;
            if (pointerCount > 0 * POINTERS) {
              word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (uint i = 0; i < count; i++) {
                pos += dataSize;

                for (uint j = 0; j < pointerCount / POINTERS; j++) {
                  zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(pos));
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }

            memset(ptr, 0, (elementTag->structRef.wordSize() * count + POINTER_SIZE_IN_WORDS)
                           * BYTES_PER_WORD / BYTES);
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        KJ_FAIL_ASSERT("Unexpected OTHER pointer.") {
          break;
        }
        break;
    }
  }

  static KJ_ALWAYS_INLINE(
      void zeroPointerAndFars(SegmentBuilder* segment, WirePointer* ref)) {
    // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, but
    // do not zero the object body.  Used when upgrading.

    if (ref->kind() == WirePointer::FAR) {
      SegmentBuilder* padSegment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      if (padSegment->isWritable()) {  // Don't zero external data.
        word* pad = padSegment->getPtrUnchecked(ref->farPositionInSegment());
        memset(pad, 0, sizeof(WirePointer) * (1 + ref->isDoubleFar()));
      }
    }
    memset(ref, 0, sizeof(*ref));
  }


  // -----------------------------------------------------------------

  static MessageSizeCounts totalSize(
      SegmentReader* segment, const WirePointer* ref, int nestingLimit) {
    // Compute the total size of the object pointed to, not counting far pointer overhead.

    MessageSizeCounts result = { 0 * WORDS, 0 };

    if (ref->isNull()) {
      return result;
    }

    KJ_REQUIRE(nestingLimit > 0, "Message is too deeply-nested.") {
      return result;
    }
    --nestingLimit;

    const word* ptr = followFars(ref, ref->target(), segment);

    switch (ref->kind()) {
      case WirePointer::STRUCT: {
        KJ_REQUIRE(boundsCheck(segment, ptr, ptr + ref->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          return result;
        }
        result.wordCount += ref->structRef.wordSize();

        const WirePointer* pointerSection =
            reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get());
        uint count = ref->structRef.ptrCount.get() / POINTERS;
        for (uint i = 0; i < count; i++) {
          result += totalSize(segment, pointerSection + i, nestingLimit);
        }
        break;
      }
      case WirePointer::LIST: {
        switch (ref->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            WordCount64 totalWords = roundBitsUpToWords(
                ElementCount64(ref->listRef.elementCount()) *
                dataBitsPerElement(ref->listRef.elementSize()));
            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + totalWords),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }
            result.wordCount += totalWords;
            break;
          }
          case ElementSize::POINTER: {
            WirePointerCount count = ref->listRef.elementCount() * (POINTERS / ELEMENTS);

            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + count * WORDS_PER_POINTER),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            result.wordCount += count * WORDS_PER_POINTER;

            for (uint i = 0; i < count / POINTERS; i++) {
              result += totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                  nestingLimit);
            }
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WordCount wordCount = ref->listRef.inlineCompositeWordCount();
            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + wordCount + POINTER_SIZE_IN_WORDS),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            const WirePointer* elementTag = reinterpret_cast<const WirePointer*>(ptr);
            ElementCount count = elementTag->inlineCompositeListElementCount();

            KJ_REQUIRE(elementTag->kind() == WirePointer::STRUCT,
                       "Don't know how to handle non-STRUCT inline composite.") {
              return result;
            }

            auto actualSize = elementTag->structRef.wordSize() / ELEMENTS * ElementCount64(count);
            KJ_REQUIRE(actualSize <= wordCount,
                       "Struct list pointer's elements overran size.") {
              return result;
            }

            // We count the actual size rather than the claimed word count because that's what
            // we'll end up with if we make a copy.
            result.wordCount += actualSize + POINTER_SIZE_IN_WORDS;

            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            if (pointerCount > 0 * POINTERS) {
              const word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (uint i = 0; i < count / ELEMENTS; i++) {
                pos += dataSize;

                for (uint j = 0; j < pointerCount / POINTERS; j++) {
                  result += totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                      nestingLimit);
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        if (ref->isCapability()) {
          result.capCount++;
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }

    return result;
  }

  // -----------------------------------------------------------------
  // Copy from an unchecked message.
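  //
  // (An unchecked message is a single flat array of words that is trusted rather than
  // bounds-checked -- the form in which default values are embedded in generated code.  As the
  // checks in copyMessage() below enforce, it can contain neither far pointers nor capabilities.)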

  static KJ_ALWAYS_INLINE(
      void copyStruct(SegmentBuilder* segment, CapTableBuilder* capTable,
                      word* dst, const word* src,
                      WordCount dataSize, WirePointerCount pointerCount)) {
    memcpy(dst, src, dataSize * BYTES_PER_WORD / BYTES);

    const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src + dataSize);
    WirePointer* dstRefs = reinterpret_cast<WirePointer*>(dst + dataSize);

    for (uint i = 0; i < pointerCount / POINTERS; i++) {
      SegmentBuilder* subSegment = segment;
      WirePointer* dstRef = dstRefs + i;
      copyMessage(subSegment, capTable, dstRef, srcRefs + i);
    }
  }

  static word* copyMessage(
      SegmentBuilder*& segment, CapTableBuilder* capTable,
      WirePointer*& dst, const WirePointer* src) {
    // Not always-inline because it's recursive.

    switch (src->kind()) {
      case WirePointer::STRUCT: {
        if (src->isNull()) {
          memset(dst, 0, sizeof(WirePointer));
          return nullptr;
        } else {
          const word* srcPtr = src->target();
          word* dstPtr = allocate(
              dst, segment, capTable, src->structRef.wordSize(), WirePointer::STRUCT, nullptr);

          copyStruct(segment, capTable, dstPtr, srcPtr, src->structRef.dataSize.get(),
                     src->structRef.ptrCount.get());

          dst->structRef.set(src->structRef.dataSize.get(), src->structRef.ptrCount.get());
          return dstPtr;
        }
      }
      case WirePointer::LIST: {
        switch (src->listRef.elementSize()) {
          case ElementSize::VOID:
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            WordCount wordCount = roundBitsUpToWords(
                ElementCount64(src->listRef.elementCount()) *
                dataBitsPerElement(src->listRef.elementSize()));
            const word* srcPtr = src->target();
            word* dstPtr = allocate(dst, segment, capTable, wordCount, WirePointer::LIST, nullptr);
            memcpy(dstPtr, srcPtr, wordCount * BYTES_PER_WORD / BYTES);

            dst->listRef.set(src->listRef.elementSize(), src->listRef.elementCount());
            return dstPtr;
          }

          case ElementSize::POINTER: {
            const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src->target());
            WirePointer* dstRefs = reinterpret_cast<WirePointer*>(
                allocate(dst, segment, capTable, src->listRef.elementCount() *
                    (1 * POINTERS / ELEMENTS) * WORDS_PER_POINTER,
                    WirePointer::LIST, nullptr));

            uint n = src->listRef.elementCount() / ELEMENTS;
            for (uint i = 0; i < n; i++) {
              SegmentBuilder* subSegment = segment;
              WirePointer* dstRef = dstRefs + i;
              copyMessage(subSegment, capTable, dstRef, srcRefs + i);
            }

            dst->listRef.set(ElementSize::POINTER, src->listRef.elementCount());
            return reinterpret_cast<word*>(dstRefs);
          }

          case ElementSize::INLINE_COMPOSITE: {
            const word* srcPtr = src->target();
            word* dstPtr = allocate(dst, segment, capTable,
                src->listRef.inlineCompositeWordCount() + POINTER_SIZE_IN_WORDS,
                WirePointer::LIST, nullptr);

            dst->listRef.setInlineComposite(src->listRef.inlineCompositeWordCount());

            const WirePointer* srcTag = reinterpret_cast<const WirePointer*>(srcPtr);
            memcpy(dstPtr, srcTag, sizeof(WirePointer));

            const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
            word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;

            KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
                "INLINE_COMPOSITE of lists is not yet supported.");

            uint n = srcTag->inlineCompositeListElementCount() / ELEMENTS;
            for (uint i = 0; i < n; i++) {
              copyStruct(segment, capTable, dstElement, srcElement,
                  srcTag->structRef.dataSize.get(), srcTag->structRef.ptrCount.get());
              srcElement += srcTag->structRef.wordSize();
              dstElement += srcTag->structRef.wordSize();
            }
            return dstPtr;
          }
        }
        break;
      }
      case WirePointer::OTHER:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain OTHER pointers (e.g. capabilities).");
        break;
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain far pointers.");
        break;
    }

    return nullptr;
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, WirePointer* src) {
    // Make *dst point to the same object as *src.  Both must reside in the same message, but can
    // be in different segments.  Not always-inline because this is rarely used.
    //
    // Caller MUST zero out the source pointer after calling this, to make sure no later code
    // mistakenly thinks the source location still owns the object.  transferPointer() doesn't do
    // this zeroing itself because many callers transfer several pointers in a loop then zero out
    // the whole section.

    KJ_DASSERT(dst->isNull());
    // We expect the caller to ensure the target is already null so won't leak.

    if (src->isNull()) {
      memset(dst, 0, sizeof(WirePointer));
    } else if (src->isPositional()) {
      transferPointer(dstSegment, dst, srcSegment, src, src->target());
    } else {
      // Far and other pointers are position-independent, so we can just copy.
      memcpy(dst, src, sizeof(WirePointer));
    }
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, const WirePointer* srcTag,
                              word* srcPtr) {
    // Like the other overload, but splits src into a tag and a target.  Particularly useful for
    // OrphanBuilder.

    if (dstSegment == srcSegment) {
      // Same segment, so create a direct pointer.

      if (srcTag->kind() == WirePointer::STRUCT && srcTag->structRef.wordSize() == 0 * WORDS) {
        dst->setKindAndTargetForEmptyStruct();
      } else {
        dst->setKindAndTarget(srcTag->kind(), srcPtr, dstSegment);
      }

      // We can just copy the upper 32 bits.  (Use memcpy() to comply with aliasing rules.)
      memcpy(&dst->upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));
    } else {
      // Need to create a far pointer.  Try to allocate it in the same segment as the source, so
      // that it doesn't need to be a double-far.

      WirePointer* landingPad =
          reinterpret_cast<WirePointer*>(srcSegment->allocate(1 * WORDS));
      if (landingPad == nullptr) {
        // Darn, need a double-far.
        auto allocation = srcSegment->getArena()->allocate(2 * WORDS);
        SegmentBuilder* farSegment = allocation.segment;
        landingPad = reinterpret_cast<WirePointer*>(allocation.words);

        landingPad[0].setFar(false, srcSegment->getOffsetTo(srcPtr));
        landingPad[0].farRef.segmentId.set(srcSegment->getSegmentId());

        landingPad[1].setKindWithZeroOffset(srcTag->kind());
        memcpy(&landingPad[1].upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));

        dst->setFar(true, farSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(farSegment->getSegmentId());
      } else {
        // Simple landing pad is just a pointer.
        landingPad->setKindAndTarget(srcTag->kind(), srcPtr, srcSegment);
        memcpy(&landingPad->upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));

        dst->setFar(false, srcSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(srcSegment->getSegmentId());
      }
    }
  }
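
  // An informal sketch of the two landing-pad shapes the code above can produce when crossing
  // segments: a single-far landing pad is one word, a pointer equivalent to the original but
  // placed in the object's own segment, while a double-far landing pad is two words, a far
  // pointer locating the object plus a tag word carrying its kind and size (the tag's offset is
  // unused).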

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructBuilder initStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate space for the new struct.  Newly-allocated space is automatically zeroed.
    word* ptr = allocate(ref, segment, capTable, size.total(), WirePointer::STRUCT, orphanArena);

    // Initialize the pointer.
    ref->structRef.set(size);

    // Build the StructBuilder.
    return StructBuilder(segment, capTable, ptr, reinterpret_cast<WirePointer*>(ptr + size.data),
                         size.data * BITS_PER_WORD, size.pointers);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      const word* defaultValue)) {
    return getWritableStructPointer(ref, ref->target(), segment, capTable, size, defaultValue);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      StructSize size, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return initStructPointer(ref, segment, capTable, size, orphanArena);
      }
      refTarget = copyMessage(segment, capTable, ref,
          reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* oldRef = ref;
    SegmentBuilder* oldSegment = segment;
    word* oldPtr = followFars(oldRef, refTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::STRUCT,
        "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    WordCount oldDataSize = oldRef->structRef.dataSize.get();
    WirePointerCount oldPointerCount = oldRef->structRef.ptrCount.get();
    WirePointer* oldPointerSection =
        reinterpret_cast<WirePointer*>(oldPtr + oldDataSize);

    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
      // The space allocated for this struct is too small.  Unlike with readers, we can't just
      // run with it and do bounds checks at access time, because how would we handle writes?
      // Instead, we have to copy the struct to a new space now.

      WordCount newDataSize = kj::max(oldDataSize, size.data);
      WirePointerCount newPointerCount = kj::max(oldPointerCount, size.pointers);
      WordCount totalSize = newDataSize + newPointerCount * WORDS_PER_POINTER;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(segment, ref);

      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
      ref->structRef.set(newDataSize, newPointerCount);

      // Copy data section.
      memcpy(ptr, oldPtr, oldDataSize * BYTES_PER_WORD / BYTES);

      // Copy pointer section.
      WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(ptr + newDataSize);
      for (uint i = 0; i < oldPointerCount / POINTERS; i++) {
        transferPointer(segment, newPointerSection + i, oldSegment, oldPointerSection + i);
      }

      // Zero out old location.  This has two purposes:
      // 1) We don't want to leak the original contents of the struct when the message is written
      //    out as it may contain secrets that the caller intends to remove from the new copy.
      // 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
      //    hits the wire.
      memset(oldPtr, 0,
             (oldDataSize + oldPointerCount * WORDS_PER_POINTER) * BYTES_PER_WORD / BYTES);

      return StructBuilder(segment, capTable, ptr, newPointerSection, newDataSize * BITS_PER_WORD,
                           newPointerCount);
    } else {
      return StructBuilder(oldSegment, capTable, oldPtr, oldPointerSection,
                           oldDataSize * BITS_PER_WORD, oldPointerCount);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, ElementSize elementSize, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
        "Should have called initStructListPointer() instead.");

    BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
    WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;
    auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;

    // Calculate size of the list.
    WordCount wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);

    // Allocate the list.
    word* ptr = allocate(ref, segment, capTable, wordCount, WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(elementSize, elementCount);

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, step, elementCount, dataSize,
                       pointerCount, elementSize);
  }

  static KJ_ALWAYS_INLINE(ListBuilder initStructListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, StructSize elementSize, BuilderArena* orphanArena = nullptr)) {
    auto wordsPerElement = elementSize.total() / ELEMENTS;

    // Allocate the list, prefixed by a single WirePointer.
    WordCount wordCount = elementCount * wordsPerElement;
    word* ptr = allocate(ref, segment, capTable, POINTER_SIZE_IN_WORDS + wordCount,
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    // INLINE_COMPOSITE lists replace the element count with the word count.
    ref->listRef.setInlineComposite(wordCount);

    // Initialize the list tag.
    reinterpret_cast<WirePointer*>(ptr)->setKindAndInlineCompositeListElementCount(
        WirePointer::STRUCT, elementCount);
    reinterpret_cast<WirePointer*>(ptr)->structRef.set(elementSize);
    ptr += POINTER_SIZE_IN_WORDS;

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, wordsPerElement * BITS_PER_WORD, elementCount,
                       elementSize.data * BITS_PER_WORD, elementSize.pointers,
                       ElementSize::INLINE_COMPOSITE);
  }
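  // For example, initStructListPointer() for three elements of one data word and one pointer each
  // allocates 1 + 3 * 2 = 7 words: a tag word recording the element count and per-element size,
  // followed by the six element words.  The list pointer's word count covers only the six element
  // words, not the tag.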

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      ElementSize elementSize, const word* defaultValue)) {
    return getWritableListPointer(origRef, origRef->target(), origSegment, capTable, elementSize,
                                  defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable, ElementSize elementSize,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
             "Use getStructList{Element,Field}() for structs.");

    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(elementSize);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // We must verify that the pointer has the right size.  Unlike in
    // getWritableStructListPointer(), we never need to "upgrade" the data, because this
    // method is called only for non-struct lists, and there is no allowed upgrade path *to*
    // a non-struct list, only *from* one.

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = ref->listRef.elementSize();

    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // The existing element size is INLINE_COMPOSITE, though we expected a list of primitives.
      // The existing data must have been written with a newer version of the protocol.  We
      // therefore never need to upgrade the data in this case, but we do need to validate that it
      // is a valid upgrade from what we expected.

      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      WordCount dataSize = tag->structRef.dataSize.get();
      WirePointerCount pointerCount = tag->structRef.ptrCount.get();

      switch (elementSize) {
        case ElementSize::VOID:
          // Anything is a valid upgrade from Void.
          break;

        case ElementSize::BIT:
          KJ_FAIL_REQUIRE(
              "Found struct list where bit list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
          break;

        case ElementSize::BYTE:
        case ElementSize::TWO_BYTES:
        case ElementSize::FOUR_BYTES:
        case ElementSize::EIGHT_BYTES:
          KJ_REQUIRE(dataSize >= 1 * WORDS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          break;

        case ElementSize::POINTER:
          KJ_REQUIRE(pointerCount >= 1 * POINTERS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          // Adjust the pointer to point at the pointer section.
          ptr += dataSize;
          break;

        case ElementSize::INLINE_COMPOSITE:
          KJ_UNREACHABLE;
      }

      // OK, looks valid.

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         dataSize * BITS_PER_WORD, pointerCount, ElementSize::INLINE_COMPOSITE);
    } else {
      BitCount dataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      WirePointerCount pointerCount = pointersPerElement(oldSize) * ELEMENTS;

      if (elementSize == ElementSize::BIT) {
        KJ_REQUIRE(oldSize == ElementSize::BIT,
            "Found non-bit list where bit list was expected.") {
          goto useDefault;
        }
      } else {
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where non-bit list was expected.") {
          goto useDefault;
        }
        KJ_REQUIRE(dataSize >= dataBitsPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
        KJ_REQUIRE(pointerCount >= pointersPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
      }

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, oldSize);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue)) {
    return getWritableListPointerAnySize(origRef, origRef->target(), origSegment,
                                         capTable, defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::VOID);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE);
    } else {
      BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
      WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, elementSize);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue)) {
    return getWritableStructListPointer(origRef, origRef->target(), origSegment, capTable,
                                        elementSize, defaultValue);
  }
  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::INLINE_COMPOSITE);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    // We must verify that the pointer has the right size and potentially upgrade it if not.
    WirePointer* oldRef = origRef;
    SegmentBuilder* oldSegment = origSegment;
    word* oldPtr = followFars(oldRef, origRefTarget, oldSegment);
    KJ_REQUIRE(oldRef->kind() == WirePointer::LIST,
               "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = oldRef->listRef.elementSize();
    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.
      WirePointer* oldTag = reinterpret_cast<WirePointer*>(oldPtr);
      oldPtr += POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(oldTag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE list with non-STRUCT elements not supported.") {
        goto useDefault;
      }

      WordCount oldDataSize = oldTag->structRef.dataSize.get();
      WirePointerCount oldPointerCount = oldTag->structRef.ptrCount.get();
      auto oldStep = (oldDataSize + oldPointerCount * WORDS_PER_POINTER) / ELEMENTS;
      ElementCount elementCount = oldTag->inlineCompositeListElementCount();
      if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
        // Old size is at least as large as we need.  Ship it.
        return ListBuilder(oldSegment, capTable, oldPtr, oldStep * BITS_PER_WORD, elementCount,
                           oldDataSize * BITS_PER_WORD, oldPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
      // The structs in this list are smaller than expected, probably written using an older
      // version of the protocol.  We need to make a copy and expand them.
      WordCount newDataSize = kj::max(oldDataSize, elementSize.data);
      WirePointerCount newPointerCount = kj::max(oldPointerCount, elementSize.pointers);
      auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
      WordCount totalSize = newStep * elementCount;
      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(origSegment, origRef);
      word* newPtr = allocate(origRef, origSegment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                              WirePointer::LIST, orphanArena);
      origRef->listRef.setInlineComposite(totalSize);
      WirePointer* newTag = reinterpret_cast<WirePointer*>(newPtr);
      newTag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
      newTag->structRef.set(newDataSize, newPointerCount);
      newPtr += POINTER_SIZE_IN_WORDS;
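      // Copy the elements one by one: each data section is copied verbatim, while each pointer is
      // moved with transferPointer() so that its target offset remains valid from the new location.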
      word* src = oldPtr;
      word* dst = newPtr;
      for (uint i = 0; i < elementCount / ELEMENTS; i++) {
        // Copy data section.
        memcpy(dst, src, oldDataSize * BYTES_PER_WORD / BYTES);
        // Copy pointer section.
        WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(dst + newDataSize);
        WirePointer* oldPointerSection = reinterpret_cast<WirePointer*>(src + oldDataSize);
        for (uint j = 0; j < oldPointerCount / POINTERS; j++) {
          transferPointer(origSegment, newPointerSection + j, oldSegment, oldPointerSection + j);
        }

        dst += newStep * (1 * ELEMENTS);
        src += oldStep * (1 * ELEMENTS);
      }
      // Zero out old location.  See explanation in getWritableStructPointer().
      memset(oldPtr, 0, oldStep * elementCount * BYTES_PER_WORD / BYTES);
      return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                         newDataSize * BITS_PER_WORD, newPointerCount,
                         ElementSize::INLINE_COMPOSITE);
    } else {
      // We're upgrading from a non-struct list.
      BitCount oldDataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      WirePointerCount oldPointerCount = pointersPerElement(oldSize) * ELEMENTS;
      auto oldStep = (oldDataSize + oldPointerCount * BITS_PER_POINTER) / ELEMENTS;
      ElementCount elementCount = oldRef->listRef.elementCount();
      if (oldSize == ElementSize::VOID) {
        // Nothing to copy, just allocate a new list.
        return initStructListPointer(origRef, origSegment, capTable, elementCount, elementSize);
      } else {
        // Upgrading to an inline composite list.
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where struct list was expected; upgrading boolean lists to structs "
            "is no longer supported.") {
          goto useDefault;
        }

        WordCount newDataSize = elementSize.data;
        WirePointerCount newPointerCount = elementSize.pointers;
        if (oldSize == ElementSize::POINTER) {
          newPointerCount = kj::max(newPointerCount, 1 * POINTERS);
        } else {
          // Old list contains data elements, so we need at least 1 word of data.
          newDataSize = kj::max(newDataSize, 1 * WORDS);
        }
        auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
        WordCount totalWords = elementCount * newStep;
        // Don't let allocate() zero out the object just yet.
        zeroPointerAndFars(origSegment, origRef);
        word* newPtr = allocate(origRef, origSegment, capTable, totalWords + POINTER_SIZE_IN_WORDS,
                                WirePointer::LIST, orphanArena);
        origRef->listRef.setInlineComposite(totalWords);
        WirePointer* tag = reinterpret_cast<WirePointer*>(newPtr);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
        tag->structRef.set(newDataSize, newPointerCount);
        newPtr += POINTER_SIZE_IN_WORDS;
        if (oldSize == ElementSize::POINTER) {
          WirePointer* dst = reinterpret_cast<WirePointer*>(newPtr + newDataSize);
          WirePointer* src = reinterpret_cast<WirePointer*>(oldPtr);
          for (uint i = 0; i < elementCount / ELEMENTS; i++) {
            transferPointer(origSegment, dst, oldSegment, src);
            dst += newStep / WORDS_PER_POINTER * (1 * ELEMENTS);
            ++src;
          }
        } else {
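          // Each old data element becomes the first oldByteStep bytes of the corresponding new
          // struct's data section; the remainder of each freshly allocated struct is already zero.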
          word* dst = newPtr;
          char* src = reinterpret_cast<char*>(oldPtr);
          ByteCount oldByteStep = oldDataSize / BITS_PER_BYTE;
          for (uint i = 0; i < elementCount / ELEMENTS; i++) {
            memcpy(dst, src, oldByteStep / BYTES);
            src += oldByteStep / BYTES;
            dst += newStep * (1 * ELEMENTS);
          }
        }
        // Zero out old location.  See explanation in getWritableStructPointer().
        memset(oldPtr, 0, roundBitsUpToBytes(oldStep * elementCount) / BYTES);
        return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                           newDataSize * BITS_PER_WORD, newPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> initTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, ByteCount size,
      BuilderArena* orphanArena = nullptr)) {
    // The byte list must include a NUL terminator.
    ByteCount byteSize = size + 1 * BYTES;
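    // (For example, a 5-byte string needs 6 bytes including the NUL, which rounds up to one word.)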

    // Allocate the space.
    word* ptr = allocate(
        ref, segment, capTable, roundBytesUpToWords(byteSize), WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, byteSize * (1 * ELEMENTS / BYTES));

    // Build the Text::Builder.  This will initialize the NUL terminator.
    return { segment, Text::Builder(reinterpret_cast<char*>(ptr), size / BYTES) };
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> setTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Text::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    auto allocation = initTextPointer(ref, segment, capTable, value.size() * BYTES, orphanArena);
    memcpy(allocation.value.begin(), value.begin(), value.size());
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    return getWritableTextPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == 0 * BYTES) {
        return nullptr;
      } else {
        Text::Builder builder = initTextPointer(ref, segment, capTable, defaultSize).value;
        memcpy(builder.begin(), defaultValue, defaultSize / BYTES);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      char* cptr = reinterpret_cast<char*>(ptr);
      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getText{Field,Element}() but existing pointer is not a list.");
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getText{Field,Element}() but existing list pointer is not byte-sized.");
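      // The byte list includes the NUL terminator, so the usable text length is one byte less than
      // the element count.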
      size_t size = ref->listRef.elementCount() / ELEMENTS;
      KJ_REQUIRE(size > 0 && cptr[size-1] == '\0', "Text blob missing NUL terminator.") {
        goto useDefault;
      }

      return Text::Builder(cptr, size - 1);
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> initDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, ByteCount size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate the space.
    word* ptr = allocate(ref, segment, capTable, roundBytesUpToWords(size),
                         WirePointer::LIST, orphanArena);
    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, size * (1 * ELEMENTS / BYTES));
    // Build the Data::Builder.
    return { segment, Data::Builder(reinterpret_cast<byte*>(ptr), size / BYTES) };
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> setDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Data::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    auto allocation = initDataPointer(ref, segment, capTable, value.size() * BYTES, orphanArena);
    memcpy(allocation.value.begin(), value.begin(), value.size());
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    return getWritableDataPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
      if (defaultSize == 0 * BYTES) {
        return nullptr;
      } else {
        Data::Builder builder = initDataPointer(ref, segment, capTable, defaultSize).value;
        memcpy(builder.begin(), defaultValue, defaultSize / BYTES);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getData{Field,Element}() but existing pointer is not a list.");
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getData{Field,Element}() but existing list pointer is not byte-sized.");
      return Data::Builder(reinterpret_cast<byte*>(ptr), ref->listRef.elementCount() / ELEMENTS);
    }
  }

  static SegmentAnd<word*> setStructPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, StructReader value,
      BuilderArena* orphanArena = nullptr) {
    WordCount dataSize = roundBitsUpToWords(value.dataSize);
    WordCount totalSize = dataSize + value.pointerCount * WORDS_PER_POINTER;

    word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
    ref->structRef.set(dataSize, value.pointerCount);

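    // A data section of exactly one bit (a lone bool) can't be copied byte-by-byte, so write the
    // bool's value into the first byte instead.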
    if (value.dataSize == 1 * BITS) {
      *reinterpret_cast<char*>(ptr) = value.getDataField<bool>(0 * ELEMENTS);
    } else {
      memcpy(ptr, value.data, value.dataSize / BITS_PER_BYTE / BYTES);
    }

    WirePointer* pointerSection = reinterpret_cast<WirePointer*>(ptr + dataSize);
    for (uint i = 0; i < value.pointerCount / POINTERS; i++) {
      copyPointer(segment, capTable, pointerSection + i,
                  value.segment, value.capTable, value.pointers + i, value.nestingLimit);
    }
    return { segment, ptr };
  }

#if !CAPNP_LITE
  static void setCapabilityPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref,
      kj::Own<ClientHook>&& cap) {
    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }
    if (cap->isNull()) {
      memset(ref, 0, sizeof(*ref));
    } else {
      ref->setCap(capTable->injectCap(kj::mv(cap)));
    }
  }
#endif  // !CAPNP_LITE
  static SegmentAnd<word*> setListPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, ListReader value,
      BuilderArena* orphanArena = nullptr) {
    WordCount totalSize = roundBitsUpToWords(value.elementCount * value.step);
    if (value.elementSize != ElementSize::INLINE_COMPOSITE) {
      // List of non-structs.
      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::LIST, orphanArena);
      if (value.elementSize == ElementSize::POINTER) {
        // List of pointers.
        ref->listRef.set(ElementSize::POINTER, value.elementCount);
        for (uint i = 0; i < value.elementCount / ELEMENTS; i++) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i,
                      value.segment, value.capTable,
                      reinterpret_cast<const WirePointer*>(value.ptr) + i,
                      value.nestingLimit);
        }
      } else {
        // List of data.
        ref->listRef.set(value.elementSize, value.elementCount);
        memcpy(ptr, value.ptr, totalSize * BYTES_PER_WORD / BYTES);
      }
      return { segment, ptr };
    } else {
      // List of structs.
      KJ_DASSERT(value.structDataSize % BITS_PER_WORD == 0 * BITS);
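      // An INLINE_COMPOSITE list is prefixed by a tag word describing each element's struct size,
      // so allocate one extra word beyond the list content itself.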
      word* ptr = allocate(ref, segment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                           WirePointer::LIST, orphanArena);
      ref->listRef.setInlineComposite(totalSize);

      WordCount dataSize = roundBitsUpToWords(value.structDataSize);
      WirePointerCount pointerCount = value.structPointerCount;

      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, value.elementCount);
      tag->structRef.set(dataSize, pointerCount);
      word* dst = ptr + POINTER_SIZE_IN_WORDS;

      const word* src = reinterpret_cast<const word*>(value.ptr);
      for (uint i = 0; i < value.elementCount / ELEMENTS; i++) {
        memcpy(dst, src, value.structDataSize / BITS_PER_BYTE / BYTES);
        dst += dataSize;
        src += dataSize;

        for (uint j = 0; j < pointerCount / POINTERS; j++) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(dst),
              value.segment, value.capTable, reinterpret_cast<const WirePointer*>(src),
              value.nestingLimit);
          dst += POINTER_SIZE_IN_WORDS;
          src += POINTER_SIZE_IN_WORDS;
        }
      }
      return { segment, ptr };
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      int nestingLimit, BuilderArena* orphanArena = nullptr)) {
    return copyPointer(dstSegment, dstCapTable, dst,
                       srcSegment, srcCapTable, src, src->target(),
                       nestingLimit, orphanArena);
  }

  static SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      const word* srcTarget, int nestingLimit, BuilderArena* orphanArena = nullptr) {
    // Deep-copy the object pointed to by src into dst.  It turns out we can't reuse
    // readStructPointer(), etc. because they do type checking whereas here we want to accept any
    // valid pointer.

    if (src->isNull()) {
    useDefault:
      if (!dst->isNull()) {
        zeroObject(dstSegment, dstCapTable, dst);
        memset(dst, 0, sizeof(*dst));
      }
      return { dstSegment, nullptr };
    }

    const word* ptr = WireHelpers::followFars(src, srcTarget, srcSegment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported the error.
      goto useDefault;
    }

    switch (src->kind()) {
      case WirePointer::STRUCT:
        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        KJ_REQUIRE(boundsCheck(srcSegment, ptr, ptr + src->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          goto useDefault;
        }
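        // Copy by wrapping the source struct in a StructReader and re-serializing it at dst.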
        return setStructPointer(dstSegment, dstCapTable, dst,
            StructReader(srcSegment, srcCapTable, ptr,
                         reinterpret_cast<const WirePointer*>(ptr + src->structRef.dataSize.get()),
                         src->structRef.dataSize.get() * BITS_PER_WORD,
                         src->structRef.ptrCount.get(),
                         nestingLimit - 1),
            orphanArena);

      case WirePointer::LIST: {
        ElementSize elementSize = src->listRef.elementSize();

        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        if (elementSize == ElementSize::INLINE_COMPOSITE) {
          WordCount wordCount = src->listRef.inlineCompositeWordCount();
          const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
          ptr += POINTER_SIZE_IN_WORDS;

          KJ_REQUIRE(boundsCheck(srcSegment, ptr - POINTER_SIZE_IN_WORDS, ptr + wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                     "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
            goto useDefault;
          }

          ElementCount elementCount = tag->inlineCompositeListElementCount();
          auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

          KJ_REQUIRE(wordsPerElement * ElementCount64(elementCount) <= wordCount,
                     "INLINE_COMPOSITE list's elements overrun its word count.") {
            goto useDefault;
          }

          if (wordsPerElement * (1 * ELEMENTS) == 0 * WORDS) {
            // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
            // without having sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (1 * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr,
                         elementCount, wordsPerElement * BITS_PER_WORD,
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
                         nestingLimit - 1),
              orphanArena);
        } else {
          BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
          WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;
          auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
          ElementCount elementCount = src->listRef.elementCount();
          WordCount64 wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);

          KJ_REQUIRE(boundsCheck(srcSegment, ptr, ptr + wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          if (elementSize == ElementSize::VOID) {
            // Watch out for lists of void, which can claim to be arbitrarily large without having
            // sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (1 * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr, elementCount, step, dataSize, pointerCount,
                         elementSize, nestingLimit - 1),
              orphanArena);
        }
      }

      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Far pointer should have been handled above.") {
          goto useDefault;
        }

      case WirePointer::OTHER: {
        KJ_REQUIRE(src->isCapability(), "Unknown pointer type.") {
          goto useDefault;
        }

#if !CAPNP_LITE
        KJ_IF_MAYBE(cap, srcCapTable->extractCap(src->capRef.index.get())) {
          setCapabilityPointer(dstSegment, dstCapTable, dst, kj::mv(*cap));
          // Return dummy non-null pointer so OrphanBuilder doesn't end up null.
          return { dstSegment, reinterpret_cast<word*>(1) };
        } else {
#endif  // !CAPNP_LITE
          KJ_FAIL_REQUIRE("Message contained invalid capability pointer.") {
            goto useDefault;
          }
#if !CAPNP_LITE
        }
#endif  // !CAPNP_LITE
      }
    }

    KJ_UNREACHABLE;
  }

  static void adopt(SegmentBuilder* segment, CapTableBuilder* capTable,
                    WirePointer* ref, OrphanBuilder&& value) {
    KJ_REQUIRE(value.segment == nullptr || value.segment->getArena() == segment->getArena(),
               "Adopted object must live in the same message.");

    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }

    if (value == nullptr) {
      // Set null.
      memset(ref, 0, sizeof(*ref));
    } else if (value.tagAsPtr()->isPositional()) {
      WireHelpers::transferPointer(segment, ref, value.segment, value.tagAsPtr(), value.location);
    } else {
      // FAR and OTHER pointers are position-independent, so we can just copy.
      memcpy(ref, value.tagAsPtr(), sizeof(WirePointer));
    }

    // Take ownership away from the OrphanBuilder.
    memset(value.tagAsPtr(), 0, sizeof(WirePointer));
    value.location = nullptr;
    value.segment = nullptr;
  }

  static OrphanBuilder disown(SegmentBuilder* segment, CapTableBuilder* capTable,
                              WirePointer* ref) {
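    // Record the object's location (following any far pointer) before the pointer is zeroed, so
    // that the returned OrphanBuilder can keep referring to it.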
    word* location;

    if (ref->isNull()) {
      location = nullptr;
    } else if (ref->kind() == WirePointer::OTHER) {
      KJ_REQUIRE(ref->isCapability(), "Unknown pointer type.") { break; }
      location = reinterpret_cast<word*>(1);  // dummy so that it is non-null
    } else {
      WirePointer* refCopy = ref;
      location = followFarsNoWritableCheck(refCopy, ref->target(), segment);
    }

    OrphanBuilder result(ref, segment, capTable, location);
    if (!ref->isNull() && ref->isPositional()) {
      result.tagAsPtr()->setKindForOrphan(ref->kind());
    }

    // Zero out the pointer that was disowned.
    memset(ref, 0, sizeof(*ref));

    return result;
  }

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      int nestingLimit)) {
    return readStructPointer(segment, capTable, ref, ref->target(), defaultValue, nestingLimit);
  }

  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, int nestingLimit)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return StructReader();
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target();
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }
    const word* ptr = followFars(ref, refTarget, segment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported the error.
      goto useDefault;
    }
    KJ_REQUIRE(ref->kind() == WirePointer::STRUCT,
               "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    KJ_REQUIRE(boundsCheck(segment, ptr, ptr + ref->structRef.wordSize()),
               "Message contained out-of-bounds struct pointer.") {
      goto useDefault;
    }
    return StructReader(
        segment, capTable,
        ptr, reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get()),
        ref->structRef.dataSize.get() * BITS_PER_WORD,
        ref->structRef.ptrCount.get(),
        nestingLimit - 1);
  }

#if !CAPNP_LITE
  static KJ_ALWAYS_INLINE(kj::Own<ClientHook> readCapabilityPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, int nestingLimit)) {
    kj::Maybe<kj::Own<ClientHook>> maybeCap;

    KJ_REQUIRE(brokenCapFactory != nullptr,
               "Trying to read capabilities without ever having created a capability context.  "
               "To read capabilities from a message, you must imbue it with CapReaderContext, or "
               "use the Cap'n Proto RPC system.");

    if (ref->isNull()) {
      return brokenCapFactory->newNullCap();
    } else if (!ref->isCapability()) {
      KJ_FAIL_REQUIRE(
          "Message contains non-capability pointer where capability pointer was expected.") {
        break;
      }
      return brokenCapFactory->newBrokenCap(
          "Calling capability extracted from a non-capability pointer.");
    } else KJ_IF_MAYBE(cap, capTable->extractCap(ref->capRef.index.get())) {
      return kj::mv(*cap);
    } else {
      KJ_FAIL_REQUIRE("Message contains invalid capability pointer.") {
        break;
      }
      return brokenCapFactory->newBrokenCap("Calling invalid capability pointer.");
    }
  }
#endif  // !CAPNP_LITE
  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      ElementSize expectedElementSize, int nestingLimit, bool checkElementSize = true)) {
    return readListPointer(segment, capTable, ref, ref->target(), defaultValue,
                           expectedElementSize, nestingLimit, checkElementSize);
  }

  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, ElementSize expectedElementSize, int nestingLimit,
      bool checkElementSize = true)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListReader(expectedElementSize);
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target();
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }
    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }
    const word* ptr = followFars(ref, refTarget, segment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported error.
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Message contains non-list pointer where list pointer was expected.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
#if _MSC_VER
      // TODO(msvc): MSVC thinks decltype(WORDS/ELEMENTS) is a const type. /eyeroll
      uint wordsPerElement;
#else
      decltype(WORDS/ELEMENTS) wordsPerElement;
#endif
      ElementCount size;
      WordCount wordCount = ref->listRef.inlineCompositeWordCount();
      // An INLINE_COMPOSITE list points to a tag, which is formatted like a pointer.
      const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
      ptr += POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(boundsCheck(segment, ptr - POINTER_SIZE_IN_WORDS, ptr + wordCount),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
        goto useDefault;
      }
      size = tag->inlineCompositeListElementCount();
      wordsPerElement = tag->structRef.wordSize() / ELEMENTS;
      KJ_REQUIRE(ElementCount64(size) * wordsPerElement <= wordCount,
                 "INLINE_COMPOSITE list's elements overrun its word count.") {
        goto useDefault;
      }
      if (wordsPerElement * (1 * ELEMENTS) == 0 * WORDS) {
        // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
        // without having sent actual data.
        KJ_REQUIRE(amplifiedRead(segment, size * (1 * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        // If a struct list was not expected, then presumably a non-struct list was upgraded to a
        // struct list.  We need to manipulate the pointer to point at the first field of the
        // struct.  Together with the "stepBits", this will allow the struct list to be accessed as
        // if it were a primitive list without branching.
        // Check whether the size is compatible.
        switch (expectedElementSize) {
          case ElementSize::VOID:
            break;
          case ElementSize::BIT:
            KJ_FAIL_REQUIRE(
                "Found struct list where bit list was expected; upgrading boolean lists to structs "
                "is no longer supported.") {
              goto useDefault;
            }
            break;
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            KJ_REQUIRE(tag->structRef.dataSize.get() > 0 * WORDS,
                       "Expected a primitive list, but got a list of pointer-only structs.") {
              goto useDefault;
            }
            break;
          case ElementSize::POINTER:
            // We expected a list of pointers but got a list of structs.  Assuming the first field
            // in the struct is the pointer we were looking for, we want to munge the pointer to
            // point at the first element's pointer section.
            ptr += tag->structRef.dataSize.get();
            KJ_REQUIRE(tag->structRef.ptrCount.get() > 0 * POINTERS,
                       "Expected a pointer list, but got a list of data-only structs.") {
              goto useDefault;
            }
            break;
          case ElementSize::INLINE_COMPOSITE:
            break;
        }
      }

      return ListReader(
          segment, capTable, ptr, size, wordsPerElement * BITS_PER_WORD,
          tag->structRef.dataSize.get() * BITS_PER_WORD,
          tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
          nestingLimit - 1);

    } else {
      // This is a primitive or pointer list, but all such lists can also be interpreted as struct
      // lists.  We need to compute the data size and pointer count for such structs.
      BitCount dataSize = dataBitsPerElement(ref->listRef.elementSize()) * ELEMENTS;
      WirePointerCount pointerCount =
          pointersPerElement(ref->listRef.elementSize()) * ELEMENTS;
      ElementCount elementCount = ref->listRef.elementCount();
      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      WordCount wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);
      KJ_REQUIRE(boundsCheck(segment, ptr, ptr + wordCount),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      if (elementSize == ElementSize::VOID) {
        // Watch out for lists of void, which can claim to be arbitrarily large without having sent
        // actual data.
        KJ_REQUIRE(amplifiedRead(segment, elementCount * (1 * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        if (elementSize == ElementSize::BIT && expectedElementSize != ElementSize::BIT) {
          KJ_FAIL_REQUIRE(
              "Found bit list where struct list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
        }

        // Verify that the elements are at least as large as the expected type.  Note that if we
        // expected INLINE_COMPOSITE, the expected sizes here will be zero, because bounds checking
        // will be performed at field access time.  So this check here is for the case where we
        // expected a list of some primitive or pointer type.
        BitCount expectedDataBitsPerElement =
            dataBitsPerElement(expectedElementSize) * ELEMENTS;
        WirePointerCount expectedPointersPerElement =
            pointersPerElement(expectedElementSize) * ELEMENTS;
        KJ_REQUIRE(expectedDataBitsPerElement <= dataSize,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
        KJ_REQUIRE(expectedPointersPerElement <= pointerCount,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
      }
      return ListReader(segment, capTable, ptr, elementCount, step,
                        dataSize, pointerCount, elementSize, nestingLimit - 1);
    }
  }

  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, ByteCount defaultSize)) {
    return readTextPointer(segment, ref, ref->target(), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr) defaultValue = "";
      return Text::Reader(reinterpret_cast<const char*>(defaultValue), defaultSize / BYTES);
    } else {
      const word* ptr = followFars(ref, refTarget, segment);

      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      uint size = ref->listRef.elementCount() / ELEMENTS;

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, ptr +
                     roundBytesUpToWords(ref->listRef.elementCount() * (1 * BYTES / ELEMENTS))),
                 "Message contained out-of-bounds text pointer.") {
        goto useDefault;
      }

      KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      const char* cptr = reinterpret_cast<const char*>(ptr);
      --size;  // NUL terminator

      KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      return Text::Reader(cptr, size);
    }
  }

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, ByteCount defaultSize)) {
    return readDataPointer(segment, ref, ref->target(), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      return Data::Reader(reinterpret_cast<const byte*>(defaultValue), defaultSize / BYTES);
    } else {
      const word* ptr = followFars(ref, refTarget, segment);
      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      uint size = ref->listRef.elementCount() / ELEMENTS;

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, ptr +
                     roundBytesUpToWords(ref->listRef.elementCount() * (1 * BYTES / ELEMENTS))),
                 "Message contained out-of-bounds data pointer.") {
        goto useDefault;
      }

      return Data::Reader(reinterpret_cast<const byte*>(ptr), size);
    }
  }
};

// =======================================================================================
// PointerBuilder
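// Illustrative note: application code such as message.initRoot<Foo>(), where Foo is a hypothetical
// schema type, ends up here via the generated accessors, which call initStruct() on the root
// pointer's PointerBuilder with Foo's StructSize.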
StructBuilder PointerBuilder::initStruct(StructSize size) {
  return WireHelpers::initStructPointer(pointer, segment, capTable, size);
}

StructBuilder PointerBuilder::getStruct(StructSize size, const word* defaultValue) {
  return WireHelpers::getWritableStructPointer(pointer, segment, capTable, size, defaultValue);
}

ListBuilder PointerBuilder::initList(ElementSize elementSize, ElementCount elementCount) {
  return WireHelpers::initListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::initStructList(ElementCount elementCount, StructSize elementSize) {
  return WireHelpers::initStructListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::getList(ElementSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableListPointer(pointer, segment, capTable, elementSize, defaultValue);
}
ListBuilder PointerBuilder::getStructList(StructSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableStructListPointer(
      pointer, segment, capTable, elementSize, defaultValue);
}

ListBuilder PointerBuilder::getListAnySize(const word* defaultValue) {
  return WireHelpers::getWritableListPointerAnySize(pointer, segment, capTable, defaultValue);
}

template <>
Text::Builder PointerBuilder::initBlob<Text>(ByteCount size) {
  return WireHelpers::initTextPointer(pointer, segment, capTable, size).value;
}
template <>
void PointerBuilder::setBlob<Text>(Text::Reader value) {
  WireHelpers::setTextPointer(pointer, segment, capTable, value);
}
template <>
Text::Builder PointerBuilder::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableTextPointer(pointer, segment, capTable, defaultValue, defaultSize);
}

template <>
Data::Builder PointerBuilder::initBlob<Data>(ByteCount size) {
  return WireHelpers::initDataPointer(pointer, segment, capTable, size).value;
}
template <>
void PointerBuilder::setBlob<Data>(Data::Reader value) {
  WireHelpers::setDataPointer(pointer, segment, capTable, value);
}
template <>
Data::Builder PointerBuilder::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableDataPointer(pointer, segment, capTable, defaultValue, defaultSize);
}

void PointerBuilder::setStruct(const StructReader& value) {
  WireHelpers::setStructPointer(segment, capTable, pointer, value);
}

void PointerBuilder::setList(const ListReader& value) {
  WireHelpers::setListPointer(segment, capTable, pointer, value);
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerBuilder::getCapability() {
  return WireHelpers::readCapabilityPointer(
      segment, capTable, pointer, kj::maxValue);
}

void PointerBuilder::setCapability(kj::Own<ClientHook>&& cap) {
  WireHelpers::setCapabilityPointer(segment, capTable, pointer, kj::mv(cap));
}
#endif  // !CAPNP_LITE
void PointerBuilder::adopt(OrphanBuilder&& value) {
  WireHelpers::adopt(segment, capTable, pointer, kj::mv(value));
}

OrphanBuilder PointerBuilder::disown() {
  return WireHelpers::disown(segment, capTable, pointer);
}

void PointerBuilder::clear() {
  WireHelpers::zeroObject(segment, capTable, pointer);
  memset(pointer, 0, sizeof(WirePointer));
}

PointerType PointerBuilder::getPointerType() {
  if(pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    WirePointer* ptr = pointer;
    WireHelpers::followFars(ptr, ptr->target(), segment);
    switch(ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?");
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}

void PointerBuilder::transferFrom(PointerBuilder other) {
  if (!pointer->isNull()) {
    WireHelpers::zeroObject(segment, capTable, pointer);
    memset(pointer, 0, sizeof(*pointer));
  }
  WireHelpers::transferPointer(segment, pointer, other.segment, other.pointer);
  memset(other.pointer, 0, sizeof(*other.pointer));
}

void PointerBuilder::copyFrom(PointerReader other) {
  if (other.pointer == nullptr) {
    if (!pointer->isNull()) {
      WireHelpers::zeroObject(segment, capTable, pointer);
      memset(pointer, 0, sizeof(*pointer));
    }
  } else {
    WireHelpers::copyPointer(segment, capTable, pointer,
                             other.segment, other.capTable, other.pointer, other.nestingLimit);
  }
}

PointerReader PointerBuilder::asReader() const {
  return PointerReader(segment, capTable, pointer, kj::maxValue);
}

BuilderArena* PointerBuilder::getArena() const {
  return segment->getArena();
}

CapTableBuilder* PointerBuilder::getCapTable() {
  return capTable;
}

PointerBuilder PointerBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// PointerReader

PointerReader PointerReader::getRoot(SegmentReader* segment, CapTableReader* capTable,
                                     const word* location, int nestingLimit) {
  KJ_REQUIRE(WireHelpers::boundsCheck(segment, location, location + POINTER_SIZE_IN_WORDS),
             "Root location out-of-bounds.") {
    location = nullptr;
  }

  return PointerReader(segment, capTable,
      reinterpret_cast<const WirePointer*>(location), nestingLimit);
}

StructReader PointerReader::getStruct(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readStructPointer(segment, capTable, ref, defaultValue, nestingLimit);
}

ListReader PointerReader::getList(ElementSize expectedElementSize, const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, expectedElementSize, nestingLimit);
}

ListReader PointerReader::getListAnySize(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, ElementSize::VOID /* dummy */, nestingLimit, false);
}

template <>
Text::Reader PointerReader::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readTextPointer(segment, ref, defaultValue, defaultSize);
}
template <>
Data::Reader PointerReader::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readDataPointer(segment, ref, defaultValue, defaultSize);
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerReader::getCapability() const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readCapabilityPointer(segment, capTable, ref, nestingLimit);
}
#endif  // !CAPNP_LITE
const word* PointerReader::getUnchecked() const {
  KJ_REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
  return reinterpret_cast<const word*>(pointer);
}

MessageSizeCounts PointerReader::targetSize() const {
  return pointer == nullptr ? MessageSizeCounts { 0 * WORDS, 0 }
                            : WireHelpers::totalSize(segment, pointer, nestingLimit);
}

PointerType PointerReader::getPointerType() const {
  if(pointer == nullptr || pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    word* refTarget = nullptr;
    const WirePointer* ptr = pointer;
    SegmentReader* sgmt = segment;
    WireHelpers::followFars(ptr, refTarget, sgmt);
    switch(ptr->kind()) {
2444 2445 2446 2447 2448 2449 2450 2451 2452
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?");
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
        return PointerType::CAPABILITY;
2453
    }
2454
    KJ_UNREACHABLE;
2455
  }
2456 2457
}
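
// A hedged sketch of the public mirror of this classification on AnyPointer::Reader (`any` is a
// hypothetical AnyPointer::Reader obtained from generated code):
//
//     switch (any.getPointerType()) {
//       case PointerType::NULL_:       break;  // field was never set
//       case PointerType::STRUCT:      break;  // any.getAs<SomeStruct>() is valid
//       case PointerType::LIST:        break;  // any.getAs<capnp::List<uint32_t>>(), etc.
//       case PointerType::CAPABILITY:  break;  // any.getAs<SomeInterface>() (non-lite builds)
//     }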

kj::Maybe<Arena&> PointerReader::getArena() const {
  return segment == nullptr ? nullptr : segment->getArena();
}

CapTableReader* PointerReader::getCapTable() {
  return capTable;
}

PointerReader PointerReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructBuilder

void StructBuilder::clearAll() {
  if (dataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, false);
  } else {
    memset(data, 0, dataSize / BITS_PER_BYTE / BYTES);
  }

  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);
}

void StructBuilder::transferContentFrom(StructBuilder other) {
  // Determine the amount of data the builders have in common.
  BitCount sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == 1 * BITS) {
      setDataField<bool>(0 * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE / BYTES;
      memset(unshared, 0, (dataSize - sharedDataSize) / BITS_PER_BYTE / BYTES);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, other.getDataField<bool>(0 * ELEMENTS));
  } else {
    memcpy(data, other.data, sharedDataSize / BITS_PER_BYTE / BYTES);
  }

  // Zero out all pointers in the target.
  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);

  // Transfer the pointers.
  WirePointerCount sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (uint i = 0; i < sharedPointerCount / POINTERS; i++) {
    WireHelpers::transferPointer(segment, pointers + i, other.segment, other.pointers + i);
  }

  // Zero out the pointers that were transferred in the source because it no longer has ownership.
  // If the source had any extra pointers that the destination didn't have space for, we
  // intentionally leave them be, so that they'll be cleaned up later.
  memset(other.pointers, 0, sharedPointerCount * BYTES_PER_POINTER / BYTES);
}

void StructBuilder::copyContentFrom(StructReader other) {
  // Determine the amount of data the source and target structs have in common.
  BitCount sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == 1 * BITS) {
      setDataField<bool>(0 * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE / BYTES;
      memset(unshared, 0, (dataSize - sharedDataSize) / BITS_PER_BYTE / BYTES);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, other.getDataField<bool>(0 * ELEMENTS));
  } else {
    memcpy(data, other.data, sharedDataSize / BITS_PER_BYTE / BYTES);
  }

  // Zero out all pointers in the target.
  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);

  // Copy the pointers.
  WirePointerCount sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (uint i = 0; i < sharedPointerCount / POINTERS; i++) {
    WireHelpers::copyPointer(segment, capTable, pointers + i,
        other.segment, other.capTable, other.pointers + i, other.nestingLimit);
  }
}
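
// A hedged usage sketch: List<T>::Builder::setWithCaveats() is the kind of public call that ends
// up in the copy above, overwriting a preallocated struct element from a reader.  `AddressBook`
// and `Person` are hypothetical schema types, not part of this file.
//
//     capnp::MallocMessageBuilder message;
//     auto people = message.initRoot<AddressBook>().initPeople(1);
//     people.setWithCaveats(0, existingPersonReader);  // shared fields copied, the rest zeroed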

StructReader StructBuilder::asReader() const {
  return StructReader(segment, capTable, data, pointers,
      dataSize, pointerCount, kj::maxValue);
}

BuilderArena* StructBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* StructBuilder::getCapTable() {
  return capTable;
}

StructBuilder StructBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructReader

MessageSizeCounts StructReader::totalSize() const {
  MessageSizeCounts result = {
    WireHelpers::roundBitsUpToWords(dataSize) + pointerCount * WORDS_PER_POINTER, 0 };

  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    result += WireHelpers::totalSize(segment, pointers + i, nestingLimit);
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}
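
// A hedged sketch of the usual reason callers want totalSize(): pre-sizing a destination message
// before copying.  `someStructReader` is a hypothetical generated Reader whose totalSize()
// forwards to the method above.
//
//     capnp::MessageSize size = someStructReader.totalSize();
//     capnp::MallocMessageBuilder copy(size.wordCount + 1);  // +1 word for the root pointer
//     copy.setRoot(someStructReader);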

CapTableReader* StructReader::getCapTable() {
  return capTable;
}

StructReader StructReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListBuilder

Text::Builder ListBuilder::asText() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Builder();
  }

  size_t size = elementCount / ELEMENTS;

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  char* cptr = reinterpret_cast<char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  return Text::Builder(cptr, size);
}

Data::Builder ListBuilder::asData() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Builder();
  }

  return Data::Builder(reinterpret_cast<byte*>(ptr), elementCount / ELEMENTS);
}

StructBuilder ListBuilder::getStructElement(ElementCount index) {
  BitCount64 indexBit = ElementCount64(index) * step;
  byte* structData = ptr + indexBit / BITS_PER_BYTE;
  KJ_DASSERT(indexBit % BITS_PER_BYTE == 0 * BITS);
  return StructBuilder(segment, capTable, structData,
      reinterpret_cast<WirePointer*>(structData + structDataSize / BITS_PER_BYTE),
      structDataSize, structPointerCount);
}

ListReader ListBuilder::asReader() const {
  return ListReader(segment, capTable, ptr, elementCount, step, structDataSize, structPointerCount,
                    elementSize, kj::maxValue);
}

BuilderArena* ListBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* ListBuilder::getCapTable() {
  return capTable;
}

ListBuilder ListBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListReader

Text::Reader ListReader::asText() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Reader();
  }

  size_t size = elementCount / ELEMENTS;

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  const char* cptr = reinterpret_cast<const char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  return Text::Reader(cptr, size);
}

Data::Reader ListReader::asData() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Reader();
  }

  return Data::Reader(reinterpret_cast<const byte*>(ptr), elementCount / ELEMENTS);
}

kj::ArrayPtr<const byte> ListReader::asRawBytes() {
  KJ_REQUIRE(structPointerCount == 0 * POINTERS,
             "Expected data only, got pointers.") {
    return kj::ArrayPtr<const byte>();
  }

  return kj::ArrayPtr<const byte>(reinterpret_cast<const byte*>(ptr),
      WireHelpers::roundBitsUpToBytes(elementCount * (structDataSize / ELEMENTS)) / BYTES);
}

StructReader ListReader::getStructElement(ElementCount index) const {
  KJ_REQUIRE(nestingLimit > 0,
             "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
    return StructReader();
  }

  BitCount64 indexBit = ElementCount64(index) * step;
  const byte* structData = ptr + indexBit / BITS_PER_BYTE;
  const WirePointer* structPointers =
      reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);

  // This check should pass if there are no bugs in the list pointer validation code.
  KJ_DASSERT(structPointerCount == 0 * POINTERS ||
         (uintptr_t)structPointers % sizeof(void*) == 0,
         "Pointer section of struct list element not aligned.");

  KJ_DASSERT(indexBit % BITS_PER_BYTE == 0 * BITS);
  return StructReader(
      segment, capTable, structData, structPointers,
      structDataSize, structPointerCount,
      nestingLimit - 1);
}

CapTableReader* ListReader::getCapTable() {
  return capTable;
}

ListReader ListReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// OrphanBuilder

OrphanBuilder OrphanBuilder::initStruct(
    BuilderArena* arena, CapTableBuilder* capTable, StructSize size) {
  OrphanBuilder result;
  StructBuilder builder = WireHelpers::initStructPointer(
      result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, ElementSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initStructList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, StructSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initStructListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initText(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initTextPointer(result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::initData(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initDataPointer(result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}
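
// A hedged sketch of the public Orphanage API built on top of these initializers (`Person` is a
// hypothetical schema type; the Orphanage calls are from the public headers):
//
//     capnp::MallocMessageBuilder message;
//     capnp::Orphanage orphanage = message.getOrphanage();
//     capnp::Orphan<Person> orphan = orphanage.newOrphan<Person>();  // lands in initStruct()
//     orphan.get().setName("detached");  // build the orphan before adopting it somewhere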

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setStructPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setListPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::copyPointer(
      nullptr, capTable, result.tagAsPtr(),
      copyFrom.segment, copyFrom.capTable, copyFrom.pointer, copyFrom.nestingLimit, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setTextPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setDataPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

#if !CAPNP_LITE
OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, kj::Own<ClientHook> copyFrom) {
  OrphanBuilder result;
  WireHelpers::setCapabilityPointer(nullptr, capTable, result.tagAsPtr(), kj::mv(copyFrom));
  result.segment = arena->getSegment(SegmentId(0));
  result.capTable = capTable;
  result.location = &result.tag;  // dummy to make location non-null
  return result;
}
#endif  // !CAPNP_LITE

OrphanBuilder OrphanBuilder::concat(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementSize elementSize, StructSize structSize,
    kj::ArrayPtr<const ListReader> lists) {
  KJ_REQUIRE(lists.size() > 0, "Can't concat an empty list of lists.");

  // Find the overall element count and size.
  ElementCount elementCount = 0 * ELEMENTS;
  for (auto& list: lists) {
    elementCount += list.elementCount;
    if (list.elementSize != elementSize) {
      // If element sizes don't all match, upgrade to struct list.
      KJ_REQUIRE(list.elementSize != ElementSize::BIT && elementSize != ElementSize::BIT,
                 "can't upgrade bit lists to struct lists");
      elementSize = ElementSize::INLINE_COMPOSITE;
    }
    structSize.data = kj::max(structSize.data,
        WireHelpers::roundBitsUpToWords(list.structDataSize));
    structSize.pointers = kj::max(structSize.pointers, list.structPointerCount);
  }

  // Allocate the list.
  OrphanBuilder result;
  ListBuilder builder = (elementSize == ElementSize::INLINE_COMPOSITE)
      ? WireHelpers::initStructListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, structSize, arena)
      : WireHelpers::initListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);

  // Copy elements.
  switch (elementSize) {
    case ElementSize::INLINE_COMPOSITE: {
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.getStructElement(pos).copyContentFrom(list.getStructElement(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    case ElementSize::POINTER: {
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.getPointerElement(pos).copyFrom(list.getPointerElement(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    case ElementSize::BIT: {
      // It's difficult to memcpy() bits since a list could start or end mid-byte. For now we
      // do a slow, naive loop. Probably no one will ever care.
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.setDataElement<bool>(pos, list.getDataElement<bool>(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    default: {
      // We know all the inputs had identical size because otherwise we would have chosen
      // INLINE_COMPOSITE. Therefore, we can safely use memcpy() here instead of copying each
      // element manually.
      byte* target = builder.ptr;
      auto step = builder.step / BITS_PER_BYTE;
      for (auto& list: lists) {
        auto count = step * list.size();
        memcpy(target, list.ptr, count / BYTES);
        target += count / BYTES;
      }
      break;
    }
  }

  // Return orphan.
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}
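
// A hedged sketch of the public entry point for the concat logic above: Orphanage's
// newOrphanConcat() takes an array of list readers (possibly from different messages) and
// produces one combined orphaned list.  `Person`, `peopleA`, and `peopleB` are hypothetical.
//
//     capnp::List<Person>::Reader parts[] = { peopleA, peopleB };
//     auto combined = message.getOrphanage().newOrphanConcat(kj::arrayPtr(parts, 2));
//     message.initRoot<AddressBook>().adoptPeople(kj::mv(combined));  // adoptPeople: generated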

OrphanBuilder OrphanBuilder::referenceExternalData(BuilderArena* arena, Data::Reader data) {
  KJ_REQUIRE(reinterpret_cast<uintptr_t>(data.begin()) % sizeof(void*) == 0,
             "Cannot referenceExternalData() that is not aligned.");

  auto wordCount = WireHelpers::roundBytesUpToWords(data.size() * BYTES);
  kj::ArrayPtr<const word> words(reinterpret_cast<const word*>(data.begin()), wordCount / WORDS);

  OrphanBuilder result;
  result.tagAsPtr()->setKindForOrphan(WirePointer::LIST);
  result.tagAsPtr()->listRef.set(ElementSize::BYTE, data.size() * ELEMENTS);
  result.segment = arena->addExternalSegment(words);

  // External data cannot possibly contain capabilities.
  result.capTable = nullptr;

  // const_cast OK here because we will check whether the segment is writable when we try to get
  // a builder.
  result.location = const_cast<word*>(words.begin());

  return result;
}
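
// A hedged usage sketch of the matching public call, Orphanage::referenceExternalData(): it lets
// a message point at a large, immutable, word-aligned buffer without copying it.  `bigBuffer` is
// a hypothetical kj::ArrayPtr<const capnp::byte> that must outlive the message.
//
//     capnp::Orphan<capnp::Data> ref =
//         message.getOrphanage().referenceExternalData(bigBuffer);
//     message.initRoot<MyStruct>().adoptBlob(kj::mv(ref));  // adoptBlob() is hypothetical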

StructBuilder OrphanBuilder::asStruct(StructSize size) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  StructBuilder result = WireHelpers::getWritableStructPointer(
      tagAsPtr(), location, segment, capTable, size, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = reinterpret_cast<word*>(result.data);

  return result;
}

ListBuilder OrphanBuilder::asList(ElementSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  // (Actually, currently this is not true for primitive lists, but let's not turn into a bug if
  // it changes!)
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asStructList(StructSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableStructListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

Text::Builder OrphanBuilder::asText() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableTextPointer(
      tagAsPtr(), location, segment, capTable, nullptr, 0 * BYTES);
}

Data::Builder OrphanBuilder::asData() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableDataPointer(
      tagAsPtr(), location, segment, capTable, nullptr, 0 * BYTES);
}

StructReader OrphanBuilder::asStructReader(StructSize size) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readStructPointer(
      segment, capTable, tagAsPtr(), location, nullptr, kj::maxValue);
}

ListReader OrphanBuilder::asListReader(ElementSize elementSize) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, elementSize, kj::maxValue);
}

#if !CAPNP_LITE
kj::Own<ClientHook> OrphanBuilder::asCapability() const {
  return WireHelpers::readCapabilityPointer(segment, capTable, tagAsPtr(), kj::maxValue);
}
#endif  // !CAPNP_LITE

Text::Reader OrphanBuilder::asTextReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readTextPointer(segment, tagAsPtr(), location, nullptr, 0 * BYTES);
}

Data::Reader OrphanBuilder::asDataReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readDataPointer(segment, tagAsPtr(), location, nullptr, 0 * BYTES);
}

bool OrphanBuilder::truncate(ElementCount size, bool isText) {
  WirePointer* ref = tagAsPtr();
  SegmentBuilder* segment = this->segment;

  word* target = WireHelpers::followFars(ref, location, segment);

  if (ref->isNull()) {
    // We don't know the right element size, so we can't resize this list.
    return size == 0 * ELEMENTS;
  }

  KJ_REQUIRE(ref->kind() == WirePointer::LIST, "Can't truncate non-list.") {
    return false;
  }

  if (isText) size += 1 * ELEMENTS;

  ElementSize elementSize = ref->listRef.elementSize();

  if (elementSize == ElementSize::INLINE_COMPOSITE) {
    WordCount oldWordCount = ref->listRef.inlineCompositeWordCount();

    WirePointer* tag = reinterpret_cast<WirePointer*>(target);
    ++target;
    KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
               "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
      return false;
    }
    StructSize structSize(tag->structRef.dataSize.get(), tag->structRef.ptrCount.get());
    WordCount elementWordCount = structSize.total();

    ElementCount oldSize = tag->inlineCompositeListElementCount();
    word* newEndWord = target + size * (elementWordCount / ELEMENTS);
    word* oldEndWord = target + oldWordCount;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (uint i = size / ELEMENTS; i < oldSize / ELEMENTS; i++) {
        WireHelpers::zeroObject(segment, capTable, tag, target + i * elementWordCount);
      }
      ref->listRef.setInlineComposite(size * (elementWordCount / ELEMENTS));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else if (newEndWord <= oldEndWord) {
      // Apparently the old list was over-allocated? The word count is more than needed to store
      // the elements. This is "valid" but shouldn't happen in practice unless someone is toying
      // with us.
      word* expectedEnd = target + oldSize * (elementWordCount / ELEMENTS);
      KJ_ASSERT(newEndWord >= expectedEnd);
      memset(expectedEnd, 0, (newEndWord - expectedEnd) * sizeof(word));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.setInlineComposite(size * (elementWordCount / ELEMENTS));
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initStructList(segment->getArena(), capTable, size, structSize);

        ListBuilder newList = replacement.asStructList(structSize);
        word* element = target;
        for (uint i = 0; i < oldSize / ELEMENTS; i++) {
          newList.getStructElement(i * ELEMENTS).transferContentFrom(
              StructBuilder(segment, capTable, element,
                            reinterpret_cast<WirePointer*>(element + structSize.data),
                            structSize.data * BITS_PER_WORD, structSize.pointers));
          element += elementWordCount;
        }

        *this = kj::mv(replacement);
      }
    }
  } else if (elementSize == ElementSize::POINTER) {
    auto oldSize = ref->listRef.elementCount();
    word* newEndWord = target + size * (POINTER_SIZE_IN_WORDS / ELEMENTS);
    word* oldEndWord = target + oldSize * (POINTER_SIZE_IN_WORDS / ELEMENTS);

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (WirePointer* element = reinterpret_cast<WirePointer*>(newEndWord);
           element < reinterpret_cast<WirePointer*>(oldEndWord); ++element) {
        WireHelpers::zeroPointerAndFars(segment, element);
      }
      ref->listRef.set(ElementSize::POINTER, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.set(ElementSize::POINTER, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(
            segment->getArena(), capTable, size, ElementSize::POINTER);
        ListBuilder newList = replacement.asList(ElementSize::POINTER);
        WirePointer* oldPointers = reinterpret_cast<WirePointer*>(target);
        for (uint i = 0; i < oldSize / ELEMENTS; i++) {
          newList.getPointerElement(i * ELEMENTS).transferFrom(
              PointerBuilder(segment, capTable, oldPointers + i));
        }
        *this = kj::mv(replacement);
      }
    }
  } else {
    auto oldSize = ref->listRef.elementCount();
    auto step = dataBitsPerElement(elementSize);
    word* newEndWord = target + WireHelpers::roundBitsUpToWords(size * step);
    word* oldEndWord = target + WireHelpers::roundBitsUpToWords(oldSize * step);

    if (size <= oldSize) {
      // When truncating text, we want to set the null terminator as well, so we'll do our zeroing
      // at the byte level.
      byte* begin = reinterpret_cast<byte*>(target);
      byte* newEndByte = begin + WireHelpers::roundBitsUpToBytes(size * step) - isText;
      byte* oldEndByte = reinterpret_cast<byte*>(oldEndWord);

      memset(newEndByte, 0, oldEndByte - newEndByte);
      ref->listRef.set(elementSize, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      // We're trying to extend, not truncate.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the memory is already zero'd.
        ref->listRef.set(elementSize, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(segment->getArena(), capTable, size, elementSize);
        ListBuilder newList = replacement.asList(elementSize);
        auto words = WireHelpers::roundBitsUpToWords(dataBitsPerElement(elementSize) * oldSize);
        memcpy(newList.ptr, target, words * BYTES_PER_WORD / BYTES);
        *this = kj::mv(replacement);
      }
    }
  }

  return true;
}
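
// A hedged sketch of the public surface over this routine: Orphan<T>::truncate() for lists and
// text bottoms out here, reallocating only when the existing segment can't be extended in place.
// `Person` is a hypothetical schema type.
//
//     auto people = orphanage.newOrphan<capnp::List<Person>>(10);
//     people.truncate(4);  // keeps elements [0, 4); zeroes and, when possible, frees the tail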

void OrphanBuilder::truncate(ElementCount size, ElementSize elementSize) {
  if (!truncate(size, false)) {
    *this = initList(segment->getArena(), capTable, size, elementSize);
  }
}

void OrphanBuilder::truncate(ElementCount size, StructSize elementSize) {
  if (!truncate(size, false)) {
    *this = initStructList(segment->getArena(), capTable, size, elementSize);
  }
}

void OrphanBuilder::truncateText(ElementCount size) {
  if (!truncate(size, true)) {
    *this = initText(segment->getArena(), capTable, size * (1 * BYTES / ELEMENTS));
  }
}

void OrphanBuilder::euthanize() {
  // Carefully catch any exceptions and rethrow them as recoverable exceptions since we may be in
  // a destructor.
  auto exception = kj::runCatchingExceptions([&]() {
    if (tagAsPtr()->isPositional()) {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr(), location);
    } else {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr());
    }

    memset(&tag, 0, sizeof(tag));
    segment = nullptr;
    location = nullptr;
  });

  KJ_IF_MAYBE(e, exception) {
    kj::getExceptionCallback().onRecoverableException(kj::mv(*e));
  }
}

}  // namespace _ (private)
}  // namespace capnp