// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#define CAPNP_PRIVATE
#include "layout.h"
#include <kj/debug.h>
#include "arena.h"
#include <string.h>
#include <stdlib.h>

#if !CAPNP_LITE
#include "capability.h"
#endif  // !CAPNP_LITE

namespace capnp {
namespace _ {  // private

#if !CAPNP_LITE
static BrokenCapFactory* brokenCapFactory = nullptr;
// Horrible hack:  We need to be able to construct broken caps without any capability context,
// but we can't have a link-time dependency on libcapnp-rpc.

void setGlobalBrokenCapFactoryForLayoutCpp(BrokenCapFactory& factory) {
  // Called from capability.c++ when the capability API is used, to make sure that layout.c++
  // is ready for it.  May be called multiple times but always with the same value.
  __atomic_store_n(&brokenCapFactory, &factory, __ATOMIC_RELAXED);
}

}  // namespace _ (private)

const uint ClientHook::NULL_CAPABILITY_BRAND = 0;
// Defined here rather than capability.c++ so that we can safely call isNull() in this file.

namespace _ {  // private

#endif  // !CAPNP_LITE

// =======================================================================================

struct WirePointer {
  // A pointer, in exactly the format in which it appears on the wire.

  // Copying and moving is not allowed because the offset would become wrong.
  WirePointer(const WirePointer& other) = delete;
  WirePointer(WirePointer&& other) = delete;
  WirePointer& operator=(const WirePointer& other) = delete;
  WirePointer& operator=(WirePointer&& other) = delete;

  // -----------------------------------------------------------------
  // Common part of all pointers:  kind + offset
  //
  // Actually this is not terribly common.  The "offset" could actually be different things
  // depending on the context:
  // - For a regular (e.g. struct/list) pointer, a signed word offset from the word immediately
  //   following the pointer.  (The off-by-one means the offset is more often zero, saving
  //   bytes on the wire when packed.)
  // - For an inline composite list tag (not really a pointer, but structured similarly), an
  //   element count.
  // - For a FAR pointer, an unsigned offset into the target segment.
  // - For a FAR landing pad, zero indicates that the target value immediately follows the pad while
  //   1 indicates that the pad is followed by another FAR pointer that actually points at the
  //   value.
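  //
  // Purely as an informal illustration (derived from the accessors below, not additional spec):
  // a STRUCT pointer whose content begins two words past the word following the pointer, with a
  // one-word data section and one pointer, carries (2 << 2) | STRUCT in its lower 32 bits and
  // dataSize = 1, ptrCount = 1 in its upper 32 bits.  A FAR pointer instead packs
  // (offset << 3) | (isDoubleFar << 2) | FAR in the lower 32 bits and the target segment ID in
  // the upper 32 bits.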

  enum Kind {
    STRUCT = 0,
    // Reference points at / describes a struct.

    LIST = 1,
    // Reference points at / describes a list.

    FAR = 2,
    // Reference is a "far pointer", which points at data located in a different segment.  The
    // eventual target is one of the other kinds.

    OTHER = 3
    // Reference has type "other".  If the next 30 bits are all zero (i.e. the lower 32 bits contain
    // only the kind OTHER) then the pointer is a capability.  All other values are reserved.
  };

  WireValue<uint32_t> offsetAndKind;

  KJ_ALWAYS_INLINE(Kind kind() const) {
    return static_cast<Kind>(offsetAndKind.get() & 3);
  }
  KJ_ALWAYS_INLINE(bool isPositional() const) {
    return (offsetAndKind.get() & 2) == 0;  // match STRUCT and LIST but not FAR or OTHER
  }
  KJ_ALWAYS_INLINE(bool isCapability() const) {
    return offsetAndKind.get() == OTHER;
  }

  KJ_ALWAYS_INLINE(word* target()) {
    return reinterpret_cast<word*>(this) + 1 + (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(const word* target() const) {
    return reinterpret_cast<const word*>(this) + 1 +
        (static_cast<int32_t>(offsetAndKind.get()) >> 2);
  }
  KJ_ALWAYS_INLINE(void setKindAndTarget(Kind kind, word* target, SegmentBuilder* segment)) {
    // Check that the target is really in the same segment, otherwise subtracting pointers is
    // undefined behavior.  As it turns out, it's undefined behavior that actually produces
    // unexpected results in a real-world situation that actually happened:  At one time,
    // OrphanBuilder's "tag" (a WirePointer) was allowed to be initialized as if it lived in
    // a particular segment when in fact it does not.  On 32-bit systems, where words might
    // only be 32-bit aligned, it's possible that the difference between `this` and `target` is
    // not a whole number of words.  But clang optimizes:
    //     (target - (word*)this - 1) << 2
    // to:
    //     (((ptrdiff_t)target - (ptrdiff_t)this - 8) >> 1)
    // So now when the pointers are not aligned the same, we can end up corrupting the bottom
    // two bits, where `kind` is stored.  For example, this turns a struct into a far pointer.
    // Ouch!
    KJ_DREQUIRE(segment->containsInterval(
        reinterpret_cast<word*>(this), reinterpret_cast<word*>(this + 1)));
    KJ_DREQUIRE(segment->containsInterval(target, target));
    offsetAndKind.set(((target - reinterpret_cast<word*>(this) - 1) << 2) | kind);
  }
  KJ_ALWAYS_INLINE(void setKindWithZeroOffset(Kind kind)) {
    offsetAndKind.set(kind);
  }
  KJ_ALWAYS_INLINE(void setKindAndTargetForEmptyStruct()) {
    // This pointer points at an empty struct.  Assuming the WirePointer itself is in-bounds, we
    // can set the target to point either at the WirePointer itself or immediately after it.  The
    // latter would cause the WirePointer to be "null" (since for an empty struct the upper 32
    // bits are going to be zero).  So we set an offset of -1, as if the struct were allocated
    // immediately before this pointer, to distinguish it from null.
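    // (Informally, 0xfffffffc is the offset -1 shifted left two bits with kind bits 00 = STRUCT.)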
    offsetAndKind.set(0xfffffffc);
  }
  KJ_ALWAYS_INLINE(void setKindForOrphan(Kind kind)) {
    // OrphanBuilder contains a WirePointer, but since it isn't located in a segment, it should
    // not have a valid offset (unless it is a FAR or OTHER pointer).  We set its offset to -1
    // because setting it to zero would mean a pointer to an empty struct would appear to be a null
    // pointer.
    KJ_DREQUIRE(isPositional());
    offsetAndKind.set(kind | 0xfffffffc);
  }

  KJ_ALWAYS_INLINE(ElementCount inlineCompositeListElementCount() const) {
    return (offsetAndKind.get() >> 2) * ELEMENTS;
  }
  KJ_ALWAYS_INLINE(void setKindAndInlineCompositeListElementCount(
      Kind kind, ElementCount elementCount)) {
    offsetAndKind.set(((elementCount / ELEMENTS) << 2) | kind);
  }

  KJ_ALWAYS_INLINE(WordCount farPositionInSegment() const) {
    KJ_DREQUIRE(kind() == FAR,
        "positionInSegment() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 3) * WORDS;
  }
  KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
    KJ_DREQUIRE(kind() == FAR,
        "isDoubleFar() should only be called on FAR pointers.");
    return (offsetAndKind.get() >> 2) & 1;
  }
  KJ_ALWAYS_INLINE(void setFar(bool isDoubleFar, WordCount pos)) {
    offsetAndKind.set(((pos / WORDS) << 3) | (static_cast<uint32_t>(isDoubleFar) << 2) |
                      static_cast<uint32_t>(Kind::FAR));
  }
  KJ_ALWAYS_INLINE(void setCap(uint index)) {
    offsetAndKind.set(static_cast<uint32_t>(Kind::OTHER));
    capRef.index.set(index);
  }

  // -----------------------------------------------------------------
  // Part of pointer that depends on the kind.

  // Note:  Originally StructRef, ListRef, and FarRef were unnamed types, but this somehow
  //   tickled a bug in GCC:
  //     http://gcc.gnu.org/bugzilla/show_bug.cgi?id=58192
  struct StructRef {
    WireValue<WordCount16> dataSize;
    WireValue<WirePointerCount16> ptrCount;

    inline WordCount wordSize() const {
      return dataSize.get() + ptrCount.get() * WORDS_PER_POINTER;
    }

    KJ_ALWAYS_INLINE(void set(WordCount ds, WirePointerCount rc)) {
      dataSize.set(ds);
      ptrCount.set(rc);
    }
    KJ_ALWAYS_INLINE(void set(StructSize size)) {
      dataSize.set(size.data);
      ptrCount.set(size.pointers);
    }
  };

  struct ListRef {
    WireValue<uint32_t> elementSizeAndCount;

    KJ_ALWAYS_INLINE(ElementSize elementSize() const) {
      return static_cast<ElementSize>(elementSizeAndCount.get() & 7);
    }
    KJ_ALWAYS_INLINE(ElementCount elementCount() const) {
      return (elementSizeAndCount.get() >> 3) * ELEMENTS;
    }
    KJ_ALWAYS_INLINE(WordCount inlineCompositeWordCount() const) {
      return elementCount() * (1 * WORDS / ELEMENTS);
    }

    KJ_ALWAYS_INLINE(void set(ElementSize es, ElementCount ec)) {
      KJ_DREQUIRE(ec < (1 << 29) * ELEMENTS, "Lists are limited to 2**29 elements.");
      elementSizeAndCount.set(((ec / ELEMENTS) << 3) | static_cast<int>(es));
    }

    KJ_ALWAYS_INLINE(void setInlineComposite(WordCount wc)) {
      KJ_DREQUIRE(wc < (1 << 29) * WORDS, "Inline composite lists are limited to 2**29 words.");
      elementSizeAndCount.set(((wc / WORDS) << 3) |
                              static_cast<int>(ElementSize::INLINE_COMPOSITE));
    }
  };

  struct FarRef {
    WireValue<SegmentId> segmentId;

    KJ_ALWAYS_INLINE(void set(SegmentId si)) {
      segmentId.set(si);
    }
  };

  struct CapRef {
    WireValue<uint32_t> index;
    // Index into the message's capability table.
  };

  union {
    uint32_t upper32Bits;

    StructRef structRef;

    ListRef listRef;

    FarRef farRef;

    CapRef capRef;
  };

  KJ_ALWAYS_INLINE(bool isNull() const) {
    // If the upper 32 bits are zero, this is a pointer to an empty struct.  We consider that to be
    // our "null" value.
    return (offsetAndKind.get() == 0) & (upper32Bits == 0);
  }

};

static_assert(sizeof(WirePointer) == sizeof(word),
    "capnp::WirePointer is not exactly one word.  This will probably break everything.");
static_assert(POINTERS * WORDS_PER_POINTER * BYTES_PER_WORD / BYTES == sizeof(WirePointer),
    "WORDS_PER_POINTER is wrong.");
static_assert(POINTERS * BYTES_PER_POINTER / BYTES == sizeof(WirePointer),
    "BYTES_PER_POINTER is wrong.");
static_assert(POINTERS * BITS_PER_POINTER / BITS_PER_BYTE / BYTES == sizeof(WirePointer),
    "BITS_PER_POINTER is wrong.");

namespace {

static const union {
  AlignedData<POINTER_SIZE_IN_WORDS / WORDS> word;
  WirePointer pointer;
} zero = {{{0}}};

}  // namespace

// =======================================================================================

namespace {

template <typename T>
struct SegmentAnd {
  SegmentBuilder* segment;
  T value;
};

}  // namespace

struct WireHelpers {
  static KJ_ALWAYS_INLINE(WordCount roundBytesUpToWords(ByteCount bytes)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bytes + 7 * BYTES) / BYTES_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount roundBitsUpToBytes(BitCount bits)) {
    return (bits + 7 * BITS) / BITS_PER_BYTE;
  }

  static KJ_ALWAYS_INLINE(WordCount64 roundBitsUpToWords(BitCount64 bits)) {
    static_assert(sizeof(word) == 8, "This code assumes 64-bit words.");
    return (bits + 63 * BITS) / BITS_PER_WORD;
  }

  static KJ_ALWAYS_INLINE(ByteCount64 roundBitsUpToBytes(BitCount64 bits)) {
    return (bits + 7 * BITS) / BITS_PER_BYTE;
  }

  static KJ_ALWAYS_INLINE(bool boundsCheck(
      SegmentReader* segment, const word* start, const word* end)) {
    // If segment is null, this is an unchecked message, so we don't do bounds checks.
    return segment == nullptr || segment->containsInterval(start, end);
  }

  static KJ_ALWAYS_INLINE(bool amplifiedRead(SegmentReader* segment, WordCount virtualAmount)) {
    // If segment is null, this is an unchecked message, so we don't do read limiter checks.
    return segment == nullptr || segment->amplifiedRead(virtualAmount);
  }

  static KJ_ALWAYS_INLINE(word* allocate(
      WirePointer*& ref, SegmentBuilder*& segment, CapTableBuilder* capTable, WordCount amount,
      WirePointer::Kind kind, BuilderArena* orphanArena)) {
    // Allocate space in the message for a new object, creating far pointers if necessary.
    //
    // * `ref` starts out being a reference to the pointer which shall be assigned to point at the
    //   new object.  On return, `ref` points to a pointer which needs to be initialized with
    //   the object's type information.  Normally this is the same pointer, but it can change if
    //   a far pointer was allocated -- in this case, `ref` will end up pointing to the far
    //   pointer's tag.  Either way, `allocate()` takes care of making sure that the original
    //   pointer ends up leading to the new object.  On return, only the upper 32 bit of `*ref`
    //   need to be filled in by the caller.
    // * `segment` starts out pointing to the segment containing `ref`.  On return, it points to
    //   the segment containing the allocated object, which is usually the same segment but could
    //   be a different one if the original segment was out of space.
    // * `amount` is the number of words to allocate.
    // * `kind` is the kind of object to allocate.  It is used to initialize the pointer.  It
    //   cannot be `FAR` -- far pointers are allocated automatically as needed.
    // * `orphanArena` is usually null.  If it is non-null, then we're allocating an orphan object.
    //   In this case, `segment` starts out null; the allocation takes place in an arbitrary
    //   segment belonging to the arena.  `ref` will be initialized as a non-far pointer, but its
    //   target offset will be set to zero.
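    //
    // Informal sketch of the far-pointer case: when the current segment is out of space, the new
    // segment gets [landing pad WirePointer][object words...], the original pointer is rewritten
    // as a FAR pointer to that landing pad, and `ref` is returned pointing at the landing pad,
    // whose upper 32 bits the caller still fills in.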

    if (orphanArena == nullptr) {
      if (!ref->isNull()) zeroObject(segment, capTable, ref);

      if (amount == 0 * WORDS && kind == WirePointer::STRUCT) {
        // Note that the check for kind == WirePointer::STRUCT will hopefully cause this whole
        // branch to be optimized away from all the call sites that are allocating non-structs.
        ref->setKindAndTargetForEmptyStruct();
        return reinterpret_cast<word*>(ref);
      }

      word* ptr = segment->allocate(amount);

      if (ptr == nullptr) {
        // Need to allocate in a new segment.  We'll need to allocate an extra pointer worth of
        // space to act as the landing pad for a far pointer.

        WordCount amountPlusRef = amount + POINTER_SIZE_IN_WORDS;
        auto allocation = segment->getArena()->allocate(amountPlusRef);
        segment = allocation.segment;
        ptr = allocation.words;

        // Set up the original pointer to be a far pointer to the new segment.
        ref->setFar(false, segment->getOffsetTo(ptr));
        ref->farRef.set(segment->getSegmentId());

        // Initialize the landing pad to indicate that the data immediately follows the pad.
        ref = reinterpret_cast<WirePointer*>(ptr);
        ref->setKindAndTarget(kind, ptr + POINTER_SIZE_IN_WORDS, segment);

        // Allocated space follows new pointer.
        return ptr + POINTER_SIZE_IN_WORDS;
      } else {
        ref->setKindAndTarget(kind, ptr, segment);
        return ptr;
      }
    } else {
      // orphanArena is non-null.  Allocate an orphan.
      KJ_DASSERT(ref->isNull());
      auto allocation = orphanArena->allocate(amount);
      segment = allocation.segment;
      ref->setKindForOrphan(kind);
      return allocation.words;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFarsNoWritableCheck(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    // If `ref` is a far pointer, follow it.  On return, `ref` will have been updated to point at
    // a WirePointer that contains the type information about the target object, and a pointer to
    // the object contents is returned.  The caller must NOT use `ref->target()` as this may or may
    // not actually return a valid pointer.  `segment` is also updated to point at the segment which
    // actually contains the object.
    //
    // If `ref` is not a far pointer, this simply returns `refTarget`.  Usually, `refTarget` should
    // be the same as `ref->target()`, but may not be in cases where `ref` is only a tag.
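    //
    // Informally: for a single far pointer, the landing pad is one WirePointer in the target
    // segment pointing at the object.  For a double-far, the pad is two words: another FAR
    // pointer giving the object's position, followed by a tag word describing the object with a
    // zero offset.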

    if (ref->kind() == WirePointer::FAR) {
      segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      WirePointer* pad =
          reinterpret_cast<WirePointer*>(segment->getPtrUnchecked(ref->farPositionInSegment()));
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
      return segment->getPtrUnchecked(pad->farPositionInSegment());
    } else {
      return refTarget;
    }
  }

  static KJ_ALWAYS_INLINE(word* followFars(
      WirePointer*& ref, word* refTarget, SegmentBuilder*& segment)) {
    auto result = followFarsNoWritableCheck(ref, refTarget, segment);
    segment->checkWritable();
    return result;
  }

  static KJ_ALWAYS_INLINE(const word* followFars(
      const WirePointer*& ref, const word* refTarget, SegmentReader*& segment)) {
    // Like the other followFars() but operates on readers.

    // If the segment is null, this is an unchecked message, so there are no FAR pointers.
    if (segment != nullptr && ref->kind() == WirePointer::FAR) {
      // Look up the segment containing the landing pad.
      segment = segment->getArena()->tryGetSegment(ref->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains far pointer to unknown segment.") {
        return nullptr;
      }

      // Find the landing pad and check that it is within bounds.
      const word* ptr = segment->getStartPtr() + ref->farPositionInSegment();
      WordCount padWords = (1 + ref->isDoubleFar()) * POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(boundsCheck(segment, ptr, ptr + padWords),
                 "Message contains out-of-bounds far pointer.") {
        return nullptr;
      }

      const WirePointer* pad = reinterpret_cast<const WirePointer*>(ptr);

      // If this is not a double-far then the landing pad is our final pointer.
      if (!ref->isDoubleFar()) {
        ref = pad;
        return pad->target();
      }

      // Landing pad is another far pointer.  It is followed by a tag describing the pointed-to
      // object.
      ref = pad + 1;

      segment = segment->getArena()->tryGetSegment(pad->farRef.segmentId.get());
      KJ_REQUIRE(segment != nullptr, "Message contains double-far pointer to unknown segment.") {
        return nullptr;
      }

      return segment->getStartPtr() + pad->farPositionInSegment();
    } else {
      return refTarget;
    }
  }

  // -----------------------------------------------------------------

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref) {
    // Zero out the pointed-to object.  Use when the pointer is about to be overwritten making the
    // target object no longer reachable.

    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (ref->kind()) {
      case WirePointer::STRUCT:
      case WirePointer::LIST:
        zeroObject(segment, capTable, ref, ref->target());
        break;
      case WirePointer::FAR: {
        segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
        if (segment->isWritable()) {  // Don't zero external data.
          WirePointer* pad =
              reinterpret_cast<WirePointer*>(segment->getPtrUnchecked(ref->farPositionInSegment()));

          if (ref->isDoubleFar()) {
            segment = segment->getArena()->getSegment(pad->farRef.segmentId.get());
            if (segment->isWritable()) {
              zeroObject(segment, capTable,
                         pad + 1, segment->getPtrUnchecked(pad->farPositionInSegment()));
            }
            memset(pad, 0, sizeof(WirePointer) * 2);
          } else {
            zeroObject(segment, capTable, pad);
            memset(pad, 0, sizeof(WirePointer));
          }
        }
        break;
      }
      case WirePointer::OTHER:
        if (ref->isCapability()) {
#if CAPNP_LITE
          KJ_FAIL_ASSERT("Capability encountered in builder in lite mode?") { break; }
#else  // CAPNP_LITE
          capTable->dropCap(ref->capRef.index.get());
#endif  // CAPNP_LITE, else
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }
  }

  static void zeroObject(SegmentBuilder* segment, CapTableBuilder* capTable,
                         WirePointer* tag, word* ptr) {
    // We shouldn't zero out external data linked into the message.
    if (!segment->isWritable()) return;

    switch (tag->kind()) {
      case WirePointer::STRUCT: {
        WirePointer* pointerSection =
            reinterpret_cast<WirePointer*>(ptr + tag->structRef.dataSize.get());
        uint count = tag->structRef.ptrCount.get() / POINTERS;
        for (uint i = 0; i < count; i++) {
          zeroObject(segment, capTable, pointerSection + i);
        }
        memset(ptr, 0, tag->structRef.wordSize() * BYTES_PER_WORD / BYTES);
        break;
      }
      case WirePointer::LIST: {
        switch (tag->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            memset(ptr, 0,
                roundBitsUpToWords(ElementCount64(tag->listRef.elementCount()) *
                                   dataBitsPerElement(tag->listRef.elementSize()))
                    * BYTES_PER_WORD / BYTES);
            break;
          case ElementSize::POINTER: {
            uint count = tag->listRef.elementCount() / ELEMENTS;
            for (uint i = 0; i < count; i++) {
              zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i);
            }
            memset(ptr, 0, POINTER_SIZE_IN_WORDS * count * BYTES_PER_WORD / BYTES);
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);

            KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
                  "Don't know how to handle non-STRUCT inline composite.");
            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            uint count = elementTag->inlineCompositeListElementCount() / ELEMENTS;
            if (pointerCount > 0 * POINTERS) {
              word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (uint i = 0; i < count; i++) {
                pos += dataSize;

                for (uint j = 0; j < pointerCount / POINTERS; j++) {
                  zeroObject(segment, capTable, reinterpret_cast<WirePointer*>(pos));
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }

            memset(ptr, 0, (elementTag->structRef.wordSize() * count + POINTER_SIZE_IN_WORDS)
                           * BYTES_PER_WORD / BYTES);
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        KJ_FAIL_ASSERT("Unexpected OTHER pointer.") {
          break;
        }
        break;
    }
  }

  static KJ_ALWAYS_INLINE(
      void zeroPointerAndFars(SegmentBuilder* segment, WirePointer* ref)) {
    // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, but
    // do not zero the object body.  Used when upgrading.

    if (ref->kind() == WirePointer::FAR) {
      SegmentBuilder* padSegment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
      if (padSegment->isWritable()) {  // Don't zero external data.
        word* pad = padSegment->getPtrUnchecked(ref->farPositionInSegment());
        memset(pad, 0, sizeof(WirePointer) * (1 + ref->isDoubleFar()));
      }
    }
    memset(ref, 0, sizeof(*ref));
  }


  // -----------------------------------------------------------------

  static MessageSizeCounts totalSize(
      SegmentReader* segment, const WirePointer* ref, int nestingLimit) {
    // Compute the total size of the object pointed to, not counting far pointer overhead.

    MessageSizeCounts result = { 0 * WORDS, 0 };

    if (ref->isNull()) {
      return result;
    }

    KJ_REQUIRE(nestingLimit > 0, "Message is too deeply-nested.") {
      return result;
    }
    --nestingLimit;

    const word* ptr = followFars(ref, ref->target(), segment);

    switch (ref->kind()) {
      case WirePointer::STRUCT: {
        KJ_REQUIRE(boundsCheck(segment, ptr, ptr + ref->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          return result;
        }
        result.wordCount += ref->structRef.wordSize();

        const WirePointer* pointerSection =
            reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get());
        uint count = ref->structRef.ptrCount.get() / POINTERS;
        for (uint i = 0; i < count; i++) {
          result += totalSize(segment, pointerSection + i, nestingLimit);
        }
        break;
      }
      case WirePointer::LIST: {
        switch (ref->listRef.elementSize()) {
          case ElementSize::VOID:
            // Nothing.
            break;
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            WordCount64 totalWords = roundBitsUpToWords(
                ElementCount64(ref->listRef.elementCount()) *
                dataBitsPerElement(ref->listRef.elementSize()));
            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + totalWords),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }
            result.wordCount += totalWords;
            break;
          }
          case ElementSize::POINTER: {
            WirePointerCount count = ref->listRef.elementCount() * (POINTERS / ELEMENTS);

            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + count * WORDS_PER_POINTER),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            result.wordCount += count * WORDS_PER_POINTER;

            for (uint i = 0; i < count / POINTERS; i++) {
              result += totalSize(segment, reinterpret_cast<const WirePointer*>(ptr) + i,
                                  nestingLimit);
            }
            break;
          }
          case ElementSize::INLINE_COMPOSITE: {
            WordCount wordCount = ref->listRef.inlineCompositeWordCount();
            KJ_REQUIRE(boundsCheck(segment, ptr, ptr + wordCount + POINTER_SIZE_IN_WORDS),
                       "Message contained out-of-bounds list pointer.") {
              return result;
            }

            const WirePointer* elementTag = reinterpret_cast<const WirePointer*>(ptr);
            ElementCount count = elementTag->inlineCompositeListElementCount();

            KJ_REQUIRE(elementTag->kind() == WirePointer::STRUCT,
                       "Don't know how to handle non-STRUCT inline composite.") {
              return result;
            }

            auto actualSize = elementTag->structRef.wordSize() / ELEMENTS * ElementCount64(count);
            KJ_REQUIRE(actualSize <= wordCount,
                       "Struct list pointer's elements overran size.") {
              return result;
            }

            // We count the actual size rather than the claimed word count because that's what
            // we'll end up with if we make a copy.
            result.wordCount += actualSize + POINTER_SIZE_IN_WORDS;

            WordCount dataSize = elementTag->structRef.dataSize.get();
            WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();

            if (pointerCount > 0 * POINTERS) {
              const word* pos = ptr + POINTER_SIZE_IN_WORDS;
              for (uint i = 0; i < count / ELEMENTS; i++) {
                pos += dataSize;

                for (uint j = 0; j < pointerCount / POINTERS; j++) {
                  result += totalSize(segment, reinterpret_cast<const WirePointer*>(pos),
                                      nestingLimit);
                  pos += POINTER_SIZE_IN_WORDS;
                }
              }
            }
            break;
          }
        }
        break;
      }
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Unexpected FAR pointer.") {
          break;
        }
        break;
      case WirePointer::OTHER:
        if (ref->isCapability()) {
          result.capCount++;
        } else {
          KJ_FAIL_REQUIRE("Unknown pointer type.") { break; }
        }
        break;
    }

    return result;
  }

  // -----------------------------------------------------------------
  // Copy from an unchecked message.

  static KJ_ALWAYS_INLINE(
      void copyStruct(SegmentBuilder* segment, CapTableBuilder* capTable,
                      word* dst, const word* src,
                      WordCount dataSize, WirePointerCount pointerCount)) {
    memcpy(dst, src, dataSize * BYTES_PER_WORD / BYTES);

    const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src + dataSize);
    WirePointer* dstRefs = reinterpret_cast<WirePointer*>(dst + dataSize);

    for (uint i = 0; i < pointerCount / POINTERS; i++) {
      SegmentBuilder* subSegment = segment;
      WirePointer* dstRef = dstRefs + i;
      copyMessage(subSegment, capTable, dstRef, srcRefs + i);
    }
  }

  static word* copyMessage(
      SegmentBuilder*& segment, CapTableBuilder* capTable,
      WirePointer*& dst, const WirePointer* src) {
    // Not always-inline because it's recursive.

    switch (src->kind()) {
      case WirePointer::STRUCT: {
        if (src->isNull()) {
          memset(dst, 0, sizeof(WirePointer));
          return nullptr;
        } else {
          const word* srcPtr = src->target();
          word* dstPtr = allocate(
              dst, segment, capTable, src->structRef.wordSize(), WirePointer::STRUCT, nullptr);

          copyStruct(segment, capTable, dstPtr, srcPtr, src->structRef.dataSize.get(),
                     src->structRef.ptrCount.get());

          dst->structRef.set(src->structRef.dataSize.get(), src->structRef.ptrCount.get());
          return dstPtr;
        }
      }
      case WirePointer::LIST: {
        switch (src->listRef.elementSize()) {
          case ElementSize::VOID:
          case ElementSize::BIT:
          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES: {
            WordCount wordCount = roundBitsUpToWords(
                ElementCount64(src->listRef.elementCount()) *
                dataBitsPerElement(src->listRef.elementSize()));
            const word* srcPtr = src->target();
            word* dstPtr = allocate(dst, segment, capTable, wordCount, WirePointer::LIST, nullptr);
            memcpy(dstPtr, srcPtr, wordCount * BYTES_PER_WORD / BYTES);

            dst->listRef.set(src->listRef.elementSize(), src->listRef.elementCount());
            return dstPtr;
          }

          case ElementSize::POINTER: {
            const WirePointer* srcRefs = reinterpret_cast<const WirePointer*>(src->target());
            WirePointer* dstRefs = reinterpret_cast<WirePointer*>(
                allocate(dst, segment, capTable, src->listRef.elementCount() *
                    (1 * POINTERS / ELEMENTS) * WORDS_PER_POINTER,
                    WirePointer::LIST, nullptr));

            uint n = src->listRef.elementCount() / ELEMENTS;
            for (uint i = 0; i < n; i++) {
              SegmentBuilder* subSegment = segment;
              WirePointer* dstRef = dstRefs + i;
              copyMessage(subSegment, capTable, dstRef, srcRefs + i);
            }

            dst->listRef.set(ElementSize::POINTER, src->listRef.elementCount());
            return reinterpret_cast<word*>(dstRefs);
          }

          case ElementSize::INLINE_COMPOSITE: {
            const word* srcPtr = src->target();
            word* dstPtr = allocate(dst, segment, capTable,
                src->listRef.inlineCompositeWordCount() + POINTER_SIZE_IN_WORDS,
                WirePointer::LIST, nullptr);

            dst->listRef.setInlineComposite(src->listRef.inlineCompositeWordCount());

            const WirePointer* srcTag = reinterpret_cast<const WirePointer*>(srcPtr);
            memcpy(dstPtr, srcTag, sizeof(WirePointer));

            const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
            word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;

            KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
                "INLINE_COMPOSITE of lists is not yet supported.");

            uint n = srcTag->inlineCompositeListElementCount() / ELEMENTS;
            for (uint i = 0; i < n; i++) {
              copyStruct(segment, capTable, dstElement, srcElement,
                  srcTag->structRef.dataSize.get(), srcTag->structRef.ptrCount.get());
              srcElement += srcTag->structRef.wordSize();
              dstElement += srcTag->structRef.wordSize();
            }
            return dstPtr;
          }
        }
        break;
      }
      case WirePointer::OTHER:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain OTHER pointers (e.g. capabilities).");
        break;
      case WirePointer::FAR:
        KJ_FAIL_REQUIRE("Unchecked messages cannot contain far pointers.");
        break;
    }

    return nullptr;
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, WirePointer* src) {
    // Make *dst point to the same object as *src.  Both must reside in the same message, but can
    // be in different segments.  Not always-inline because this is rarely used.
    //
    // Caller MUST zero out the source pointer after calling this, to make sure no later code
    // mistakenly thinks the source location still owns the object.  transferPointer() doesn't do
    // this zeroing itself because many callers transfer several pointers in a loop then zero out
    // the whole section.
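    //
    // For example (illustrative only; `dstRefs`, `srcRefs`, and `count` are hypothetical locals),
    // a caller moving a whole pointer section typically does:
    //   for (uint i = 0; i < count; i++) {
    //     transferPointer(dstSegment, dstRefs + i, srcSegment, srcRefs + i);
    //   }
    //   memset(srcRefs, 0, count * sizeof(WirePointer));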

    KJ_DASSERT(dst->isNull());
    // We expect the caller to ensure the target is already null so it won't leak.

    if (src->isNull()) {
      memset(dst, 0, sizeof(WirePointer));
    } else if (src->isPositional()) {
      transferPointer(dstSegment, dst, srcSegment, src, src->target());
    } else {
      // Far and other pointers are position-independent, so we can just copy.
      memcpy(dst, src, sizeof(WirePointer));
    }
  }

  static void transferPointer(SegmentBuilder* dstSegment, WirePointer* dst,
                              SegmentBuilder* srcSegment, const WirePointer* srcTag,
                              word* srcPtr) {
    // Like the other overload, but splits src into a tag and a target.  Particularly useful for
    // OrphanBuilder.

    if (dstSegment == srcSegment) {
      // Same segment, so create a direct pointer.

      if (srcTag->kind() == WirePointer::STRUCT && srcTag->structRef.wordSize() == 0 * WORDS) {
        dst->setKindAndTargetForEmptyStruct();
      } else {
        dst->setKindAndTarget(srcTag->kind(), srcPtr, dstSegment);
      }

      // We can just copy the upper 32 bits.  (Use memcpy() to comply with aliasing rules.)
      memcpy(&dst->upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));
    } else {
      // Need to create a far pointer.  Try to allocate it in the same segment as the source, so
      // that it doesn't need to be a double-far.

      WirePointer* landingPad =
          reinterpret_cast<WirePointer*>(srcSegment->allocate(1 * WORDS));
      if (landingPad == nullptr) {
        // Darn, need a double-far.
        auto allocation = srcSegment->getArena()->allocate(2 * WORDS);
        SegmentBuilder* farSegment = allocation.segment;
        landingPad = reinterpret_cast<WirePointer*>(allocation.words);

        landingPad[0].setFar(false, srcSegment->getOffsetTo(srcPtr));
        landingPad[0].farRef.segmentId.set(srcSegment->getSegmentId());

        landingPad[1].setKindWithZeroOffset(srcTag->kind());
        memcpy(&landingPad[1].upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));

        dst->setFar(true, farSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(farSegment->getSegmentId());
      } else {
        // Simple landing pad is just a pointer.
        landingPad->setKindAndTarget(srcTag->kind(), srcPtr, srcSegment);
        memcpy(&landingPad->upper32Bits, &srcTag->upper32Bits, sizeof(srcTag->upper32Bits));

        dst->setFar(false, srcSegment->getOffsetTo(reinterpret_cast<word*>(landingPad)));
        dst->farRef.set(srcSegment->getSegmentId());
      }
    }
  }

  // -----------------------------------------------------------------

  static KJ_ALWAYS_INLINE(StructBuilder initStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate space for the new struct.  Newly-allocated space is automatically zeroed.
    word* ptr = allocate(ref, segment, capTable, size.total(), WirePointer::STRUCT, orphanArena);

    // Initialize the pointer.
    ref->structRef.set(size);

    // Build the StructBuilder.
    return StructBuilder(segment, capTable, ptr, reinterpret_cast<WirePointer*>(ptr + size.data),
                         size.data * BITS_PER_WORD, size.pointers);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, StructSize size,
      const word* defaultValue)) {
    return getWritableStructPointer(ref, ref->target(), segment, capTable, size, defaultValue);
  }

  static KJ_ALWAYS_INLINE(StructBuilder getWritableStructPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      StructSize size, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return initStructPointer(ref, segment, capTable, size, orphanArena);
      }
      refTarget = copyMessage(segment, capTable, ref,
          reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* oldRef = ref;
    SegmentBuilder* oldSegment = segment;
    word* oldPtr = followFars(oldRef, refTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::STRUCT,
        "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    WordCount oldDataSize = oldRef->structRef.dataSize.get();
    WirePointerCount oldPointerCount = oldRef->structRef.ptrCount.get();
    WirePointer* oldPointerSection =
        reinterpret_cast<WirePointer*>(oldPtr + oldDataSize);

    if (oldDataSize < size.data || oldPointerCount < size.pointers) {
      // The space allocated for this struct is too small.  Unlike with readers, we can't just
      // run with it and do bounds checks at access time, because how would we handle writes?
      // Instead, we have to copy the struct to a new space now.

      WordCount newDataSize = kj::max(oldDataSize, size.data);
      WirePointerCount newPointerCount = kj::max(oldPointerCount, size.pointers);
      WordCount totalSize = newDataSize + newPointerCount * WORDS_PER_POINTER;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(segment, ref);

      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
      ref->structRef.set(newDataSize, newPointerCount);

      // Copy data section.
      memcpy(ptr, oldPtr, oldDataSize * BYTES_PER_WORD / BYTES);

      // Copy pointer section.
      WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(ptr + newDataSize);
      for (uint i = 0; i < oldPointerCount / POINTERS; i++) {
        transferPointer(segment, newPointerSection + i, oldSegment, oldPointerSection + i);
      }

      // Zero out old location.  This has two purposes:
      // 1) We don't want to leak the original contents of the struct when the message is written
      //    out as it may contain secrets that the caller intends to remove from the new copy.
      // 2) Zeros will be deflated by packing, making this dead memory almost-free if it ever
      //    hits the wire.
      memset(oldPtr, 0,
             (oldDataSize + oldPointerCount * WORDS_PER_POINTER) * BYTES_PER_WORD / BYTES);

      return StructBuilder(segment, capTable, ptr, newPointerSection, newDataSize * BITS_PER_WORD,
                           newPointerCount);
    } else {
      return StructBuilder(oldSegment, capTable, oldPtr, oldPointerSection,
                           oldDataSize * BITS_PER_WORD, oldPointerCount);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, ElementSize elementSize, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
        "Should have called initStructListPointer() instead.");

    BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
    WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;
    auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;

    // Calculate size of the list.
    WordCount wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);

    // Allocate the list.
    word* ptr = allocate(ref, segment, capTable, wordCount, WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(elementSize, elementCount);

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, step, elementCount, dataSize,
                       pointerCount, elementSize);
  }

  static KJ_ALWAYS_INLINE(ListBuilder initStructListPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      ElementCount elementCount, StructSize elementSize, BuilderArena* orphanArena = nullptr)) {
    auto wordsPerElement = elementSize.total() / ELEMENTS;

    // Allocate the list, prefixed by a single WirePointer.
    WordCount wordCount = elementCount * wordsPerElement;
    word* ptr = allocate(ref, segment, capTable, POINTER_SIZE_IN_WORDS + wordCount,
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    // INLINE_COMPOSITE lists replace the element count with the word count.
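    // (Informal sketch of the wire layout: [list pointer holding the total word count]
    // [tag word holding the element count and per-element struct size][element 0][element 1]...)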
    ref->listRef.setInlineComposite(wordCount);

    // Initialize the list tag.
    reinterpret_cast<WirePointer*>(ptr)->setKindAndInlineCompositeListElementCount(
        WirePointer::STRUCT, elementCount);
    reinterpret_cast<WirePointer*>(ptr)->structRef.set(elementSize);
    ptr += POINTER_SIZE_IN_WORDS;

    // Build the ListBuilder.
    return ListBuilder(segment, capTable, ptr, wordsPerElement * BITS_PER_WORD, elementCount,
                       elementSize.data * BITS_PER_WORD, elementSize.pointers,
                       ElementSize::INLINE_COMPOSITE);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      ElementSize elementSize, const word* defaultValue)) {
    return getWritableListPointer(origRef, origRef->target(), origSegment, capTable, elementSize,
                                  defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable, ElementSize elementSize,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    KJ_DREQUIRE(elementSize != ElementSize::INLINE_COMPOSITE,
             "Use getStructList{Element,Field}() for structs.");

    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(elementSize);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // We must verify that the pointer has the right size.  Unlike in
    // getWritableStructListPointer(), we never need to "upgrade" the data, because this
    // method is called only for non-struct lists, and there is no allowed upgrade path *to*
    // a non-struct list, only *from* them.

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = ref->listRef.elementSize();

    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // The existing element size is INLINE_COMPOSITE, though we expected a list of primitives.
      // The existing data must have been written with a newer version of the protocol.  We
      // therefore never need to upgrade the data in this case, but we do need to validate that it
      // is a valid upgrade from what we expected.

      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      WordCount dataSize = tag->structRef.dataSize.get();
      WirePointerCount pointerCount = tag->structRef.ptrCount.get();

      switch (elementSize) {
        case ElementSize::VOID:
          // Anything is a valid upgrade from Void.
          break;

        case ElementSize::BIT:
          KJ_FAIL_REQUIRE(
              "Found struct list where bit list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
          break;

        case ElementSize::BYTE:
        case ElementSize::TWO_BYTES:
        case ElementSize::FOUR_BYTES:
        case ElementSize::EIGHT_BYTES:
          KJ_REQUIRE(dataSize >= 1 * WORDS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          break;

        case ElementSize::POINTER:
          KJ_REQUIRE(pointerCount >= 1 * POINTERS,
                     "Existing list value is incompatible with expected type.") {
            goto useDefault;
          }
          // Adjust the pointer to point at the reference segment.
          ptr += dataSize;
          break;

        case ElementSize::INLINE_COMPOSITE:
          KJ_UNREACHABLE;
      }

      // OK, looks valid.

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         dataSize * BITS_PER_WORD, pointerCount, ElementSize::INLINE_COMPOSITE);
    } else {
      BitCount dataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      WirePointerCount pointerCount = pointersPerElement(oldSize) * ELEMENTS;

      if (elementSize == ElementSize::BIT) {
        KJ_REQUIRE(oldSize == ElementSize::BIT,
            "Found non-bit list where bit list was expected.") {
          goto useDefault;
        }
      } else {
        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where non-bit list was expected.") {
          goto useDefault;
        }
        KJ_REQUIRE(dataSize >= dataBitsPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
        KJ_REQUIRE(pointerCount >= pointersPerElement(elementSize) * ELEMENTS,
                   "Existing list value is incompatible with expected type.") {
          goto useDefault;
        }
      }

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, oldSize);
    }
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue)) {
    return getWritableListPointerAnySize(origRef, origRef->target(), origSegment,
                                         capTable, defaultValue);
  }

  static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointerAnySize(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::VOID);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    WirePointer* ref = origRef;
    SegmentBuilder* segment = origSegment;
    word* ptr = followFars(ref, origRefTarget, segment);

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
        "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();

    if (elementSize == ElementSize::INLINE_COMPOSITE) {
      // Read the tag to get the actual element count.
      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
      ptr += POINTER_SIZE_IN_WORDS;

      return ListBuilder(segment, capTable, ptr,
                         tag->structRef.wordSize() * BITS_PER_WORD / ELEMENTS,
                         tag->inlineCompositeListElementCount(),
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE);
    } else {
      BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
      WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;

      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
      return ListBuilder(segment, capTable, ptr, step, ref->listRef.elementCount(),
                         dataSize, pointerCount, elementSize);
    }
  }

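  // Gets a writable struct list, upgrading the existing list in place if its elements are
  // smaller than `elementSize` (e.g. because it was written by an older version of the schema).
  // The upgrade path copies every element into a freshly allocated INLINE_COMPOSITE list and
  // zeros out the old location.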
  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue)) {
    return getWritableStructListPointer(origRef, origRef->target(), origSegment, capTable,
                                        elementSize, defaultValue);
  }
  static KJ_ALWAYS_INLINE(ListBuilder getWritableStructListPointer(
      WirePointer* origRef, word* origRefTarget,
      SegmentBuilder* origSegment, CapTableBuilder* capTable,
      StructSize elementSize, const word* defaultValue, BuilderArena* orphanArena = nullptr)) {
    if (origRef->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListBuilder(ElementSize::INLINE_COMPOSITE);
      }
      origRefTarget = copyMessage(
          origSegment, capTable, origRef, reinterpret_cast<const WirePointer*>(defaultValue));
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    // We must verify that the pointer has the right size and potentially upgrade it if not.

    WirePointer* oldRef = origRef;
    SegmentBuilder* oldSegment = origSegment;
    word* oldPtr = followFars(oldRef, origRefTarget, oldSegment);

    KJ_REQUIRE(oldRef->kind() == WirePointer::LIST,
               "Called getList{Field,Element}() but existing pointer is not a list.") {
      goto useDefault;
    }

    ElementSize oldSize = oldRef->listRef.elementSize();

    if (oldSize == ElementSize::INLINE_COMPOSITE) {
      // Existing list is INLINE_COMPOSITE, but we need to verify that the sizes match.

      WirePointer* oldTag = reinterpret_cast<WirePointer*>(oldPtr);
      oldPtr += POINTER_SIZE_IN_WORDS;
      KJ_REQUIRE(oldTag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE list with non-STRUCT elements not supported.") {
        goto useDefault;
      }

      WordCount oldDataSize = oldTag->structRef.dataSize.get();
      WirePointerCount oldPointerCount = oldTag->structRef.ptrCount.get();
      auto oldStep = (oldDataSize + oldPointerCount * WORDS_PER_POINTER) / ELEMENTS;
      ElementCount elementCount = oldTag->inlineCompositeListElementCount();

      if (oldDataSize >= elementSize.data && oldPointerCount >= elementSize.pointers) {
        // Old size is at least as large as we need.  Ship it.
        return ListBuilder(oldSegment, capTable, oldPtr, oldStep * BITS_PER_WORD, elementCount,
                           oldDataSize * BITS_PER_WORD, oldPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }

      // The structs in this list are smaller than expected, probably written using an older
      // version of the protocol.  We need to make a copy and expand them.

      WordCount newDataSize = kj::max(oldDataSize, elementSize.data);
      WirePointerCount newPointerCount = kj::max(oldPointerCount, elementSize.pointers);
      auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
      WordCount totalSize = newStep * elementCount;

      // Don't let allocate() zero out the object just yet.
      zeroPointerAndFars(origSegment, origRef);

      word* newPtr = allocate(origRef, origSegment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                              WirePointer::LIST, orphanArena);
      origRef->listRef.setInlineComposite(totalSize);

      WirePointer* newTag = reinterpret_cast<WirePointer*>(newPtr);
      newTag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
      newTag->structRef.set(newDataSize, newPointerCount);
      newPtr += POINTER_SIZE_IN_WORDS;

      word* src = oldPtr;
      word* dst = newPtr;
      for (uint i = 0; i < elementCount / ELEMENTS; i++) {
        // Copy data section.
        memcpy(dst, src, oldDataSize * BYTES_PER_WORD / BYTES);

        // Copy pointer section.
        WirePointer* newPointerSection = reinterpret_cast<WirePointer*>(dst + newDataSize);
        WirePointer* oldPointerSection = reinterpret_cast<WirePointer*>(src + oldDataSize);
        for (uint j = 0; j < oldPointerCount / POINTERS; j++) {
          transferPointer(origSegment, newPointerSection + j, oldSegment, oldPointerSection + j);
        }

        dst += newStep * (1 * ELEMENTS);
        src += oldStep * (1 * ELEMENTS);
      }

      // Zero out old location.  See explanation in getWritableStructPointer().
      memset(oldPtr, 0, oldStep * elementCount * BYTES_PER_WORD / BYTES);

      return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                         newDataSize * BITS_PER_WORD, newPointerCount, ElementSize::INLINE_COMPOSITE);
    } else {
      // We're upgrading from a non-struct list.

      BitCount oldDataSize = dataBitsPerElement(oldSize) * ELEMENTS;
      WirePointerCount oldPointerCount = pointersPerElement(oldSize) * ELEMENTS;
      auto oldStep = (oldDataSize + oldPointerCount * BITS_PER_POINTER) / ELEMENTS;
      ElementCount elementCount = oldRef->listRef.elementCount();

      if (oldSize == ElementSize::VOID) {
        // Nothing to copy, just allocate a new list.
        return initStructListPointer(origRef, origSegment, capTable, elementCount, elementSize);
      } else {
        // Upgrading to an inline composite list.

        KJ_REQUIRE(oldSize != ElementSize::BIT,
            "Found bit list where struct list was expected; upgrading boolean lists to structs "
            "is no longer supported.") {
          goto useDefault;
        }

        WordCount newDataSize = elementSize.data;
        WirePointerCount newPointerCount = elementSize.pointers;

        if (oldSize == ElementSize::POINTER) {
          newPointerCount = kj::max(newPointerCount, 1 * POINTERS);
        } else {
          // Old list contains data elements, so we need at least 1 word of data.
          newDataSize = kj::max(newDataSize, 1 * WORDS);
        }

        auto newStep = (newDataSize + newPointerCount * WORDS_PER_POINTER) / ELEMENTS;
        WordCount totalWords = elementCount * newStep;

        // Don't let allocate() zero out the object just yet.
        zeroPointerAndFars(origSegment, origRef);

        word* newPtr = allocate(origRef, origSegment, capTable, totalWords + POINTER_SIZE_IN_WORDS,
                                WirePointer::LIST, orphanArena);
        origRef->listRef.setInlineComposite(totalWords);

        WirePointer* tag = reinterpret_cast<WirePointer*>(newPtr);
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, elementCount);
        tag->structRef.set(newDataSize, newPointerCount);
        newPtr += POINTER_SIZE_IN_WORDS;

        if (oldSize == ElementSize::POINTER) {
          WirePointer* dst = reinterpret_cast<WirePointer*>(newPtr + newDataSize);
          WirePointer* src = reinterpret_cast<WirePointer*>(oldPtr);
          for (uint i = 0; i < elementCount / ELEMENTS; i++) {
            transferPointer(origSegment, dst, oldSegment, src);
            dst += newStep / WORDS_PER_POINTER * (1 * ELEMENTS);
            ++src;
          }
        } else {
          word* dst = newPtr;
          char* src = reinterpret_cast<char*>(oldPtr);
          ByteCount oldByteStep = oldDataSize / BITS_PER_BYTE;
          for (uint i = 0; i < elementCount / ELEMENTS; i++) {
            memcpy(dst, src, oldByteStep / BYTES);
            src += oldByteStep / BYTES;
            dst += newStep * (1 * ELEMENTS);
          }
        }

        // Zero out old location.  See explanation in getWritableStructPointer().
        memset(oldPtr, 0, roundBitsUpToBytes(oldStep * elementCount) / BYTES);

        return ListBuilder(origSegment, capTable, newPtr, newStep * BITS_PER_WORD, elementCount,
                           newDataSize * BITS_PER_WORD, newPointerCount,
                           ElementSize::INLINE_COMPOSITE);
      }
    }
  }

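  // Text values are stored on the wire as BYTE lists with a trailing NUL, so the allocation
  // below is one byte larger than the requested size and the returned Text::Builder excludes
  // the terminator.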
  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> initTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, ByteCount size,
      BuilderArena* orphanArena = nullptr)) {
    // The byte list must include a NUL terminator.
    ByteCount byteSize = size + 1 * BYTES;

    // Allocate the space.
    word* ptr = allocate(
        ref, segment, capTable, roundBytesUpToWords(byteSize), WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, byteSize * (1 * ELEMENTS / BYTES));

    // Build the Text::Builder.  This will initialize the NUL terminator.
    return { segment, Text::Builder(reinterpret_cast<char*>(ptr), size / BYTES) };
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Text::Builder> setTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Text::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    auto allocation = initTextPointer(ref, segment, capTable, value.size() * BYTES, orphanArena);
    memcpy(allocation.value.begin(), value.begin(), value.size());
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    return getWritableTextPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Builder getWritableTextPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultSize == 0 * BYTES) {
        return nullptr;
      } else {
        Text::Builder builder = initTextPointer(ref, segment, capTable, defaultSize).value;
        memcpy(builder.begin(), defaultValue, defaultSize / BYTES);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);
      char* cptr = reinterpret_cast<char*>(ptr);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getText{Field,Element}() but existing pointer is not a list.");
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getText{Field,Element}() but existing list pointer is not byte-sized.");

      size_t size = ref->listRef.elementCount() / ELEMENTS;
      KJ_REQUIRE(size > 0 && cptr[size-1] == '\0', "Text blob missing NUL terminator.") {
        goto useDefault;
      }

      return Text::Builder(cptr, size - 1);
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> initDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, ByteCount size,
      BuilderArena* orphanArena = nullptr)) {
    // Allocate the space.
    word* ptr = allocate(ref, segment, capTable, roundBytesUpToWords(size),
                         WirePointer::LIST, orphanArena);

    // Initialize the pointer.
    ref->listRef.set(ElementSize::BYTE, size * (1 * ELEMENTS / BYTES));

    // Build the Data::Builder.
    return { segment, Data::Builder(reinterpret_cast<byte*>(ptr), size / BYTES) };
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<Data::Builder> setDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable, Data::Reader value,
      BuilderArena* orphanArena = nullptr)) {
    auto allocation = initDataPointer(ref, segment, capTable, value.size() * BYTES, orphanArena);
    memcpy(allocation.value.begin(), value.begin(), value.size());
    return allocation;
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    return getWritableDataPointer(ref, ref->target(), segment, capTable, defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Builder getWritableDataPointer(
      WirePointer* ref, word* refTarget, SegmentBuilder* segment, CapTableBuilder* capTable,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
      if (defaultSize == 0 * BYTES) {
        return nullptr;
      } else {
        Data::Builder builder = initDataPointer(ref, segment, capTable, defaultSize).value;
        memcpy(builder.begin(), defaultValue, defaultSize / BYTES);
        return builder;
      }
    } else {
      word* ptr = followFars(ref, refTarget, segment);

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getData{Field,Element}() but existing pointer is not a list.");
      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
          "Called getData{Field,Element}() but existing list pointer is not byte-sized.");

      return Data::Builder(reinterpret_cast<byte*>(ptr), ref->listRef.elementCount() / ELEMENTS);
    }
  }

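  // Deep-copies the struct `value` into a newly allocated object and points `ref` at it.  A data
  // section of exactly one bit is copied via getDataField<bool>(), since memcpy() cannot address
  // a fraction of a byte.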
  static SegmentAnd<word*> setStructPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, StructReader value,
      BuilderArena* orphanArena = nullptr) {
    WordCount dataSize = roundBitsUpToWords(value.dataSize);
    WordCount totalSize = dataSize + value.pointerCount * WORDS_PER_POINTER;

    word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::STRUCT, orphanArena);
    ref->structRef.set(dataSize, value.pointerCount);

    if (value.dataSize == 1 * BITS) {
      *reinterpret_cast<char*>(ptr) = value.getDataField<bool>(0 * ELEMENTS);
    } else {
      memcpy(ptr, value.data, value.dataSize / BITS_PER_BYTE / BYTES);
    }

    WirePointer* pointerSection = reinterpret_cast<WirePointer*>(ptr + dataSize);
    for (uint i = 0; i < value.pointerCount / POINTERS; i++) {
      copyPointer(segment, capTable, pointerSection + i,
                  value.segment, value.capTable, value.pointers + i, value.nestingLimit);
    }

    return { segment, ptr };
  }

#if !CAPNP_LITE
  static void setCapabilityPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref,
      kj::Own<ClientHook>&& cap) {
    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }
    if (cap->isNull()) {
      memset(ref, 0, sizeof(*ref));
    } else {
      ref->setCap(capTable->injectCap(kj::mv(cap)));
    }
  }
#endif  // !CAPNP_LITE

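  // Deep-copies the list `value`.  Data-only lists are copied with a single memcpy(); pointer
  // elements (including the pointer sections of INLINE_COMPOSITE lists) go through copyPointer()
  // so that the objects they target are deep-copied as well.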
  static SegmentAnd<word*> setListPointer(
      SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* ref, ListReader value,
      BuilderArena* orphanArena = nullptr) {
    WordCount totalSize = roundBitsUpToWords(value.elementCount * value.step);

    if (value.elementSize != ElementSize::INLINE_COMPOSITE) {
      // List of non-structs.
      word* ptr = allocate(ref, segment, capTable, totalSize, WirePointer::LIST, orphanArena);

      if (value.elementSize == ElementSize::POINTER) {
        // List of pointers.
        ref->listRef.set(ElementSize::POINTER, value.elementCount);
        for (uint i = 0; i < value.elementCount / ELEMENTS; i++) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(ptr) + i,
                      value.segment, value.capTable,
                      reinterpret_cast<const WirePointer*>(value.ptr) + i,
                      value.nestingLimit);
        }
      } else {
        // List of data.
        ref->listRef.set(value.elementSize, value.elementCount);
        memcpy(ptr, value.ptr, totalSize * BYTES_PER_WORD / BYTES);
      }

      return { segment, ptr };
    } else {
      // List of structs.
      word* ptr = allocate(ref, segment, capTable, totalSize + POINTER_SIZE_IN_WORDS,
                           WirePointer::LIST, orphanArena);
      ref->listRef.setInlineComposite(totalSize);

      WordCount dataSize = roundBitsUpToWords(value.structDataSize);
      WirePointerCount pointerCount = value.structPointerCount;

      WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, value.elementCount);
      tag->structRef.set(dataSize, pointerCount);
      word* dst = ptr + POINTER_SIZE_IN_WORDS;

      const word* src = reinterpret_cast<const word*>(value.ptr);
      for (uint i = 0; i < value.elementCount / ELEMENTS; i++) {
        memcpy(dst, src, value.structDataSize / BITS_PER_BYTE / BYTES);
        dst += dataSize;
        src += dataSize;

        for (uint j = 0; j < pointerCount / POINTERS; j++) {
          copyPointer(segment, capTable, reinterpret_cast<WirePointer*>(dst),
              value.segment, value.capTable, reinterpret_cast<const WirePointer*>(src),
              value.nestingLimit);
          dst += POINTER_SIZE_IN_WORDS;
          src += POINTER_SIZE_IN_WORDS;
        }
      }

      return { segment, ptr };
    }
  }

  static KJ_ALWAYS_INLINE(SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      int nestingLimit, BuilderArena* orphanArena = nullptr)) {
    return copyPointer(dstSegment, dstCapTable, dst,
                       srcSegment, srcCapTable, src, src->target(),
                       nestingLimit, orphanArena);
  }

  static SegmentAnd<word*> copyPointer(
      SegmentBuilder* dstSegment, CapTableBuilder* dstCapTable, WirePointer* dst,
      SegmentReader* srcSegment, CapTableReader* srcCapTable, const WirePointer* src,
      const word* srcTarget, int nestingLimit, BuilderArena* orphanArena = nullptr) {
    // Deep-copy the object pointed to by src into dst.  It turns out we can't reuse
    // readStructPointer(), etc. because they do type checking whereas here we want to accept any
    // valid pointer.

    if (src->isNull()) {
    useDefault:
      if (!dst->isNull()) {
        zeroObject(dstSegment, dstCapTable, dst);
        memset(dst, 0, sizeof(*dst));
      }
      return { dstSegment, nullptr };
    }

    const word* ptr = WireHelpers::followFars(src, srcTarget, srcSegment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported the error.
      goto useDefault;
    }

    switch (src->kind()) {
      case WirePointer::STRUCT:
        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        KJ_REQUIRE(boundsCheck(srcSegment, ptr, ptr + src->structRef.wordSize()),
                   "Message contained out-of-bounds struct pointer.") {
          goto useDefault;
        }
        return setStructPointer(dstSegment, dstCapTable, dst,
            StructReader(srcSegment, srcCapTable, ptr,
                         reinterpret_cast<const WirePointer*>(ptr + src->structRef.dataSize.get()),
                         src->structRef.dataSize.get() * BITS_PER_WORD,
                         src->structRef.ptrCount.get(),
                         nestingLimit - 1),
            orphanArena);

      case WirePointer::LIST: {
        ElementSize elementSize = src->listRef.elementSize();

        KJ_REQUIRE(nestingLimit > 0,
              "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
          goto useDefault;
        }

        if (elementSize == ElementSize::INLINE_COMPOSITE) {
          WordCount wordCount = src->listRef.inlineCompositeWordCount();
          const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
          ptr += POINTER_SIZE_IN_WORDS;

          KJ_REQUIRE(boundsCheck(srcSegment, ptr - POINTER_SIZE_IN_WORDS, ptr + wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                     "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
            goto useDefault;
          }

          ElementCount elementCount = tag->inlineCompositeListElementCount();
          auto wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

          KJ_REQUIRE(wordsPerElement * ElementCount64(elementCount) <= wordCount,
                     "INLINE_COMPOSITE list's elements overrun its word count.") {
            goto useDefault;
          }

          if (wordsPerElement * (1 * ELEMENTS) == 0 * WORDS) {
            // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
            // without having sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (1 * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr,
                         elementCount, wordsPerElement * BITS_PER_WORD,
                         tag->structRef.dataSize.get() * BITS_PER_WORD,
                         tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
                         nestingLimit - 1),
              orphanArena);
        } else {
          BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
          WirePointerCount pointerCount = pointersPerElement(elementSize) * ELEMENTS;
          auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;
          ElementCount elementCount = src->listRef.elementCount();
          WordCount64 wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);

          KJ_REQUIRE(boundsCheck(srcSegment, ptr, ptr + wordCount),
                     "Message contains out-of-bounds list pointer.") {
            goto useDefault;
          }

          if (elementSize == ElementSize::VOID) {
            // Watch out for lists of void, which can claim to be arbitrarily large without having
            // sent actual data.
            KJ_REQUIRE(amplifiedRead(srcSegment, elementCount * (1 * WORDS / ELEMENTS)),
                       "Message contains amplified list pointer.") {
              goto useDefault;
            }
          }

          return setListPointer(dstSegment, dstCapTable, dst,
              ListReader(srcSegment, srcCapTable, ptr, elementCount, step, dataSize, pointerCount,
                         elementSize, nestingLimit - 1),
              orphanArena);
        }
      }

      case WirePointer::FAR:
        KJ_FAIL_ASSERT("Far pointer should have been handled above.") {
          goto useDefault;
        }

      case WirePointer::OTHER: {
        KJ_REQUIRE(src->isCapability(), "Unknown pointer type.") {
          goto useDefault;
        }

#if !CAPNP_LITE
        KJ_IF_MAYBE(cap, srcCapTable->extractCap(src->capRef.index.get())) {
          setCapabilityPointer(dstSegment, dstCapTable, dst, kj::mv(*cap));
          // Return dummy non-null pointer so OrphanBuilder doesn't end up null.
          return { dstSegment, reinterpret_cast<word*>(1) };
        } else {
#endif  // !CAPNP_LITE
          KJ_FAIL_REQUIRE("Message contained invalid capability pointer.") {
            goto useDefault;
          }
#if !CAPNP_LITE
        }
#endif  // !CAPNP_LITE
      }
    }

    KJ_UNREACHABLE;
  }

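  // Links an orphaned object into the message at `ref`, destroying whatever `ref` previously
  // pointed to.  Positional (struct/list) pointers must be re-targeted with transferPointer();
  // FAR and OTHER pointers are position-independent and can be copied verbatim.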
  static void adopt(SegmentBuilder* segment, CapTableBuilder* capTable,
                    WirePointer* ref, OrphanBuilder&& value) {
    KJ_REQUIRE(value.segment == nullptr || value.segment->getArena() == segment->getArena(),
               "Adopted object must live in the same message.");

    if (!ref->isNull()) {
      zeroObject(segment, capTable, ref);
    }

    if (value == nullptr) {
      // Set null.
      memset(ref, 0, sizeof(*ref));
    } else if (value.tagAsPtr()->isPositional()) {
      WireHelpers::transferPointer(segment, ref, value.segment, value.tagAsPtr(), value.location);
    } else {
      // FAR and OTHER pointers are position-independent, so we can just copy.
      memcpy(ref, value.tagAsPtr(), sizeof(WirePointer));
    }

    // Take ownership away from the OrphanBuilder.
    memset(value.tagAsPtr(), 0, sizeof(WirePointer));
    value.location = nullptr;
    value.segment = nullptr;
  }

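  // Inverse of adopt(): detaches the object `ref` points to into an OrphanBuilder and zeros out
  // `ref` so the message no longer references it.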
  static OrphanBuilder disown(SegmentBuilder* segment, CapTableBuilder* capTable,
                              WirePointer* ref) {
    word* location;

    if (ref->isNull()) {
      location = nullptr;
    } else if (ref->kind() == WirePointer::OTHER) {
      KJ_REQUIRE(ref->isCapability(), "Unknown pointer type.") { break; }
      location = reinterpret_cast<word*>(1);  // dummy so that it is non-null
    } else {
      WirePointer* refCopy = ref;
      location = followFarsNoWritableCheck(refCopy, ref->target(), segment);
    }

    OrphanBuilder result(ref, segment, capTable, location);

    if (!ref->isNull() && ref->isPositional()) {
      result.tagAsPtr()->setKindForOrphan(ref->kind());
    }

    // Zero out the pointer that was disowned.
    memset(ref, 0, sizeof(*ref));

    return result;
  }

  // -----------------------------------------------------------------

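  // Read-side helpers.  Unlike the builder paths above, these operate on incoming (untrusted)
  // data, so each pointer is validated -- kind, bounds, nesting limit, and (for lists)
  // amplification -- before a reader is returned; on any failure they fall back to the default
  // value.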
  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      int nestingLimit)) {
    return readStructPointer(segment, capTable, ref, ref->target(), defaultValue, nestingLimit);
  }

  static KJ_ALWAYS_INLINE(StructReader readStructPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, int nestingLimit)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return StructReader();
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target();
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }

    const word* ptr = followFars(ref, refTarget, segment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported the error.
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::STRUCT,
               "Message contains non-struct pointer where struct pointer was expected.") {
      goto useDefault;
    }

    KJ_REQUIRE(boundsCheck(segment, ptr, ptr + ref->structRef.wordSize()),
               "Message contained out-of-bounds struct pointer.") {
      goto useDefault;
    }

    return StructReader(
        segment, capTable,
        ptr, reinterpret_cast<const WirePointer*>(ptr + ref->structRef.dataSize.get()),
        ref->structRef.dataSize.get() * BITS_PER_WORD,
        ref->structRef.ptrCount.get(),
        nestingLimit - 1);
  }

#if !CAPNP_LITE
  static KJ_ALWAYS_INLINE(kj::Own<ClientHook> readCapabilityPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, int nestingLimit)) {
    kj::Maybe<kj::Own<ClientHook>> maybeCap;

    KJ_REQUIRE(brokenCapFactory != nullptr,
               "Trying to read capabilities without ever having created a capability context.  "
               "To read capabilities from a message, you must imbue it with CapReaderContext, or "
               "use the Cap'n Proto RPC system.");

    if (ref->isNull()) {
      return brokenCapFactory->newNullCap();
    } else if (!ref->isCapability()) {
      KJ_FAIL_REQUIRE(
          "Message contains non-capability pointer where capability pointer was expected.") {
        break;
      }
      return brokenCapFactory->newBrokenCap(
          "Calling capability extracted from a non-capability pointer.");
    } else KJ_IF_MAYBE(cap, capTable->extractCap(ref->capRef.index.get())) {
      return kj::mv(*cap);
    } else {
      KJ_FAIL_REQUIRE("Message contains invalid capability pointer.") {
        break;
      }
      return brokenCapFactory->newBrokenCap("Calling invalid capability pointer.");
    }
  }
#endif  // !CAPNP_LITE

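  // Reads a list pointer, checking bounds, the amplification limit, and (optionally) that the
  // stored element size is compatible with `expectedElementSize`.  When a primitive or pointer
  // list is expected but an INLINE_COMPOSITE struct list is found, the returned reader is
  // adjusted so the first field of each struct can be read as if it were a flat list element.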
  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* defaultValue,
      ElementSize expectedElementSize, int nestingLimit, bool checkElementSize = true)) {
    return readListPointer(segment, capTable, ref, ref->target(), defaultValue,
                           expectedElementSize, nestingLimit, checkElementSize);
  }

  static KJ_ALWAYS_INLINE(ListReader readListPointer(
      SegmentReader* segment, CapTableReader* capTable,
      const WirePointer* ref, const word* refTarget,
      const word* defaultValue, ElementSize expectedElementSize, int nestingLimit,
      bool checkElementSize = true)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr ||
          reinterpret_cast<const WirePointer*>(defaultValue)->isNull()) {
        return ListReader(expectedElementSize);
      }
      segment = nullptr;
      ref = reinterpret_cast<const WirePointer*>(defaultValue);
      refTarget = ref->target();
      defaultValue = nullptr;  // If the default value is itself invalid, don't use it again.
    }

    KJ_REQUIRE(nestingLimit > 0,
               "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
      goto useDefault;
    }

    const word* ptr = followFars(ref, refTarget, segment);
    if (KJ_UNLIKELY(ptr == nullptr)) {
      // Already reported error.
      goto useDefault;
    }

    KJ_REQUIRE(ref->kind() == WirePointer::LIST,
               "Message contains non-list pointer where list pointer was expected.") {
      goto useDefault;
    }

    ElementSize elementSize = ref->listRef.elementSize();
    if (elementSize == ElementSize::INLINE_COMPOSITE) {
#if _MSC_VER
      // TODO(msvc): MSVC thinks decltype(WORDS/ELEMENTS) is a const type. /eyeroll
      uint wordsPerElement;
#else
      decltype(WORDS/ELEMENTS) wordsPerElement;
#endif
      ElementCount size;

      WordCount wordCount = ref->listRef.inlineCompositeWordCount();

      // An INLINE_COMPOSITE list points to a tag, which is formatted like a pointer.
      const WirePointer* tag = reinterpret_cast<const WirePointer*>(ptr);
      ptr += POINTER_SIZE_IN_WORDS;

      KJ_REQUIRE(boundsCheck(segment, ptr - POINTER_SIZE_IN_WORDS, ptr + wordCount),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
                 "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
        goto useDefault;
      }

      size = tag->inlineCompositeListElementCount();
      wordsPerElement = tag->structRef.wordSize() / ELEMENTS;

      KJ_REQUIRE(ElementCount64(size) * wordsPerElement <= wordCount,
                 "INLINE_COMPOSITE list's elements overrun its word count.") {
        goto useDefault;
      }

      if (wordsPerElement * (1 * ELEMENTS) == 0 * WORDS) {
        // Watch out for lists of zero-sized structs, which can claim to be arbitrarily large
        // without having sent actual data.
        KJ_REQUIRE(amplifiedRead(segment, size * (1 * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        // If a struct list was not expected, then presumably a non-struct list was upgraded to a
        // struct list.  We need to manipulate the pointer to point at the first field of the
        // struct.  Together with the "stepBits", this will allow the struct list to be accessed as
        // if it were a primitive list without branching.

        // Check whether the size is compatible.
        switch (expectedElementSize) {
          case ElementSize::VOID:
            break;

          case ElementSize::BIT:
            KJ_FAIL_REQUIRE(
                "Found struct list where bit list was expected; upgrading boolean lists to structs "
                "is no longer supported.") {
              goto useDefault;
            }
            break;

          case ElementSize::BYTE:
          case ElementSize::TWO_BYTES:
          case ElementSize::FOUR_BYTES:
          case ElementSize::EIGHT_BYTES:
            KJ_REQUIRE(tag->structRef.dataSize.get() > 0 * WORDS,
                       "Expected a primitive list, but got a list of pointer-only structs.") {
              goto useDefault;
            }
            break;

          case ElementSize::POINTER:
            // We expected a list of pointers but got a list of structs.  Assuming the first field
            // in the struct is the pointer we were looking for, we want to munge the pointer to
            // point at the first element's pointer section.
            ptr += tag->structRef.dataSize.get();
            KJ_REQUIRE(tag->structRef.ptrCount.get() > 0 * POINTERS,
                       "Expected a pointer list, but got a list of data-only structs.") {
              goto useDefault;
            }
            break;

          case ElementSize::INLINE_COMPOSITE:
            break;
        }
      }

      return ListReader(
          segment, capTable, ptr, size, wordsPerElement * BITS_PER_WORD,
          tag->structRef.dataSize.get() * BITS_PER_WORD,
          tag->structRef.ptrCount.get(), ElementSize::INLINE_COMPOSITE,
          nestingLimit - 1);

    } else {
      // This is a primitive or pointer list, but all such lists can also be interpreted as struct
      // lists.  We need to compute the data size and pointer count for such structs.
      BitCount dataSize = dataBitsPerElement(ref->listRef.elementSize()) * ELEMENTS;
      WirePointerCount pointerCount =
          pointersPerElement(ref->listRef.elementSize()) * ELEMENTS;
      ElementCount elementCount = ref->listRef.elementCount();
      auto step = (dataSize + pointerCount * BITS_PER_POINTER) / ELEMENTS;

      WordCount wordCount = roundBitsUpToWords(ElementCount64(elementCount) * step);
      KJ_REQUIRE(boundsCheck(segment, ptr, ptr + wordCount),
                 "Message contains out-of-bounds list pointer.") {
        goto useDefault;
      }

      if (elementSize == ElementSize::VOID) {
        // Watch out for lists of void, which can claim to be arbitrarily large without having sent
        // actual data.
        KJ_REQUIRE(amplifiedRead(segment, elementCount * (1 * WORDS / ELEMENTS)),
                   "Message contains amplified list pointer.") {
          goto useDefault;
        }
      }

      if (checkElementSize) {
        if (elementSize == ElementSize::BIT && expectedElementSize != ElementSize::BIT) {
          KJ_FAIL_REQUIRE(
              "Found bit list where struct list was expected; upgrading boolean lists to structs "
              "is no longer supported.") {
            goto useDefault;
          }
        }

        // Verify that the elements are at least as large as the expected type.  Note that if we
        // expected INLINE_COMPOSITE, the expected sizes here will be zero, because bounds checking
        // will be performed at field access time.  So this check here is for the case where we
        // expected a list of some primitive or pointer type.

        BitCount expectedDataBitsPerElement =
            dataBitsPerElement(expectedElementSize) * ELEMENTS;
        WirePointerCount expectedPointersPerElement =
            pointersPerElement(expectedElementSize) * ELEMENTS;

        KJ_REQUIRE(expectedDataBitsPerElement <= dataSize,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
        KJ_REQUIRE(expectedPointersPerElement <= pointerCount,
                   "Message contained list with incompatible element type.") {
          goto useDefault;
        }
      }

      return ListReader(segment, capTable, ptr, elementCount, step,
                        dataSize, pointerCount, elementSize, nestingLimit - 1);
    }
  }

  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, ByteCount defaultSize)) {
    return readTextPointer(segment, ref, ref->target(), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Text::Reader readTextPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      if (defaultValue == nullptr) defaultValue = "";
      return Text::Reader(reinterpret_cast<const char*>(defaultValue), defaultSize / BYTES);
    } else {
      const word* ptr = followFars(ref, refTarget, segment);

      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      uint size = ref->listRef.elementCount() / ELEMENTS;

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where text was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, ptr +
                     roundBytesUpToWords(ref->listRef.elementCount() * (1 * BYTES / ELEMENTS))),
                 "Message contained out-of-bounds text pointer.") {
        goto useDefault;
      }

      KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      const char* cptr = reinterpret_cast<const char*>(ptr);
      --size;  // NUL terminator

      KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
        goto useDefault;
      }

      return Text::Reader(cptr, size);
    }
  }

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref,
      const void* defaultValue, ByteCount defaultSize)) {
    return readDataPointer(segment, ref, ref->target(), defaultValue, defaultSize);
  }

  static KJ_ALWAYS_INLINE(Data::Reader readDataPointer(
      SegmentReader* segment, const WirePointer* ref, const word* refTarget,
      const void* defaultValue, ByteCount defaultSize)) {
    if (ref->isNull()) {
    useDefault:
      return Data::Reader(reinterpret_cast<const byte*>(defaultValue), defaultSize / BYTES);
    } else {
      const word* ptr = followFars(ref, refTarget, segment);

      if (KJ_UNLIKELY(ptr == nullptr)) {
        // Already reported error.
        goto useDefault;
      }

      uint size = ref->listRef.elementCount() / ELEMENTS;

      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
                 "Message contains non-list pointer where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(ref->listRef.elementSize() == ElementSize::BYTE,
                 "Message contains list pointer of non-bytes where data was expected.") {
        goto useDefault;
      }

      KJ_REQUIRE(boundsCheck(segment, ptr, ptr +
                     roundBytesUpToWords(ref->listRef.elementCount() * (1 * BYTES / ELEMENTS))),
                 "Message contained out-of-bounds data pointer.") {
        goto useDefault;
      }

      return Data::Reader(reinterpret_cast<const byte*>(ptr), size);
    }
  }
};

// =======================================================================================
// PointerBuilder

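// PointerBuilder and PointerReader are thin adapters: each method forwards to the corresponding
// WireHelpers routine, supplying this pointer's segment and capability table.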
StructBuilder PointerBuilder::initStruct(StructSize size) {
  return WireHelpers::initStructPointer(pointer, segment, capTable, size);
}

StructBuilder PointerBuilder::getStruct(StructSize size, const word* defaultValue) {
  return WireHelpers::getWritableStructPointer(pointer, segment, capTable, size, defaultValue);
}

ListBuilder PointerBuilder::initList(ElementSize elementSize, ElementCount elementCount) {
  return WireHelpers::initListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::initStructList(ElementCount elementCount, StructSize elementSize) {
  return WireHelpers::initStructListPointer(pointer, segment, capTable, elementCount, elementSize);
}

ListBuilder PointerBuilder::getList(ElementSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableListPointer(pointer, segment, capTable, elementSize, defaultValue);
}

ListBuilder PointerBuilder::getStructList(StructSize elementSize, const word* defaultValue) {
  return WireHelpers::getWritableStructListPointer(
      pointer, segment, capTable, elementSize, defaultValue);
}

ListBuilder PointerBuilder::getListAnySize(const word* defaultValue) {
  return WireHelpers::getWritableListPointerAnySize(pointer, segment, capTable, defaultValue);
}

template <>
Text::Builder PointerBuilder::initBlob<Text>(ByteCount size) {
  return WireHelpers::initTextPointer(pointer, segment, capTable, size).value;
}
template <>
void PointerBuilder::setBlob<Text>(Text::Reader value) {
  WireHelpers::setTextPointer(pointer, segment, capTable, value);
}
template <>
Text::Builder PointerBuilder::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableTextPointer(pointer, segment, capTable, defaultValue, defaultSize);
}

template <>
Data::Builder PointerBuilder::initBlob<Data>(ByteCount size) {
  return WireHelpers::initDataPointer(pointer, segment, capTable, size).value;
}
template <>
void PointerBuilder::setBlob<Data>(Data::Reader value) {
  WireHelpers::setDataPointer(pointer, segment, capTable, value);
}
template <>
Data::Builder PointerBuilder::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) {
  return WireHelpers::getWritableDataPointer(pointer, segment, capTable, defaultValue, defaultSize);
}

void PointerBuilder::setStruct(const StructReader& value) {
  WireHelpers::setStructPointer(segment, capTable, pointer, value);
}

void PointerBuilder::setList(const ListReader& value) {
  WireHelpers::setListPointer(segment, capTable, pointer, value);
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerBuilder::getCapability() {
  return WireHelpers::readCapabilityPointer(
      segment, capTable, pointer, kj::maxValue);
}

void PointerBuilder::setCapability(kj::Own<ClientHook>&& cap) {
  WireHelpers::setCapabilityPointer(segment, capTable, pointer, kj::mv(cap));
}
#endif  // !CAPNP_LITE

void PointerBuilder::adopt(OrphanBuilder&& value) {
  WireHelpers::adopt(segment, capTable, pointer, kj::mv(value));
}

OrphanBuilder PointerBuilder::disown() {
  return WireHelpers::disown(segment, capTable, pointer);
}

void PointerBuilder::clear() {
  WireHelpers::zeroObject(segment, capTable, pointer);
  memset(pointer, 0, sizeof(WirePointer));
}

PointerType PointerBuilder::getPointerType() {
  if(pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    WirePointer* ptr = pointer;
    WireHelpers::followFars(ptr, ptr->target(), segment);
    switch(ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?");
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}

void PointerBuilder::transferFrom(PointerBuilder other) {
  if (!pointer->isNull()) {
    WireHelpers::zeroObject(segment, capTable, pointer);
    memset(pointer, 0, sizeof(*pointer));
  }
  WireHelpers::transferPointer(segment, pointer, other.segment, other.pointer);
  memset(other.pointer, 0, sizeof(*other.pointer));
}

void PointerBuilder::copyFrom(PointerReader other) {
  if (other.pointer == nullptr) {
    if (!pointer->isNull()) {
      WireHelpers::zeroObject(segment, capTable, pointer);
      memset(pointer, 0, sizeof(*pointer));
    }
  } else {
    WireHelpers::copyPointer(segment, capTable, pointer,
                             other.segment, other.capTable, other.pointer, other.nestingLimit);
  }
}

PointerReader PointerBuilder::asReader() const {
  return PointerReader(segment, capTable, pointer, kj::maxValue);
}

BuilderArena* PointerBuilder::getArena() const {
  return segment->getArena();
}

CapTableBuilder* PointerBuilder::getCapTable() {
  return capTable;
}

PointerBuilder PointerBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// PointerReader

PointerReader PointerReader::getRoot(SegmentReader* segment, CapTableReader* capTable,
                                     const word* location, int nestingLimit) {
  KJ_REQUIRE(WireHelpers::boundsCheck(segment, location, location + POINTER_SIZE_IN_WORDS),
             "Root location out-of-bounds.") {
    location = nullptr;
  }

  return PointerReader(segment, capTable,
      reinterpret_cast<const WirePointer*>(location), nestingLimit);
}

StructReader PointerReader::getStruct(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readStructPointer(segment, capTable, ref, defaultValue, nestingLimit);
}

ListReader PointerReader::getList(ElementSize expectedElementSize, const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, expectedElementSize, nestingLimit);
}

ListReader PointerReader::getListAnySize(const word* defaultValue) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readListPointer(
      segment, capTable, ref, defaultValue, ElementSize::VOID /* dummy */, nestingLimit, false);
}

template <>
Text::Reader PointerReader::getBlob<Text>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readTextPointer(segment, ref, defaultValue, defaultSize);
}

template <>
Data::Reader PointerReader::getBlob<Data>(const void* defaultValue, ByteCount defaultSize) const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readDataPointer(segment, ref, defaultValue, defaultSize);
}

#if !CAPNP_LITE
kj::Own<ClientHook> PointerReader::getCapability() const {
  const WirePointer* ref = pointer == nullptr ? &zero.pointer : pointer;
  return WireHelpers::readCapabilityPointer(segment, capTable, ref, nestingLimit);
}
#endif  // !CAPNP_LITE

const word* PointerReader::getUnchecked() const {
  KJ_REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
  return reinterpret_cast<const word*>(pointer);
}

MessageSizeCounts PointerReader::targetSize() const {
  return pointer == nullptr ? MessageSizeCounts { 0 * WORDS, 0 }
                            : WireHelpers::totalSize(segment, pointer, nestingLimit);
}

PointerType PointerReader::getPointerType() const {
  if(pointer == nullptr || pointer->isNull()) {
    return PointerType::NULL_;
  } else {
    word* refTarget = nullptr;
    const WirePointer* ptr = pointer;
    SegmentReader* sgmt = segment;
    WireHelpers::followFars(ptr, refTarget, sgmt);
    switch (ptr->kind()) {
      case WirePointer::FAR:
        KJ_FAIL_ASSERT("far pointer not followed?");
      case WirePointer::STRUCT:
        return PointerType::STRUCT;
      case WirePointer::LIST:
        return PointerType::LIST;
      case WirePointer::OTHER:
        KJ_REQUIRE(ptr->isCapability(), "unknown pointer type");
        return PointerType::CAPABILITY;
    }
    KJ_UNREACHABLE;
  }
}

kj::Maybe<Arena&> PointerReader::getArena() const {
  return segment == nullptr ? nullptr : segment->getArena();
}

CapTableReader* PointerReader::getCapTable() {
  return capTable;
}

PointerReader PointerReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructBuilder

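// Zeroes the data section (special-casing single-bit structs), recursively zeroes every object
// the pointer section points at, then clears the pointers themselves.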
void StructBuilder::clearAll() {
  if (dataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, false);
  } else {
    memset(data, 0, dataSize / BITS_PER_BYTE / BYTES);
  }

  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);
}

void StructBuilder::transferContentFrom(StructBuilder other) {
  // Determine the amount of data the builders have in common.
  BitCount sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == 1 * BITS) {
      setDataField<bool>(0 * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE / BYTES;
      memset(unshared, 0, (dataSize - sharedDataSize) / BITS_PER_BYTE / BYTES);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, other.getDataField<bool>(0 * ELEMENTS));
  } else {
    memcpy(data, other.data, sharedDataSize / BITS_PER_BYTE / BYTES);
  }

  // Zero out all pointers in the target.
  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);

  // Transfer the pointers.
  WirePointerCount sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (uint i = 0; i < sharedPointerCount / POINTERS; i++) {
    WireHelpers::transferPointer(segment, pointers + i, other.segment, other.pointers + i);
  }

  // Zero out the pointers that were transferred in the source because it no longer has ownership.
  // If the source had any extra pointers that the destination didn't have space for, we
  // intentionally leave them be, so that they'll be cleaned up later.
  memset(other.pointers, 0, sharedPointerCount * BYTES_PER_POINTER / BYTES);
}

void StructBuilder::copyContentFrom(StructReader other) {
  // Determine the amount of data the builders have in common.
  BitCount sharedDataSize = kj::min(dataSize, other.dataSize);

  if (dataSize > sharedDataSize) {
    // Since the target is larger than the source, make sure to zero out the extra bits that the
    // source doesn't have.
    if (dataSize == 1 * BITS) {
      setDataField<bool>(0 * ELEMENTS, false);
    } else {
      byte* unshared = reinterpret_cast<byte*>(data) + sharedDataSize / BITS_PER_BYTE / BYTES;
      memset(unshared, 0, (dataSize - sharedDataSize) / BITS_PER_BYTE / BYTES);
    }
  }

  // Copy over the shared part.
  if (sharedDataSize == 1 * BITS) {
    setDataField<bool>(0 * ELEMENTS, other.getDataField<bool>(0 * ELEMENTS));
  } else {
    memcpy(data, other.data, sharedDataSize / BITS_PER_BYTE / BYTES);
  }

  // Zero out all pointers in the target.
  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    WireHelpers::zeroObject(segment, capTable, pointers + i);
  }
  memset(pointers, 0, pointerCount * BYTES_PER_POINTER / BYTES);

  // Copy the pointers.
  WirePointerCount sharedPointerCount = kj::min(pointerCount, other.pointerCount);
  for (uint i = 0; i < sharedPointerCount / POINTERS; i++) {
    WireHelpers::copyPointer(segment, capTable, pointers + i,
        other.segment, other.capTable, other.pointers + i, other.nestingLimit);
  }
}

StructReader StructBuilder::asReader() const {
  return StructReader(segment, capTable, data, pointers,
      dataSize, pointerCount, kj::maxValue);
}

BuilderArena* StructBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* StructBuilder::getCapTable() {
  return capTable;
}

StructBuilder StructBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// StructReader

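// Counts the words occupied by this struct's data and pointer sections plus everything its
// pointers target.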
MessageSizeCounts StructReader::totalSize() const {
  MessageSizeCounts result = {
    WireHelpers::roundBitsUpToWords(dataSize) + pointerCount * WORDS_PER_POINTER, 0 };

  for (uint i = 0; i < pointerCount / POINTERS; i++) {
    result += WireHelpers::totalSize(segment, pointers + i, nestingLimit);
  }

  if (segment != nullptr) {
    // This traversal should not count against the read limit, because it's highly likely that
    // the caller is going to traverse the object again, e.g. to copy it.
    segment->unread(result.wordCount);
  }

  return result;
}

CapTableReader* StructReader::getCapTable() {
  return capTable;
}

StructReader StructReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListBuilder

Text::Builder ListBuilder::asText() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Builder();
  }

  size_t size = elementCount / ELEMENTS;

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  char* cptr = reinterpret_cast<char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Builder();
  }

  return Text::Builder(cptr, size);
}

Data::Builder ListBuilder::asData() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Builder();
  }

  return Data::Builder(reinterpret_cast<byte*>(ptr), elementCount / ELEMENTS);
}

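// `step` is measured in bits per element, so the element's data section begins `index * step`
// bits from the start of the list; its pointer section follows the data section.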
StructBuilder ListBuilder::getStructElement(ElementCount index) {
  BitCount64 indexBit = ElementCount64(index) * step;
  byte* structData = ptr + indexBit / BITS_PER_BYTE;
  KJ_DASSERT(indexBit % BITS_PER_BYTE == 0 * BITS);
  return StructBuilder(segment, capTable, structData,
      reinterpret_cast<WirePointer*>(structData + structDataSize / BITS_PER_BYTE),
      structDataSize, structPointerCount);
}

ListReader ListBuilder::asReader() const {
  return ListReader(segment, capTable, ptr, elementCount, step, structDataSize, structPointerCount,
                    elementSize, kj::maxValue);
}

BuilderArena* ListBuilder::getArena() {
  return segment->getArena();
}

CapTableBuilder* ListBuilder::getCapTable() {
  return capTable;
}

ListBuilder ListBuilder::imbue(CapTableBuilder* capTable) {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// ListReader

Text::Reader ListReader::asText() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Text, got list of non-bytes.") {
    return Text::Reader();
  }

  size_t size = elementCount / ELEMENTS;

  KJ_REQUIRE(size > 0, "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  const char* cptr = reinterpret_cast<const char*>(ptr);
  --size;  // NUL terminator

  KJ_REQUIRE(cptr[size] == '\0', "Message contains text that is not NUL-terminated.") {
    return Text::Reader();
  }

  return Text::Reader(cptr, size);
}

Data::Reader ListReader::asData() {
  KJ_REQUIRE(structDataSize == 8 * BITS && structPointerCount == 0 * POINTERS,
             "Expected Data, got list of non-bytes.") {
    return Data::Reader();
  }

  return Data::Reader(reinterpret_cast<const byte*>(ptr), elementCount / ELEMENTS);
}

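// Exposes the list's data section as raw bytes; only valid when the elements contain no
// pointers.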
kj::ArrayPtr<const byte> ListReader::asRawBytes() {
  KJ_REQUIRE(structPointerCount == 0 * POINTERS,
             "Expected data only, got pointers.") {
    return kj::ArrayPtr<const byte>();
  }

  return kj::ArrayPtr<const byte>(reinterpret_cast<const byte*>(ptr),
      WireHelpers::roundBitsUpToBytes(elementCount * (structDataSize / ELEMENTS)) / BYTES);
}

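// Read-only counterpart of ListBuilder::getStructElement(); also enforces the nesting limit,
// which guards against maliciously deep or cyclic messages.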
StructReader ListReader::getStructElement(ElementCount index) const {
  KJ_REQUIRE(nestingLimit > 0,
             "Message is too deeply-nested or contains cycles.  See capnp::ReaderOptions.") {
    return StructReader();
  }

  BitCount64 indexBit = ElementCount64(index) * step;
  const byte* structData = ptr + indexBit / BITS_PER_BYTE;
  const WirePointer* structPointers =
      reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);

  // This check should pass if there are no bugs in the list pointer validation code.
  KJ_DASSERT(structPointerCount == 0 * POINTERS ||
         (uintptr_t)structPointers % sizeof(void*) == 0,
         "Pointer section of struct list element not aligned.");

  KJ_DASSERT(indexBit % BITS_PER_BYTE == 0 * BITS);
  return StructReader(
      segment, capTable, structData, structPointers,
      structDataSize, structPointerCount,
      nestingLimit - 1);
}

CapTableReader* ListReader::getCapTable() {
  return capTable;
}

ListReader ListReader::imbue(CapTableReader* capTable) const {
  auto result = *this;
  result.capTable = capTable;
  return result;
}

// =======================================================================================
// OrphanBuilder

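// Each of the init*() and copy() factories below allocates the new object in `arena` and records
// the resulting tag, segment, and location so that the orphan can later be adopted into a
// message.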
OrphanBuilder OrphanBuilder::initStruct(
    BuilderArena* arena, CapTableBuilder* capTable, StructSize size) {
  OrphanBuilder result;
  StructBuilder builder = WireHelpers::initStructPointer(
      result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, ElementSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initStructList(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementCount elementCount, StructSize elementSize) {
  OrphanBuilder result;
  ListBuilder builder = WireHelpers::initStructListPointer(
      result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::initText(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initTextPointer(result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::initData(
    BuilderArena* arena, CapTableBuilder* capTable, ByteCount size) {
  OrphanBuilder result;
  auto allocation = WireHelpers::initDataPointer(result.tagAsPtr(), nullptr, capTable, size, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setStructPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setListPointer(
      nullptr, capTable, result.tagAsPtr(), copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::copyPointer(
      nullptr, capTable, result.tagAsPtr(),
      copyFrom.segment, copyFrom.capTable, copyFrom.pointer, copyFrom.nestingLimit, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value);
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setTextPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom) {
  OrphanBuilder result;
  auto allocation = WireHelpers::setDataPointer(
      result.tagAsPtr(), nullptr, capTable, copyFrom, arena);
  result.segment = allocation.segment;
  result.capTable = capTable;
  result.location = reinterpret_cast<word*>(allocation.value.begin());
  return result;
}

#if !CAPNP_LITE
OrphanBuilder OrphanBuilder::copy(
    BuilderArena* arena, CapTableBuilder* capTable, kj::Own<ClientHook> copyFrom) {
  OrphanBuilder result;
  WireHelpers::setCapabilityPointer(nullptr, capTable, result.tagAsPtr(), kj::mv(copyFrom));
  result.segment = arena->getSegment(SegmentId(0));
  result.capTable = capTable;
  result.location = &result.tag;  // dummy to make location non-null
  return result;
}
#endif  // !CAPNP_LITE

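// Builds one new list containing the elements of all the input lists, upgrading to an
// INLINE_COMPOSITE struct list when the inputs' element sizes don't all match (bit lists can't
// be upgraded).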
OrphanBuilder OrphanBuilder::concat(
    BuilderArena* arena, CapTableBuilder* capTable,
    ElementSize elementSize, StructSize structSize,
    kj::ArrayPtr<const ListReader> lists) {
  KJ_REQUIRE(lists.size() > 0, "Can't concat empty list.");

  // Find the overall element count and size.
  ElementCount elementCount = 0 * ELEMENTS;
  for (auto& list: lists) {
    elementCount += list.elementCount;
    if (list.elementSize != elementSize) {
      // If element sizes don't all match, upgrade to struct list.
      KJ_REQUIRE(list.elementSize != ElementSize::BIT && elementSize != ElementSize::BIT,
                 "can't upgrade bit lists to struct lists");
      elementSize = ElementSize::INLINE_COMPOSITE;
    }
    structSize.data = kj::max(structSize.data,
        WireHelpers::roundBitsUpToWords(list.structDataSize));
    structSize.pointers = kj::max(structSize.pointers, list.structPointerCount);
  }

  // Allocate the list.
  OrphanBuilder result;
  ListBuilder builder = (elementSize == ElementSize::INLINE_COMPOSITE)
      ? WireHelpers::initStructListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, structSize, arena)
      : WireHelpers::initListPointer(
          result.tagAsPtr(), nullptr, capTable, elementCount, elementSize, arena);

  // Copy elements.
  switch (elementSize) {
    case ElementSize::INLINE_COMPOSITE: {
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.getStructElement(pos).copyContentFrom(list.getStructElement(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    case ElementSize::POINTER: {
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.getPointerElement(pos).copyFrom(list.getPointerElement(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    case ElementSize::BIT: {
      // It's difficult to memcpy() bits since a list could start or end mid-byte. For now we
      // do a slow, naive loop. Probably no one will ever care.
      ElementCount pos = 0 * ELEMENTS;
      for (auto& list: lists) {
        for (ElementCount i = 0 * ELEMENTS; i < list.size(); i += 1 * ELEMENTS) {
          builder.setDataElement<bool>(pos, list.getDataElement<bool>(i));
          pos += 1 * ELEMENTS;
        }
      }
      break;
    }
    default: {
      // We know all the inputs had identical size because otherwise we would have chosen
      // INLINE_COMPOSITE. Therefore, we can safely use memcpy() here instead of copying each
      // element manually.
      byte* target = builder.ptr;
      auto step = builder.step / BITS_PER_BYTE;
      for (auto& list: lists) {
        auto count = step * list.size();
        memcpy(target, list.ptr, count / BYTES);
        target += count / BYTES;
      }
      break;
    }
  }

  // Return orphan.
  result.segment = builder.segment;
  result.capTable = capTable;
  result.location = builder.getLocation();
  return result;
}

OrphanBuilder OrphanBuilder::referenceExternalData(BuilderArena* arena, Data::Reader data) {
  KJ_REQUIRE(reinterpret_cast<uintptr_t>(data.begin()) % sizeof(void*) == 0,
             "Cannot referenceExternalData() that is not aligned.");

  auto wordCount = WireHelpers::roundBytesUpToWords(data.size() * BYTES);
  kj::ArrayPtr<const word> words(reinterpret_cast<const word*>(data.begin()), wordCount / WORDS);

  OrphanBuilder result;
  result.tagAsPtr()->setKindForOrphan(WirePointer::LIST);
  result.tagAsPtr()->listRef.set(ElementSize::BYTE, data.size() * ELEMENTS);
  result.segment = arena->addExternalSegment(words);

  // External data cannot possibly contain capabilities.
  result.capTable = nullptr;

  // const_cast OK here because we will check whether the segment is writable when we try to get
  // a builder.
  result.location = const_cast<word*>(words.begin());

  return result;
}

StructBuilder OrphanBuilder::asStruct(StructSize size) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  StructBuilder result = WireHelpers::getWritableStructPointer(
      tagAsPtr(), location, segment, capTable, size, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = reinterpret_cast<word*>(result.data);

  return result;
}

ListBuilder OrphanBuilder::asList(ElementSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  // (Actually, currently this is not true for primitive lists, but let's not turn into a bug if
  // it changes!)
  location = result.getLocation();

  return result;
}

ListBuilder OrphanBuilder::asStructList(StructSize elementSize) {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  ListBuilder result = WireHelpers::getWritableStructListPointer(
      tagAsPtr(), location, segment, capTable, elementSize, nullptr, segment->getArena());

  // Watch out, the pointer could have been updated if the object had to be relocated.
  location = result.getLocation();

  return result;
}

Text::Builder OrphanBuilder::asText() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableTextPointer(
      tagAsPtr(), location, segment, capTable, nullptr, 0 * BYTES);
}

Data::Builder OrphanBuilder::asData() {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));

  // Never relocates.
  return WireHelpers::getWritableDataPointer(
      tagAsPtr(), location, segment, capTable, nullptr, 0 * BYTES);
}

StructReader OrphanBuilder::asStructReader(StructSize size) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readStructPointer(
      segment, capTable, tagAsPtr(), location, nullptr, kj::maxValue);
}

ListReader OrphanBuilder::asListReader(ElementSize elementSize) const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readListPointer(
      segment, capTable, tagAsPtr(), location, nullptr, elementSize, kj::maxValue);
}

#if !CAPNP_LITE
kj::Own<ClientHook> OrphanBuilder::asCapability() const {
  return WireHelpers::readCapabilityPointer(segment, capTable, tagAsPtr(), kj::maxValue);
}
#endif  // !CAPNP_LITE

Text::Reader OrphanBuilder::asTextReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readTextPointer(segment, tagAsPtr(), location, nullptr, 0 * BYTES);
}

Data::Reader OrphanBuilder::asDataReader() const {
  KJ_DASSERT(tagAsPtr()->isNull() == (location == nullptr));
  return WireHelpers::readDataPointer(segment, tagAsPtr(), location, nullptr, 0 * BYTES);
}

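// Resizes the underlying list (or text blob, when isText is true), shrinking in place or growing
// via tryExtend() when possible and reallocating otherwise. Returns false when there is no
// existing list to resize (a null pointer with a non-zero requested size) or the target is not a
// list; the truncate() overloads below then allocate a fresh replacement.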
bool OrphanBuilder::truncate(ElementCount size, bool isText) {
  WirePointer* ref = tagAsPtr();
  SegmentBuilder* segment = this->segment;

  word* target = WireHelpers::followFars(ref, location, segment);

  if (ref->isNull()) {
    // We don't know the right element size, so we can't resize this list.
    return size == 0 * ELEMENTS;
  }

  KJ_REQUIRE(ref->kind() == WirePointer::LIST, "Can't truncate non-list.") {
    return false;
  }

  if (isText) size += 1 * ELEMENTS;

  ElementSize elementSize = ref->listRef.elementSize();

  if (elementSize == ElementSize::INLINE_COMPOSITE) {
    WordCount oldWordCount = ref->listRef.inlineCompositeWordCount();

    WirePointer* tag = reinterpret_cast<WirePointer*>(target);
    ++target;
    KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
               "INLINE_COMPOSITE lists of non-STRUCT type are not supported.") {
      return false;
    }
    StructSize structSize(tag->structRef.dataSize.get(), tag->structRef.ptrCount.get());
    WordCount elementWordCount = structSize.total();

    ElementCount oldSize = tag->inlineCompositeListElementCount();
    word* newEndWord = target + size * (elementWordCount / ELEMENTS);
    word* oldEndWord = target + oldWordCount;

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (uint i = size / ELEMENTS; i < oldSize / ELEMENTS; i++) {
        WireHelpers::zeroObject(segment, capTable, tag, target + i * elementWordCount);
      }
      ref->listRef.setInlineComposite(size * (elementWordCount / ELEMENTS));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else if (newEndWord <= oldEndWord) {
      // Apparently the old list was over-allocated? The word count is more than needed to store
      // the elements. This is "valid" but shouldn't happen in practice unless someone is toying
      // with us.
      word* expectedEnd = target + oldSize * (elementWordCount / ELEMENTS);
      KJ_ASSERT(newEndWord >= expectedEnd);
      memset(expectedEnd, 0, (newEndWord - expectedEnd) * sizeof(word));
      tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.setInlineComposite(size * (elementWordCount / ELEMENTS));
        tag->setKindAndInlineCompositeListElementCount(WirePointer::STRUCT, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initStructList(segment->getArena(), capTable, size, structSize);

        ListBuilder newList = replacement.asStructList(structSize);
        word* element = target;
        for (uint i = 0; i < oldSize / ELEMENTS; i++) {
          newList.getStructElement(i * ELEMENTS).transferContentFrom(
              StructBuilder(segment, capTable, element,
                            reinterpret_cast<WirePointer*>(element + structSize.data),
                            structSize.data * BITS_PER_WORD, structSize.pointers));
          element += elementWordCount;
        }

        *this = kj::mv(replacement);
      }
    }
  } else if (elementSize == ElementSize::POINTER) {
    auto oldSize = ref->listRef.elementCount();
    word* newEndWord = target + size * (POINTER_SIZE_IN_WORDS / ELEMENTS);
    word* oldEndWord = target + oldSize * (POINTER_SIZE_IN_WORDS / ELEMENTS);

    if (size <= oldSize) {
      // Zero the trailing elements.
      for (WirePointer* element = reinterpret_cast<WirePointer*>(newEndWord);
           element < reinterpret_cast<WirePointer*>(oldEndWord); ++element) {
        WireHelpers::zeroPointerAndFars(segment, element);
      }
      ref->listRef.set(ElementSize::POINTER, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the new memory is already zero'd.
        ref->listRef.set(ElementSize::POINTER, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(
            segment->getArena(), capTable, size, ElementSize::POINTER);
        ListBuilder newList = replacement.asList(ElementSize::POINTER);
        WirePointer* oldPointers = reinterpret_cast<WirePointer*>(target);
        for (uint i = 0; i < oldSize / ELEMENTS; i++) {
          newList.getPointerElement(i * ELEMENTS).transferFrom(
              PointerBuilder(segment, capTable, oldPointers + i));
        }
        *this = kj::mv(replacement);
      }
    }
  } else {
    auto oldSize = ref->listRef.elementCount();
    auto step = dataBitsPerElement(elementSize);
    word* newEndWord = target + WireHelpers::roundBitsUpToWords(size * step);
    word* oldEndWord = target + WireHelpers::roundBitsUpToWords(oldSize * step);

    if (size <= oldSize) {
      // When truncating text, we want to set the null terminator as well, so we'll do our zeroing
      // at the byte level.
      byte* begin = reinterpret_cast<byte*>(target);
      byte* newEndByte = begin + WireHelpers::roundBitsUpToBytes(size * step) - isText;
      byte* oldEndByte = reinterpret_cast<byte*>(oldEndWord);

      memset(newEndByte, 0, oldEndByte - newEndByte);
      ref->listRef.set(elementSize, size);
      segment->tryTruncate(oldEndWord, newEndWord);
    } else {
      // We're trying to extend, not truncate.
      if (segment->tryExtend(oldEndWord, newEndWord)) {
        // Done in-place. Nothing else to do now; the memory is already zero'd.
        ref->listRef.set(elementSize, size);
      } else {
        // Need to re-allocate and transfer.
        OrphanBuilder replacement = initList(segment->getArena(), capTable, size, elementSize);
        ListBuilder newList = replacement.asList(elementSize);
        auto words = WireHelpers::roundBitsUpToWords(dataBitsPerElement(elementSize) * oldSize);
        memcpy(newList.ptr, target, words * BYTES_PER_WORD / BYTES);
        *this = kj::mv(replacement);
      }
    }
  }

  return true;
}

void OrphanBuilder::truncate(ElementCount size, ElementSize elementSize) {
  if (!truncate(size, false)) {
    *this = initList(segment->getArena(), capTable, size, elementSize);
  }
}

void OrphanBuilder::truncate(ElementCount size, StructSize elementSize) {
  if (!truncate(size, false)) {
    *this = initStructList(segment->getArena(), capTable, size, elementSize);
  }
}

void OrphanBuilder::truncateText(ElementCount size) {
  if (!truncate(size, true)) {
    *this = initText(segment->getArena(), capTable, size * (1 * BYTES / ELEMENTS));
  }
}

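// Zeroes the object this orphan owns and resets the tag, segment, and location, leaving the
// OrphanBuilder null.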
void OrphanBuilder::euthanize() {
  // Carefully catch any exceptions and rethrow them as recoverable exceptions since we may be in
  // a destructor.
  auto exception = kj::runCatchingExceptions([&]() {
    if (tagAsPtr()->isPositional()) {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr(), location);
    } else {
      WireHelpers::zeroObject(segment, capTable, tagAsPtr());
    }

    memset(&tag, 0, sizeof(tag));
    segment = nullptr;
    location = nullptr;
  });

  KJ_IF_MAYBE(e, exception) {
    kj::getExceptionCallback().onRecoverableException(kj::mv(*e));
  }
}

}  // namespace _ (private)
}  // namespace capnp