// Copyright (c) 2013-2016 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

// This file is NOT intended for use by clients, except in generated code.
//
// This file defines low-level, non-type-safe classes for traversing the Cap'n Proto memory layout
// (which is also its wire format).  Code generated by the Cap'n Proto compiler uses these classes,
// as do other parts of the Cap'n Proto library which provide a higher-level interface for
// dynamic introspection.

#ifndef CAPNP_LAYOUT_H_
#define CAPNP_LAYOUT_H_

#if defined(__GNUC__) && !defined(CAPNP_HEADER_WARNINGS)
#pragma GCC system_header
#endif

#include <kj/common.h>
#include <kj/memory.h>
#include "common.h"
#include "blob.h"
#include "endian.h"

#if (defined(__mips__) || defined(__hppa__)) && !defined(CAPNP_CANONICALIZE_NAN)
#define CAPNP_CANONICALIZE_NAN 1
// Explicitly detect NaNs and canonicalize them to the quiet NaN value as would be returned by
// __builtin_nan("") on systems implementing the IEEE-754 recommended (but not required) NaN
// signalling/quiet differentiation (such as x86).  Unfortunately, some architectures -- in
// particular, MIPS -- represent quiet vs. signalling nans differently than the rest of the world.
// Canonicalizing them makes output consistent (which is important!), but hurts performance
// slightly.
//
// Note that trying to convert MIPS NaNs to standard NaNs without losing data doesn't work.
// Signaling vs. quiet is indicated by a bit, with the meaning being the opposite on MIPS vs.
// everyone else.  It would be great if we could just flip that bit, but we can't, because if the
// significand is all-zero, then the value is infinity rather than NaN.  This means that on most
// machines, where the bit indicates quietness, there is one more quiet NaN value than signalling
// NaN value, whereas on MIPS there is one more sNaN than qNaN, and thus there is no isomorphic
// mapping that properly preserves quietness.  Instead of doing something hacky, we just give up
// and blow away NaN payloads, because no one uses them anyway.
#endif

namespace capnp {

#if !CAPNP_LITE
class ClientHook;
#endif  // !CAPNP_LITE

namespace _ {  // private

class PointerBuilder;
class PointerReader;
class StructBuilder;
class StructReader;
class ListBuilder;
class ListReader;
class OrphanBuilder;
struct WirePointer;
struct WireHelpers;
class SegmentReader;
class SegmentBuilder;
class Arena;
class BuilderArena;

// =============================================================================

#if CAPNP_DEBUG_TYPES
typedef kj::UnitRatio<kj::Bounded<64, uint>, BitLabel, ElementLabel> BitsPerElementTableType;
#else
typedef uint BitsPerElementTableType;
#endif

static constexpr BitsPerElementTableType BITS_PER_ELEMENT_TABLE[8] = {
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 1>() * BITS / ELEMENTS,
  bounded< 8>() * BITS / ELEMENTS,
  bounded<16>() * BITS / ELEMENTS,
  bounded<32>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS
};

inline KJ_CONSTEXPR() BitsPerElementTableType dataBitsPerElement(ElementSize size) {
  return _::BITS_PER_ELEMENT_TABLE[static_cast<int>(size)];
}

inline constexpr PointersPerElementN<1> pointersPerElement(ElementSize size) {
  return size == ElementSize::POINTER
      ? PointersPerElementN<1>(ONE * POINTERS / ELEMENTS)
      : PointersPerElementN<1>(ZERO * POINTERS / ELEMENTS);
}

static constexpr BitsPerElementTableType BITS_PER_ELEMENT_INCLUDING_PONITERS_TABLE[8] = {
  bounded< 0>() * BITS / ELEMENTS,
  bounded< 1>() * BITS / ELEMENTS,
  bounded< 8>() * BITS / ELEMENTS,
  bounded<16>() * BITS / ELEMENTS,
  bounded<32>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded<64>() * BITS / ELEMENTS,
  bounded< 0>() * BITS / ELEMENTS
};

inline KJ_CONSTEXPR() BitsPerElementTableType bitsPerElementIncludingPointers(ElementSize size) {
  return _::BITS_PER_ELEMENT_INCLUDING_PONITERS_TABLE[static_cast<int>(size)];
}
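// Illustrative values from the tables above (per element):
//
//   dataBitsPerElement(ElementSize::BIT)                  ==  1 * BITS / ELEMENTS
//   dataBitsPerElement(ElementSize::FOUR_BYTES)           == 32 * BITS / ELEMENTS
//   dataBitsPerElement(ElementSize::POINTER)              ==  0 * BITS / ELEMENTS
//   pointersPerElement(ElementSize::POINTER)              ==  1 * POINTERS / ELEMENTS
//   bitsPerElementIncludingPointers(ElementSize::POINTER) == 64 * BITS / ELEMENTS
//
// INLINE_COMPOSITE reports zero in both tables because its true element size comes from the
// list's tag word rather than from these tables.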

template <size_t size> struct ElementSizeForByteSize;
template <> struct ElementSizeForByteSize<1> { static constexpr ElementSize value = ElementSize::BYTE; };
template <> struct ElementSizeForByteSize<2> { static constexpr ElementSize value = ElementSize::TWO_BYTES; };
template <> struct ElementSizeForByteSize<4> { static constexpr ElementSize value = ElementSize::FOUR_BYTES; };
template <> struct ElementSizeForByteSize<8> { static constexpr ElementSize value = ElementSize::EIGHT_BYTES; };

template <typename T> struct ElementSizeForType {
  static constexpr ElementSize value =
      // Primitive types that aren't special-cased below can be determined from sizeof().
      CAPNP_KIND(T) == Kind::PRIMITIVE ? ElementSizeForByteSize<sizeof(T)>::value :
      CAPNP_KIND(T) == Kind::ENUM ? ElementSize::TWO_BYTES :
      CAPNP_KIND(T) == Kind::STRUCT ? ElementSize::INLINE_COMPOSITE :

      // Everything else is a pointer.
      ElementSize::POINTER;
};

// Void and bool are special.
template <> struct ElementSizeForType<Void> { static constexpr ElementSize value = ElementSize::VOID; };
template <> struct ElementSizeForType<bool> { static constexpr ElementSize value = ElementSize::BIT; };

// Lists and blobs are pointers, not structs.
template <typename T, Kind K> struct ElementSizeForType<List<T, K>> {
  static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Text> {
  static constexpr ElementSize value = ElementSize::POINTER;
};
template <> struct ElementSizeForType<Data> {
  static constexpr ElementSize value = ElementSize::POINTER;
};

template <typename T>
inline constexpr ElementSize elementSizeForType() {
  return ElementSizeForType<T>::value;
}
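// For example, under the mapping above (SomeStruct stands for any generated struct type):
//
//   elementSizeForType<Void>()       == ElementSize::VOID
//   elementSizeForType<bool>()       == ElementSize::BIT
//   elementSizeForType<uint32_t>()   == ElementSize::FOUR_BYTES
//   elementSizeForType<Text>()       == ElementSize::POINTER
//   elementSizeForType<SomeStruct>() == ElementSize::INLINE_COMPOSITE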

struct MessageSizeCounts {
  WordCountN<61, uint64_t> wordCount;  // 2^64 bytes
  uint capCount;

  MessageSizeCounts& operator+=(const MessageSizeCounts& other) {
    // OK to truncate unchecked because this class is used to count actual stuff in memory, and
    // we couldn't possibly have anywhere near 2^61 words.
    wordCount = assumeBits<61>(wordCount + other.wordCount);
    capCount += other.capCount;
    return *this;
  }

  void addWords(WordCountN<61, uint64_t> other) {
    wordCount = assumeBits<61>(wordCount + other);
  }

  MessageSize asPublic() {
    return MessageSize { unbound(wordCount / WORDS), capCount };
  }
};

// =============================================================================

template <int wordCount>
union AlignedData {
  // Useful for declaring static constant data blobs as an array of bytes, but forcing those
  // bytes to be word-aligned.

  uint8_t bytes[wordCount * sizeof(word)];
  word words[wordCount];
};
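// Typical use (illustrative; generated code uses its own constant names) is to embed a default
// value or constant as raw, word-aligned bytes:
//
//   static const AlignedData<2> exampleDefault = {{
//     0, 0, 0, 0, 0, 0, 0, 0,   // word 0
//     0, 0, 0, 0, 0, 0, 0, 0,   // word 1
//   }};
//
// exampleDefault.words can then be handed to code expecting a `const word*`.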

struct StructSize {
  StructDataWordCount data;
  StructPointerCount pointers;

  inline constexpr WordCountN<17> total() const { return data + pointers * WORDS_PER_POINTER; }

  StructSize() = default;
  inline constexpr StructSize(StructDataWordCount data, StructPointerCount pointers)
      : data(data), pointers(pointers) {}
};

template <typename T, typename CapnpPrivate = typename T::_capnpPrivate>
inline constexpr StructSize structSize() {
  return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
                    bounded(CapnpPrivate::pointerCount) * POINTERS);
}
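// The nested `_capnpPrivate` struct is emitted by the code generator.  As an illustrative sketch
// (the member types shown here are an assumption, not the exact generated declarations), a type
// with one data word and two pointers would provide something like:
//
//   struct Foo {
//     struct _capnpPrivate {
//       static constexpr uint dataWordSize = 1;
//       static constexpr uint pointerCount = 2;
//     };
//   };
//
// so that structSize<Foo>() describes a one-word data section and two pointers.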

template <typename T, typename CapnpPrivate = typename T::_capnpPrivate,
          typename = kj::EnableIf<CAPNP_KIND(T) == Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
  // If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
  // to hold a T.

  return StructSize(bounded(CapnpPrivate::dataWordSize) * WORDS,
                    bounded(CapnpPrivate::pointerCount) * POINTERS);
}

template <typename T, typename = kj::EnableIf<CAPNP_KIND(T) != Kind::STRUCT>>
inline constexpr StructSize minStructSizeForElement() {
  // If T is a struct, return its struct size. Otherwise return the minimum struct size big enough
  // to hold a T.

  return StructSize(
      dataBitsPerElement(elementSizeForType<T>()) * ELEMENTS > ZERO * BITS
          ? StructDataWordCount(ONE * WORDS) : StructDataWordCount(ZERO * WORDS),
      pointersPerElement(elementSizeForType<T>()) * ELEMENTS);
}
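// For example, minStructSizeForElement<uint64_t>() is one data word and no pointers, while
// minStructSizeForElement<Text>() is no data words and one pointer.  This matches the protocol's
// list-upgrade rule: a list of non-struct elements can later be treated as a list of structs
// whose first field holds the original value.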

// -------------------------------------------------------------------
// Masking of default values

template <typename T, Kind kind = CAPNP_KIND(T)> struct Mask_;
template <typename T> struct Mask_<T, Kind::PRIMITIVE> { typedef T Type; };
template <typename T> struct Mask_<T, Kind::ENUM> { typedef uint16_t Type; };
template <> struct Mask_<float, Kind::PRIMITIVE> { typedef uint32_t Type; };
template <> struct Mask_<double, Kind::PRIMITIVE> { typedef uint64_t Type; };

template <typename T> struct Mask_<T, Kind::OTHER> {
  // Union discriminants end up here.
  static_assert(sizeof(T) == 2, "Don't know how to mask this type.");
  typedef uint16_t Type;
};

template <typename T>
using Mask = typename Mask_<T>::Type;

template <typename T>
KJ_ALWAYS_INLINE(Mask<T> mask(T value, Mask<T> mask));
template <typename T>
KJ_ALWAYS_INLINE(T unmask(Mask<T> value, Mask<T> mask));

template <typename T>
inline Mask<T> mask(T value, Mask<T> mask) {
  return static_cast<Mask<T> >(value) ^ mask;
}

template <>
inline uint32_t mask<float>(float value, uint32_t mask) {
#if CAPNP_CANONICALIZE_NAN
  if (value != value) {
    return 0x7fc00000u ^ mask;
  }
#endif

  uint32_t i;
  static_assert(sizeof(i) == sizeof(value), "float is not 32 bits?");
  memcpy(&i, &value, sizeof(value));
  return i ^ mask;
}

template <>
inline uint64_t mask<double>(double value, uint64_t mask) {
#if CAPNP_CANONICALIZE_NAN
  if (value != value) {
    return 0x7ff8000000000000ull ^ mask;
  }
#endif

  uint64_t i;
  static_assert(sizeof(i) == sizeof(value), "double is not 64 bits?");
  memcpy(&i, &value, sizeof(value));
  return i ^ mask;
}

template <typename T>
inline T unmask(Mask<T> value, Mask<T> mask) {
  return static_cast<T>(value ^ mask);
}

template <>
inline float unmask<float>(uint32_t value, uint32_t mask) {
  value ^= mask;
  float result;
  static_assert(sizeof(result) == sizeof(value), "float is not 32 bits?");
  memcpy(&result, &value, sizeof(value));
  return result;
}

template <>
inline double unmask<double>(uint64_t value, uint64_t mask) {
  value ^= mask;
  double result;
  static_assert(sizeof(result) == sizeof(value), "double is not 64 bits?");
  memcpy(&result, &value, sizeof(value));
  return result;
}
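// Default values are applied by XOR-masking: a field is stored XOR'd with its default, so a field
// that equals its default is all zeros on the wire.  For an Int32 field whose default is 5:
//
//   mask<int32_t>(7, 5)   == 2   // written to the wire
//   unmask<int32_t>(2, 5) == 7   // recovered on read
//   mask<int32_t>(5, 5)   == 0   // storing the default produces zeros
//
// Floats and doubles are masked via their bit patterns (canonicalizing NaNs first when
// CAPNP_CANONICALIZE_NAN is set) so the same XOR trick applies.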

// -------------------------------------------------------------------

class CapTableReader {
public:
#if !CAPNP_LITE
  virtual kj::Maybe<kj::Own<ClientHook>> extractCap(uint index) = 0;
  // Extract the capability at the given index.  If the index is invalid, returns null.
#endif  // !CAPNP_LITE
};

class CapTableBuilder: public CapTableReader {
public:
#if !CAPNP_LITE
  virtual uint injectCap(kj::Own<ClientHook>&& cap) = 0;
  // Add the capability to the message and return its index.  If the same ClientHook is injected
  // twice, this may return the same index both times, but in this case dropCap() needs to be
  // called an equal number of times to actually remove the cap.

  virtual void dropCap(uint index) = 0;
  // Remove a capability injected earlier.  Called when the pointer is overwritten or zero'd out.
#endif  // !CAPNP_LITE
};

// -------------------------------------------------------------------

class PointerBuilder: public kj::DisallowConstCopy {
  // Represents a single pointer, usually embedded in a struct or a list.

public:
  inline PointerBuilder(): segment(nullptr), capTable(nullptr), pointer(nullptr) {}

  static inline PointerBuilder getRoot(
      SegmentBuilder* segment, CapTableBuilder* capTable, word* location);
  // Get a PointerBuilder representing a message root located in the given segment at the given
  // location.

  inline bool isNull() { return getPointerType() == PointerType::NULL_; }
  PointerType getPointerType() const;

  StructBuilder getStruct(StructSize size, const word* defaultValue);
  ListBuilder getList(ElementSize elementSize, const word* defaultValue);
  ListBuilder getStructList(StructSize elementSize, const word* defaultValue);
  ListBuilder getListAnySize(const word* defaultValue);
  template <typename T> typename T::Builder getBlob(
      const void* defaultValue, ByteCount defaultSize);
#if !CAPNP_LITE
  kj::Own<ClientHook> getCapability();
#endif  // !CAPNP_LITE
  // Get methods:  Get the value.  If it is null, initialize it to a copy of the default value.
  // The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
  // simple byte array for blobs.

  StructBuilder initStruct(StructSize size);
  ListBuilder initList(ElementSize elementSize, ElementCount elementCount);
  ListBuilder initStructList(ElementCount elementCount, StructSize size);
  template <typename T> typename T::Builder initBlob(ByteCount size);
  // Init methods:  Initialize the pointer to a newly-allocated object, discarding the existing
  // object.

  void setStruct(const StructReader& value, bool canonical = false);
  void setList(const ListReader& value, bool canonical = false);
  template <typename T> void setBlob(typename T::Reader value);
#if !CAPNP_LITE
  void setCapability(kj::Own<ClientHook>&& cap);
#endif  // !CAPNP_LITE
  // Set methods:  Initialize the pointer to a newly-allocated copy of the given value, discarding
  // the existing object.

  void adopt(OrphanBuilder&& orphan);
  // Set the pointer to point at the given orphaned value.

  OrphanBuilder disown();
  // Set the pointer to null and return its previous value as an orphan.

  void clear();
  // Clear the pointer to null, discarding its previous value.

  void transferFrom(PointerBuilder other);
  // Equivalent to `adopt(other.disown())`.

  void copyFrom(PointerReader other, bool canonical = false);
  // Equivalent to `set(other.get())`.
  // If you set the canonical flag, it will attempt to lay the target out
  // canonically, provided enough space is available.

  PointerReader asReader() const;

  BuilderArena* getArena() const;
  // Get the arena containing this pointer.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  PointerBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;     // Memory segment in which the pointer resides.
  CapTableBuilder* capTable;   // Table of capability indexes.
  WirePointer* pointer;        // Pointer to the pointer.

  inline PointerBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, WirePointer* pointer)
      : segment(segment), capTable(capTable), pointer(pointer) {}

  friend class StructBuilder;
  friend class ListBuilder;
  friend class OrphanBuilder;
};
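// As a rough sketch of how generated code drives this class (names like Foo are hypothetical, and
// real callers obtain `segment`, `capTable`, and `location` from a MessageBuilder's arena):
//
//   PointerBuilder root = PointerBuilder::getRoot(segment, capTable, location);
//   StructBuilder s = root.initStruct(structSize<Foo>());       // allocate the root struct
//   PointerBuilder field = s.getPointerField(ZERO * POINTERS);  // first pointer field
//   Text::Builder name = field.initBlob<Text>(5 * BYTES);       // allocate a 5-byte Text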

class PointerReader {
public:
  inline PointerReader()
      : segment(nullptr), capTable(nullptr), pointer(nullptr), nestingLimit(0x7fffffff) {}

  static PointerReader getRoot(SegmentReader* segment, CapTableReader* capTable,
                               const word* location, int nestingLimit);
  // Get a PointerReader representing a message root located in the given segment at the given
  // location.

  static inline PointerReader getRootUnchecked(const word* location);
  // Get a PointerReader for an unchecked message.

  MessageSizeCounts targetSize() const;
  // Return the total size of the target object and everything to which it points.  Does not count
  // far pointer overhead.  This is useful for deciding how much space is needed to copy the object
  // into a flat array.  However, the caller is advised NOT to treat this value as secure.  Instead,
  // use the result as a hint for allocating the first segment, do the copy, and then throw an
  // exception if it overruns.

  inline bool isNull() const { return getPointerType() == PointerType::NULL_; }
  PointerType getPointerType() const;

  StructReader getStruct(const word* defaultValue) const;
  ListReader getList(ElementSize expectedElementSize, const word* defaultValue) const;
  ListReader getListAnySize(const word* defaultValue) const;
  template <typename T>
  typename T::Reader getBlob(const void* defaultValue, ByteCount defaultSize) const;
#if !CAPNP_LITE
  kj::Own<ClientHook> getCapability() const;
#endif  // !CAPNP_LITE
  // Get methods:  Get the value.  If it is null, return the default value instead.
  // The default value is encoded as an "unchecked message" for structs, lists, and objects, or a
  // simple byte array for blobs.

  const word* getUnchecked() const;
  // If this is an unchecked message, get a word* pointing at the location of the pointer.  This
  // word* can actually be passed to readUnchecked() to read the designated sub-object later.  If
  // this isn't an unchecked message, throws an exception.

  kj::Maybe<Arena&> getArena() const;
  // Get the arena containing this pointer.

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  PointerReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message

private:
  SegmentReader* segment;      // Memory segment in which the pointer resides.
  CapTableReader* capTable;    // Table of capability indexes.
  const WirePointer* pointer;  // Pointer to the pointer.  null = treat as null pointer.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.

  inline PointerReader(SegmentReader* segment, CapTableReader* capTable,
                       const WirePointer* pointer, int nestingLimit)
      : segment(segment), capTable(capTable), pointer(pointer), nestingLimit(nestingLimit) {}

  friend class StructReader;
  friend class ListReader;
  friend class PointerBuilder;
  friend class OrphanBuilder;
};

// -------------------------------------------------------------------

class StructBuilder: public kj::DisallowConstCopy {
public:
  inline StructBuilder(): segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr) {}

  inline word* getLocation() { return reinterpret_cast<word*>(data); }
  // Get the object's location.  Only valid for independently-allocated objects (i.e. not list
  // elements).

  inline StructDataBitCount getDataSectionSize() const { return dataSize; }
  inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
  inline kj::ArrayPtr<byte> getDataSectionAsBlob();
  inline _::ListBuilder getPointerSectionAsList();

  template <typename T>
  KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset));
  // Return true if the field is set to something other than its default value.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset));
  // Gets the data field value of the given type at the given offset.  The offset is measured in
  // multiples of the field size, determined by the type.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask));
  // Like getDataField() but applies the given XOR mask to the data on load.  Used for reading
  // fields with non-zero default values.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset, kj::NoInfer<T> value));
  // Sets the data field value at the given offset.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataField(StructDataOffset offset,
                                     kj::NoInfer<T> value, Mask<T> mask));
  // Like setDataField() but applies the given XOR mask before storing.  Used for writing fields
  // with non-zero default values.

  KJ_ALWAYS_INLINE(PointerBuilder getPointerField(StructPointerOffset ptrIndex));
  // Get a builder for a pointer field given the index within the pointer section.

  void clearAll();
  // Clear all pointers and data.

  void transferContentFrom(StructBuilder other);
  // Adopt all pointers from `other`, and also copy all data.  If `other`'s sections are larger
  // than this, the extra data is not transferred, meaning there is a risk of data loss when
  // transferring from messages built with future versions of the protocol.

  void copyContentFrom(StructReader other);
  // Copy content from `other`.  If `other`'s sections are larger than this, the extra data is not
  // copied, meaning there is a risk of data loss when copying from messages built with future
  // versions of the protocol.

  StructReader asReader() const;
  // Gets a StructReader pointing at the same memory.

  BuilderArena* getArena();
  // Gets the arena in which this object is allocated.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  StructBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;     // Memory segment in which the struct resides.
  CapTableBuilder* capTable;   // Table of capability indexes.
  void* data;                  // Pointer to the encoded data.
  WirePointer* pointers;   // Pointer to the encoded pointers.

  StructDataBitCount dataSize;
  // Size of data section.  We use a bit count rather than a word count to more easily handle the
  // case of struct lists encoded with less than a word per element.

  StructPointerCount pointerCount;  // Size of the pointer section.

  inline StructBuilder(SegmentBuilder* segment, CapTableBuilder* capTable,
                       void* data, WirePointer* pointers,
                       StructDataBitCount dataSize, StructPointerCount pointerCount)
      : segment(segment), capTable(capTable), data(data), pointers(pointers),
        dataSize(dataSize), pointerCount(pointerCount) {}

  friend class ListBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};

class StructReader {
public:
  inline StructReader()
      : segment(nullptr), capTable(nullptr), data(nullptr), pointers(nullptr),
        dataSize(ZERO * BITS), pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}
  inline StructReader(kj::ArrayPtr<const word> data)
      : segment(nullptr), capTable(nullptr), data(data.begin()), pointers(nullptr),
        dataSize(assumeBits<STRUCT_DATA_WORD_COUNT_BITS>(data.size()) * WORDS * BITS_PER_WORD),
        pointerCount(ZERO * POINTERS), nestingLimit(0x7fffffff) {}

  const void* getLocation() const { return data; }

  inline StructDataBitCount getDataSectionSize() const { return dataSize; }
  inline StructPointerCount getPointerSectionSize() const { return pointerCount; }
  inline kj::ArrayPtr<const byte> getDataSectionAsBlob();
  inline _::ListReader getPointerSectionAsList();

  kj::Array<word> canonicalize();

  template <typename T>
  KJ_ALWAYS_INLINE(bool hasDataField(StructDataOffset offset) const);
  // Return true if the field is set to something other than its default value.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset) const);
  // Get the data field value of the given type at the given offset.  The offset is measured in
  // multiples of the field size, determined by the type.  Returns zero if the offset is past the
  // end of the struct's data section.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataField(StructDataOffset offset, Mask<T> mask) const);
  // Like getDataField(offset), but applies the given XOR mask to the result.  Used for reading
  // fields with non-zero default values.

  KJ_ALWAYS_INLINE(PointerReader getPointerField(StructPointerOffset ptrIndex) const);
  // Get a reader for a pointer field given the index within the pointer section.  If the index
  // is out-of-bounds, returns a null pointer.

  MessageSizeCounts totalSize() const;
  // Return the total size of the struct and everything to which it points.  Does not count far
  // pointer overhead.  This is useful for deciding how much space is needed to copy the struct
  // into a flat array.  However, the caller is advised NOT to treat this value as secure.  Instead,
  // use the result as a hint for allocating the first segment, do the copy, and then throw an
  // exception if it overruns.

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  StructReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead, const word **ptrHead,
                   bool *dataTrunc, bool *ptrTrunc);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message
  //
  // If this function returns false, the struct is non-canonical. If it
  // returns true, then:
  // * If it is a composite in a list, it is canonical if at least one struct
  //   in the list outputs dataTrunc = 1, and at least one outputs ptrTrunc = 1
  // * If it is derived from a struct pointer, it is canonical if
  //   dataTrunc = 1 AND ptrTrunc = 1

private:
  SegmentReader* segment;    // Memory segment in which the struct resides.
  CapTableReader* capTable;  // Table of capability indexes.

  const void* data;
  const WirePointer* pointers;

  StructDataBitCount dataSize;
  // Size of data section.  We use a bit count rather than a word count to more easily handle the
  // case of struct lists encoded with less than a word per element.

  StructPointerCount pointerCount;  // Size of the pointer section.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.
  // TODO(perf):  Limit to 16 bits for better packing?

  inline StructReader(SegmentReader* segment, CapTableReader* capTable,
                      const void* data, const WirePointer* pointers,
                      StructDataBitCount dataSize, StructPointerCount pointerCount,
                      int nestingLimit)
      : segment(segment), capTable(capTable), data(data), pointers(pointers),
        dataSize(dataSize), pointerCount(pointerCount),
        nestingLimit(nestingLimit) {}

  friend class ListReader;
  friend class StructBuilder;
  friend struct WireHelpers;
};

// -------------------------------------------------------------------

class ListBuilder: public kj::DisallowConstCopy {
public:
  inline explicit ListBuilder(ElementSize elementSize)
      : segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
        step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
        structPointerCount(ZERO * POINTERS), elementSize(elementSize) {}

  inline word* getLocation() {
    // Get the object's location.

    if (elementSize == ElementSize::INLINE_COMPOSITE && ptr != nullptr) {
      return reinterpret_cast<word*>(ptr) - POINTER_SIZE_IN_WORDS;
    } else {
      return reinterpret_cast<word*>(ptr);
    }
  }

  inline ElementSize getElementSize() const { return elementSize; }

  inline ListElementCount size() const;
  // The number of elements in the list.

  Text::Builder asText();
  Data::Builder asData();
  // Reinterpret the list as a blob.  Throws an exception if the elements are not byte-sized.

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataElement(ElementCount index));
  // Get the element of the given type at the given index.

  template <typename T>
  KJ_ALWAYS_INLINE(void setDataElement(ElementCount index, kj::NoInfer<T> value));
  // Set the element at the given index.

  KJ_ALWAYS_INLINE(PointerBuilder getPointerElement(ElementCount index));

  StructBuilder getStructElement(ElementCount index);

  ListReader asReader() const;
  // Get a ListReader pointing at the same memory.

  BuilderArena* getArena();
  // Gets the arena in which this object is allocated.

  CapTableBuilder* getCapTable();
  // Gets the capability context in which this object is operating.

  ListBuilder imbue(CapTableBuilder* capTable);
  // Return a copy of this builder except using the given capability context.

private:
  SegmentBuilder* segment;    // Memory segment in which the list resides.
  CapTableBuilder* capTable;  // Table of capability indexes.

  byte* ptr;  // Pointer to list content.

  ListElementCount elementCount;  // Number of elements in the list.

  BitsPerElementN<23> step;
  // The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
  // words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.

  StructDataBitCount structDataSize;
  StructPointerCount structPointerCount;
  // The struct properties to use when interpreting the elements as structs.  All lists can be
  // interpreted as struct lists, so these are always filled in.

  ElementSize elementSize;
  // The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
  // from other types when the overall size is exactly zero or one words.

  inline ListBuilder(SegmentBuilder* segment, CapTableBuilder* capTable, void* ptr,
                     BitsPerElementN<23> step, ListElementCount size,
                     StructDataBitCount structDataSize, StructPointerCount structPointerCount,
                     ElementSize elementSize)
      : segment(segment), capTable(capTable), ptr(reinterpret_cast<byte*>(ptr)),
        elementCount(size), step(step), structDataSize(structDataSize),
        structPointerCount(structPointerCount), elementSize(elementSize) {}

  friend class StructBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};

class ListReader {
public:
  inline explicit ListReader(ElementSize elementSize)
      : segment(nullptr), capTable(nullptr), ptr(nullptr), elementCount(ZERO * ELEMENTS),
        step(ZERO * BITS / ELEMENTS), structDataSize(ZERO * BITS),
        structPointerCount(ZERO * POINTERS), elementSize(elementSize), nestingLimit(0x7fffffff) {}

  inline ListElementCount size() const;
  // The number of elements in the list.

  inline ElementSize getElementSize() const { return elementSize; }

  Text::Reader asText();
  Data::Reader asData();
  // Reinterpret the list as a blob.  Throws an exception if the elements are not byte-sized.

  kj::ArrayPtr<const byte> asRawBytes();

  template <typename T>
  KJ_ALWAYS_INLINE(T getDataElement(ElementCount index) const);
  // Get the element of the given type at the given index.

  KJ_ALWAYS_INLINE(PointerReader getPointerElement(ElementCount index) const);

  StructReader getStructElement(ElementCount index) const;

  CapTableReader* getCapTable();
  // Gets the capability context in which this object is operating.

  ListReader imbue(CapTableReader* capTable) const;
  // Return a copy of this reader except using the given capability context.

  bool isCanonical(const word **readHead, const WirePointer* ref);
  // Validate this pointer's canonicity, subject to the conditions:
  // * All data to the left of readHead has been read thus far (for pointer
  //   ordering)
  // * All pointers in preorder have already been checked
  // * This pointer is in the first and only segment of the message

private:
  SegmentReader* segment;    // Memory segment in which the list resides.
  CapTableReader* capTable;  // Table of capability indexes.

  const byte* ptr;  // Pointer to list content.

  ListElementCount elementCount;  // Number of elements in the list.

  BitsPerElementN<23> step;
  // The distance between elements. The maximum value occurs when a struct contains 2^16-1 data
  // words and 2^16-1 pointers, i.e. 2^17 - 2 words, or 2^23 - 128 bits.

  StructDataBitCount structDataSize;
  StructPointerCount structPointerCount;
  // The struct properties to use when interpreting the elements as structs.  All lists can be
  // interpreted as struct lists, so these are always filled in.

  ElementSize elementSize;
  // The element size as an ElementSize. This is only really needed to disambiguate INLINE_COMPOSITE
  // from other types when the overall size is exactly zero or one words.

  int nestingLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.

  inline ListReader(SegmentReader* segment, CapTableReader* capTable, const void* ptr,
                    ListElementCount elementCount, BitsPerElementN<23> step,
                    StructDataBitCount structDataSize, StructPointerCount structPointerCount,
                    ElementSize elementSize, int nestingLimit)
      : segment(segment), capTable(capTable), ptr(reinterpret_cast<const byte*>(ptr)),
        elementCount(elementCount), step(step), structDataSize(structDataSize),
        structPointerCount(structPointerCount), elementSize(elementSize),
        nestingLimit(nestingLimit) {}

  friend class StructReader;
  friend class ListBuilder;
  friend struct WireHelpers;
  friend class OrphanBuilder;
};

// -------------------------------------------------------------------

class OrphanBuilder {
public:
  inline OrphanBuilder(): segment(nullptr), capTable(nullptr), location(nullptr) {
    memset(&tag, 0, sizeof(tag));
  }
  OrphanBuilder(const OrphanBuilder& other) = delete;
  inline OrphanBuilder(OrphanBuilder&& other) noexcept;
  inline ~OrphanBuilder() noexcept(false);

  static OrphanBuilder initStruct(BuilderArena* arena, CapTableBuilder* capTable, StructSize size);
  static OrphanBuilder initList(BuilderArena* arena, CapTableBuilder* capTable,
                                ElementCount elementCount, ElementSize elementSize);
  static OrphanBuilder initStructList(BuilderArena* arena, CapTableBuilder* capTable,
                                      ElementCount elementCount, StructSize elementSize);
  static OrphanBuilder initText(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);
  static OrphanBuilder initData(BuilderArena* arena, CapTableBuilder* capTable, ByteCount size);

  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, StructReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, ListReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, PointerReader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Text::Reader copyFrom);
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable, Data::Reader copyFrom);
#if !CAPNP_LITE
  static OrphanBuilder copy(BuilderArena* arena, CapTableBuilder* capTable,
                            kj::Own<ClientHook> copyFrom);
#endif  // !CAPNP_LITE

  static OrphanBuilder concat(BuilderArena* arena, CapTableBuilder* capTable,
                              ElementSize expectedElementSize, StructSize expectedStructSize,
                              kj::ArrayPtr<const ListReader> lists);

  static OrphanBuilder referenceExternalData(BuilderArena* arena, Data::Reader data);

  OrphanBuilder& operator=(const OrphanBuilder& other) = delete;
  inline OrphanBuilder& operator=(OrphanBuilder&& other);

  inline bool operator==(decltype(nullptr)) const { return location == nullptr; }
  inline bool operator!=(decltype(nullptr)) const { return location != nullptr; }

  StructBuilder asStruct(StructSize size);
  // Interpret as a struct, or throw an exception if not a struct.

  ListBuilder asList(ElementSize elementSize);
  // Interpret as a list, or throw an exception if not a list.  elementSize cannot be
  // INLINE_COMPOSITE -- use asStructList() instead.

  ListBuilder asStructList(StructSize elementSize);
  // Interpret as a struct list, or throw an exception if not a list.

  ListBuilder asListAnySize();
  // For AnyList.

  Text::Builder asText();
  Data::Builder asData();
  // Interpret as a blob, or throw an exception if not a blob.

  StructReader asStructReader(StructSize size) const;
  ListReader asListReader(ElementSize elementSize) const;
  ListReader asListReaderAnySize() const;
#if !CAPNP_LITE
  kj::Own<ClientHook> asCapability() const;
#endif  // !CAPNP_LITE
  Text::Reader asTextReader() const;
  Data::Reader asDataReader() const;

  bool truncate(ElementCount size, bool isText) KJ_WARN_UNUSED_RESULT;
  // Resize the orphan list to the given size. Returns false if the list is currently empty but
  // the requested size is non-zero, in which case the caller will need to allocate a new list.

  void truncate(ElementCount size, ElementSize elementSize);
  void truncate(ElementCount size, StructSize elementSize);
  void truncateText(ElementCount size);
  // Versions of truncate() that know how to allocate a new list if needed.

private:
  static_assert(ONE * POINTERS * WORDS_PER_POINTER == ONE * WORDS,
                "This struct assumes a pointer is one word.");
  word tag;
  // Contains an encoded WirePointer representing this object.  WirePointer is defined in
  // layout.c++, but fits in a word.
  //
  // This may be a FAR pointer.  Even in that case, `location` points to the eventual destination
  // of that far pointer.  The reason we keep the far pointer around rather than just making `tag`
  // represent the final destination is because if the eventual adopter of the pointer is not in
  // the target's segment then it may be useful to reuse the far pointer landing pad.
  //
  // If `tag` is not a far pointer, its offset is garbage; only `location` points to the actual
  // target.

  SegmentBuilder* segment;
  // Segment in which the object resides.

  CapTableBuilder* capTable;
  // Table of capability indexes.

  word* location;
  // Pointer to the object, or nullptr if the pointer is null.  For capabilities, we make this
  // 0x1 just so that it is non-null for operator==, but it is never used.

  inline OrphanBuilder(const void* tagPtr, SegmentBuilder* segment,
                       CapTableBuilder* capTable, word* location)
      : segment(segment), capTable(capTable), location(location) {
    memcpy(&tag, tagPtr, sizeof(tag));
  }

  inline WirePointer* tagAsPtr() { return reinterpret_cast<WirePointer*>(&tag); }
  inline const WirePointer* tagAsPtr() const { return reinterpret_cast<const WirePointer*>(&tag); }

  void euthanize();
  // Erase the target object, zeroing it out and possibly reclaiming the memory.  Called when
  // the OrphanBuilder is being destroyed or overwritten and it is non-null.

  friend struct WireHelpers;
};

// =======================================================================================
// Internal implementation details...

// These are defined in the source file.
template <> typename Text::Builder PointerBuilder::initBlob<Text>(ByteCount size);
template <> void PointerBuilder::setBlob<Text>(typename Text::Reader value);
template <> typename Text::Builder PointerBuilder::getBlob<Text>(
    const void* defaultValue, ByteCount defaultSize);
template <> typename Text::Reader PointerReader::getBlob<Text>(
    const void* defaultValue, ByteCount defaultSize) const;

template <> typename Data::Builder PointerBuilder::initBlob<Data>(ByteCount size);
template <> void PointerBuilder::setBlob<Data>(typename Data::Reader value);
template <> typename Data::Builder PointerBuilder::getBlob<Data>(
    const void* defaultValue, ByteCount defaultSize);
template <> typename Data::Reader PointerReader::getBlob<Data>(
    const void* defaultValue, ByteCount defaultSize) const;

inline PointerBuilder PointerBuilder::getRoot(
    SegmentBuilder* segment, CapTableBuilder* capTable, word* location) {
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(location));
}

inline PointerReader PointerReader::getRootUnchecked(const word* location) {
  return PointerReader(nullptr, nullptr,
                       reinterpret_cast<const WirePointer*>(location), 0x7fffffff);
}

// -------------------------------------------------------------------

inline kj::ArrayPtr<byte> StructBuilder::getDataSectionAsBlob() {
  return kj::ArrayPtr<byte>(reinterpret_cast<byte*>(data),
      unbound(dataSize / BITS_PER_BYTE / BYTES));
}

inline _::ListBuilder StructBuilder::getPointerSectionAsList() {
  return _::ListBuilder(segment, capTable, pointers, ONE * POINTERS * BITS_PER_POINTER / ELEMENTS,
                        pointerCount * (ONE * ELEMENTS / POINTERS),
                        ZERO * BITS, ONE * POINTERS, ElementSize::POINTER);
}

template <typename T>
inline bool StructBuilder::hasDataField(StructDataOffset offset) {
  return getDataField<Mask<T>>(offset) != 0;
}

template <>
inline bool StructBuilder::hasDataField<Void>(StructDataOffset offset) {
  return false;
}

template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset) {
  return reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
}

template <>
inline bool StructBuilder::getDataField<bool>(StructDataOffset offset) {
  BitCount32 boffset = offset * (ONE * BITS / ELEMENTS);
  byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
  return (*reinterpret_cast<uint8_t*>(b) &
      unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void StructBuilder::getDataField<Void>(StructDataOffset offset) {
  return VOID;
}

template <typename T>
inline T StructBuilder::getDataField(StructDataOffset offset, Mask<T> mask) {
  return unmask<T>(getDataField<Mask<T> >(offset), mask);
}

template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset, kj::NoInfer<T> value) {
  reinterpret_cast<WireValue<T>*>(data)[unbound(offset / ELEMENTS)].set(value);
}

#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void StructBuilder::setDataField<float>(StructDataOffset offset, float value) {
  setDataField<uint32_t>(offset, mask<float>(value, 0));
}
template <>
inline void StructBuilder::setDataField<double>(StructDataOffset offset, double value) {
  setDataField<uint64_t>(offset, mask<double>(value, 0));
}
#endif

template <>
inline void StructBuilder::setDataField<bool>(StructDataOffset offset, bool value) {
  auto boffset = offset * (ONE * BITS / ELEMENTS);
  byte* b = reinterpret_cast<byte*>(data) + boffset / BITS_PER_BYTE;
  uint bitnum = unboundMaxBits<3>(boffset % BITS_PER_BYTE / BITS);
  *reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << bitnum))
                                 | (static_cast<uint8_t>(value) << bitnum);
}
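// Worked example of the bit addressing above: for offset == 11 (elements), boffset is 11 bits, so
// the bit lives in byte 11 / 8 == 1 of the data section, at bit number 11 % 8 == 3.  Bools are
// packed eight per byte, least-significant bit first.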

template <>
inline void StructBuilder::setDataField<Void>(StructDataOffset offset, Void value) {}

template <typename T>
inline void StructBuilder::setDataField(StructDataOffset offset,
                                        kj::NoInfer<T> value, Mask<T> m) {
  setDataField<Mask<T> >(offset, mask<T>(value, m));
}

inline PointerBuilder StructBuilder::getPointerField(StructPointerOffset ptrIndex) {
  // Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(
      reinterpret_cast<word*>(pointers) + ptrIndex * WORDS_PER_POINTER));
}

// -------------------------------------------------------------------

inline kj::ArrayPtr<const byte> StructReader::getDataSectionAsBlob() {
  return kj::ArrayPtr<const byte>(reinterpret_cast<const byte*>(data),
      unbound(dataSize / BITS_PER_BYTE / BYTES));
}

inline _::ListReader StructReader::getPointerSectionAsList() {
  return _::ListReader(segment, capTable, pointers, pointerCount * (ONE * ELEMENTS / POINTERS),
                       ONE * POINTERS * BITS_PER_POINTER / ELEMENTS, ZERO * BITS, ONE * POINTERS,
                       ElementSize::POINTER, nestingLimit);
}

template <typename T>
inline bool StructReader::hasDataField(StructDataOffset offset) const {
  return getDataField<Mask<T>>(offset) != 0;
}

template <>
inline bool StructReader::hasDataField<Void>(StructDataOffset offset) const {
  return false;
}

template <typename T>
inline T StructReader::getDataField(StructDataOffset offset) const {
  if ((offset + ONE * ELEMENTS) * capnp::bitsPerElement<T>() <= dataSize) {
    return reinterpret_cast<const WireValue<T>*>(data)[unbound(offset / ELEMENTS)].get();
  } else {
    return static_cast<T>(0);
  }
}
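// Example of the bounds check above: with a one-word data section (dataSize == 64 bits), reading
// a uint32_t at offset 2 would need bits 64..95, which is out of range, so 0 is returned.
// Combined with the XOR masking, an older message therefore yields the declared default for
// fields added by newer schema versions.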

template <>
inline bool StructReader::getDataField<bool>(StructDataOffset offset) const {
  auto boffset = offset * (ONE * BITS / ELEMENTS);
  if (boffset < dataSize) {
    const byte* b = reinterpret_cast<const byte*>(data) + boffset / BITS_PER_BYTE;
    return (*reinterpret_cast<const uint8_t*>(b) &
        unbound(ONE << (boffset % BITS_PER_BYTE / BITS))) != 0;
  } else {
    return false;
  }
}

template <>
inline Void StructReader::getDataField<Void>(StructDataOffset offset) const {
  return VOID;
}

template <typename T>
T StructReader::getDataField(StructDataOffset offset, Mask<T> mask) const {
  return unmask<T>(getDataField<Mask<T> >(offset), mask);
}

inline PointerReader StructReader::getPointerField(StructPointerOffset ptrIndex) const {
  if (ptrIndex < pointerCount) {
    // Hacky because WirePointer is defined in the .c++ file (so is incomplete here).
    return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
        reinterpret_cast<const word*>(pointers) + ptrIndex * WORDS_PER_POINTER), nestingLimit);
  } else {
    return PointerReader();
  }
}

// -------------------------------------------------------------------

inline ListElementCount ListBuilder::size() const { return elementCount; }

template <typename T>
inline T ListBuilder::getDataElement(ElementCount index) {
  return reinterpret_cast<WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();

  // TODO(perf):  Benchmark this alternate implementation, which I suspect may make better use of
  //   the x86 SIB byte.  Also use it for all the other getData/setData implementations below, and
  //   the various non-inline methods that look up pointers.
  //   Also if using this, consider changing ptr back to void* instead of byte*.
//  return reinterpret_cast<WireValue<T>*>(ptr)[
//      index / ELEMENTS * (step / capnp::bitsPerElement<T>())].get();
}
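// Worked example of `step`: for an INLINE_COMPOSITE list whose structs have two data words and
// one pointer, each element occupies 3 words, so step == 192 bits/element and element i's data
// starts at ptr + i * 192 / 8 bytes.  For a plain list of uint32_t, step is simply 32
// bits/element.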

template <>
inline bool ListBuilder::getDataElement<bool>(ElementCount index) {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  byte* b = ptr + bindex / BITS_PER_BYTE;
  return (*reinterpret_cast<uint8_t*>(b) &
      unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void ListBuilder::getDataElement<Void>(ElementCount index) {
  return VOID;
}

template <typename T>
inline void ListBuilder::setDataElement(ElementCount index, kj::NoInfer<T> value) {
  reinterpret_cast<WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->set(value);
}

#if CAPNP_CANONICALIZE_NAN
// Use mask() on floats and doubles to make sure we canonicalize NaNs.
template <>
inline void ListBuilder::setDataElement<float>(ElementCount index, float value) {
  setDataElement<uint32_t>(index, mask<float>(value, 0));
}
template <>
inline void ListBuilder::setDataElement<double>(ElementCount index, double value) {
  setDataElement<uint64_t>(index, mask<double>(value, 0));
}
#endif

template <>
inline void ListBuilder::setDataElement<bool>(ElementCount index, bool value) {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  byte* b = ptr + bindex / BITS_PER_BYTE;
  auto bitnum = bindex % BITS_PER_BYTE / BITS;
  *reinterpret_cast<uint8_t*>(b) = (*reinterpret_cast<uint8_t*>(b) & ~(1 << unbound(bitnum)))
                                 | (static_cast<uint8_t>(value) << unbound(bitnum));
}

template <>
inline void ListBuilder::setDataElement<Void>(ElementCount index, Void value) {}

inline PointerBuilder ListBuilder::getPointerElement(ElementCount index) {
  return PointerBuilder(segment, capTable, reinterpret_cast<WirePointer*>(ptr +
      upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE));
}

// -------------------------------------------------------------------

inline ListElementCount ListReader::size() const { return elementCount; }

template <typename T>
inline T ListReader::getDataElement(ElementCount index) const {
  return reinterpret_cast<const WireValue<T>*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE)->get();
}

template <>
inline bool ListReader::getDataElement<bool>(ElementCount index) const {
  // Ignore step for bit lists because bit lists cannot be upgraded to struct lists.
  auto bindex = index * (ONE * BITS / ELEMENTS);
  const byte* b = ptr + bindex / BITS_PER_BYTE;
  return (*reinterpret_cast<const uint8_t*>(b) &
      unbound(ONE << (bindex % BITS_PER_BYTE / BITS))) != 0;
}

template <>
inline Void ListReader::getDataElement<Void>(ElementCount index) const {
  return VOID;
}

inline PointerReader ListReader::getPointerElement(ElementCount index) const {
  return PointerReader(segment, capTable, reinterpret_cast<const WirePointer*>(
      ptr + upgradeBound<uint64_t>(index) * step / BITS_PER_BYTE), nestingLimit);
}

// -------------------------------------------------------------------

inline OrphanBuilder::OrphanBuilder(OrphanBuilder&& other) noexcept
    : segment(other.segment), capTable(other.capTable), location(other.location) {
  memcpy(&tag, &other.tag, sizeof(tag));  // Needs memcpy to comply with aliasing rules.
  other.segment = nullptr;
  other.location = nullptr;
}

inline OrphanBuilder::~OrphanBuilder() noexcept(false) {
  if (segment != nullptr) euthanize();
}

inline OrphanBuilder& OrphanBuilder::operator=(OrphanBuilder&& other) {
  // With normal smart pointers, it's important to handle the case where the incoming pointer
  // is actually transitively owned by this one.  In this case, euthanize() would destroy `other`
  // before we copied it.  This isn't possible in the case of `OrphanBuilder` because it only
  // owns message objects, and `other` is not itself a message object, therefore cannot possibly
  // be transitively owned by `this`.

  if (segment != nullptr) euthanize();
  segment = other.segment;
  capTable = other.capTable;
  location = other.location;
  memcpy(&tag, &other.tag, sizeof(tag));  // Needs memcpy to comply with aliasing rules.
  other.segment = nullptr;
  other.location = nullptr;
  return *this;
}

}  // namespace _ (private)
}  // namespace capnp

#endif  // CAPNP_LAYOUT_H_