Commit c0921532 authored by Kenton Varda

Starting work on C++ runtime.

parent fa8ca65d
# Default target: this directory is built with Ekam, not make.  If an IDE
# (Eclipse) invokes `make` here, just print a warning and stop.
all:
echo "You probably accidentally told Eclipse to build. Stopping."
# One-shot optimized build via Ekam.
once:
CXX=g++-4.7 CXXFLAGS='-std=gnu++0x -O2 -Wall' LIBS='-lz -pthread' ekam -j6
# Continuous debug build; -c watches for changes, -n serves build status on port 51315.
continuous:
CXX=g++-4.7 CXXFLAGS='-std=gnu++0x -g -Wall' LIBS='-lz -pthread' ekam -j6 -c -n :51315
# Remove all Ekam build output.
clean:
rm -rf bin lib tmp
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "arena.h"
namespace capnproto {

// Out-of-line (empty) destructor definition.  Keeping it here rather than in
// the header gives the class a single "home" translation unit.
Arena::~Arena() {}

}  // namespace capnproto
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <inttypes.h>
#include <stddef.h>
#ifndef CAPNPROTO_ARENA_H_
#define CAPNPROTO_ARENA_H_
namespace capnproto {
class Segment;
class SegmentAllocator;
class Arena {
public:
Arena(SegmentAllocator* allocator);
~Arena();
const Segment* addReadSegment(uint32_t index, const void* data, size_t size);
const Segment* getSegment(uint32_t index) const;
Segment* getWritableSegment(uint32_t index);
// Get the segment at the given index.
Segment* getSegmentWithAvailable(uint32_t minimumSize);
// Find a segment that has at least the given number of bytes available.
void parseError(const char* message) const;
};
class Segment {
  // One contiguous block of message memory.  Supports bump allocation (for
  // writing) and bounds/read-limit-checked pointer access (for reading).
  // Instances are created and destroyed only by Arena (private ctor/dtor).
public:
  inline void* allocate(uint32_t amount);
  // Bump-allocate `amount` bytes; returns nullptr if insufficient space.

  inline void* getPtrUnchecked(uint32_t offset);
  // Byte offset -> pointer, with no bounds checking.

  inline uint32_t getOffset(const void* ptr) const;
  // Pointer -> byte offset from the segment start.

  inline const void* getPtrChecked(uint32_t offset, uint32_t bytesBefore,
                                   uint32_t bytesAfter) const;
  // Bounds-checked and read-limit-charged pointer access; nullptr on failure.

  inline Arena* getArena();
  inline const Arena* getArena() const;
  inline uint32_t getSegmentId() const;

private:
  Arena* arena;        // back-pointer to the owning arena
  uint32_t id;         // this segment's index within the arena
  void* start;         // first byte of the segment's memory
  uint32_t size;       // total size in bytes
  uint32_t pos;        // bump-allocation cursor (bytes already allocated)
  int64_t* readLimit;  // shared countdown of bytes the reader may still touch

  friend class Arena;

  inline Segment(Arena* arena, uint32_t index, void* start, uint32_t size, uint32_t pos,
                 int64_t* readLimit);
  inline ~Segment();

  Segment(const Segment& other) = delete;
  Segment& operator=(const Segment& other) = delete;

  void readLimitReached() const;

  // TODO:  Do we need mutex locking?
};
class SegmentAllocator {
  // Interface through which Arena obtains and releases raw segment memory.
public:
  virtual ~SegmentAllocator();

  virtual void* allocate(size_t size);
  virtual void free(void* ptr, size_t size);
  // NOTE(review): declared virtual but not pure -- default implementations are
  // presumably defined elsewhere (not visible in this file); confirm.
};
// =======================================================================================
// Trivial member-wise constructor; only Arena may construct Segments.
inline Segment::Segment(Arena* arena, uint32_t id, void* start, uint32_t size, uint32_t pos,
                        int64_t* readLimit)
    : arena(arena), id(id), start(start), size(size), pos(pos),
      readLimit(readLimit) {}
// The segment does not own `start`'s memory; nothing to release here.
inline Segment::~Segment() {}
inline void* Segment::allocate(uint32_t amount) {
  // Bump-allocate `amount` bytes from this segment.  Returns nullptr if the
  // segment does not have enough space left.
  if (amount > size - pos) {
    return nullptr;
  } else {
    uint32_t offset = pos;
    // BUG FIX: the cursor must advance by the number of bytes allocated
    // (`amount`), not by the total segment size.  Advancing by `size` pushed
    // `pos` past the end of the segment, making the unsigned `size - pos`
    // above wrap around so later calls would hand out out-of-bounds memory.
    pos += amount;
    return reinterpret_cast<char*>(start) + offset;
  }
}
inline void* Segment::getPtrUnchecked(uint32_t offset) {
  // Translate a byte offset into a raw pointer, with no bounds checking.
  char* const base = static_cast<char*>(start);
  return base + offset;
}
inline uint32_t Segment::getOffset(const void* ptr) const {
  // Inverse of getPtrUnchecked():  byte distance from the segment start.
  const char* const base = static_cast<const char*>(start);
  return static_cast<const char*>(ptr) - base;
}
// Bounds-checked read access:  returns a pointer to `offset` only if the range
// [offset - bytesBefore, offset + bytesAfter) lies within the segment, and
// charges that many bytes against the shared read limit.  Returns nullptr on
// a bounds failure.
inline const void* Segment::getPtrChecked(uint32_t offset, uint32_t bytesBefore,
                                          uint32_t bytesAfter) const {
  // Check bounds.  Watch out for overflow and underflow here.
  if (offset > size || bytesBefore > offset || bytesAfter > size - offset) {
    return nullptr;
  } else {
    // Enforce the read limit.  Synchronization is not necessary because readLimit is just a rough
    // counter to prevent infinite loops leading to DoS attacks.
    // NOTE(review): `bytesBefore + bytesAfter` is computed in uint32 and could
    // in principle wrap before being subtracted from the 64-bit limit --
    // presumably callers never pass values that large; confirm.
    if ((*readLimit -= bytesBefore + bytesAfter) < 0) readLimitReached();
    return reinterpret_cast<char*>(start) + offset;
  }
}
// Trivial accessors for the owning arena and this segment's index.
inline Arena* Segment::getArena() {
  return arena;
}
inline const Arena* Segment::getArena() const {
  return arena;
}
inline uint32_t Segment::getSegmentId() const {
  return id;
}
} // namespace capnproto
#endif // CAPNPROTO_ARENA_H_
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "descriptor.h"
namespace capnproto {
namespace internal {
} // namespace internal
} // namespace capnproto
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is NOT intended for use by clients, except in generated code.
//
// The structures declared here provide basic information about user-defined capnproto data types
// needed in order to read, write, and traverse the raw message data. Basically, descriptors
// serve two purposes:
// - Define the layout of struct types so that they can be allocated and so that incoming structs
// can be validated.
// - Define the default values used when fields are absent on the wire.
//
// We use raw structs with no inheritance in order to allow for static initialization via the data
// segment with no need to execute any code at startup.
#ifndef CAPNPROTO_DESCRIPTOR_H_
#define CAPNPROTO_DESCRIPTOR_H_
#include <inttypes.h>
#include "macros.h"
namespace capnproto {
namespace internal {
struct ListDescriptor;
struct StructDescriptor;
struct FieldDescriptor;
struct Descriptor {
  // This is the "base type" for descriptors that describe the target of a reference.
  // StructDescriptor and ListDescriptor should be treated as if they subclass this type.  However,
  // because subclassing breaks the ability to initialize structs using an initializer list -- which
  // we need for static initialization purposes -- we don't actually use inheritance.  Instead,
  // each of the "subclasses" has a field of type Descriptor as its first field.

  enum class Kind: uint8_t {
    LIST,    // This is a ListDescriptor
    STRUCT   // This is a StructDescriptor
  };

  Kind kind;
  // Discriminator telling which "subclass" this descriptor really is.

  inline const ListDescriptor* asList() const;
  inline const StructDescriptor* asStruct() const;
  // Poor-man's downcast.  Debug-asserts the kind matches, then
  // reinterpret_casts (valid because `base` is the first member of each
  // "subclass" -- see the static_asserts below).
};
// Size classes for field storage on the wire.
enum class FieldSize: uint8_t {
  BIT,
  BYTE,
  TWO_BYTES,
  FOUR_BYTES,
  EIGHT_BYTES,
  REFERENCE,      // Indicates that the field lives in the reference segment, not the data segment.
  KEY_REFERENCE,  // A 64-bit key, 64-bit reference pair.
  STRUCT          // An arbitrary-sized inlined struct.  Used only for list elements, not struct
                  // fields, since a struct cannot embed another struct inline.
};
inline int sizeInBits(FieldSize s) {
  // Number of bits one value of the given size class occupies on the wire.
  // STRUCT has no fixed size and maps to -1.
  switch (s) {
    case FieldSize::BIT:           return 1;
    case FieldSize::BYTE:          return 8;
    case FieldSize::TWO_BYTES:     return 16;
    case FieldSize::FOUR_BYTES:    return 32;
    case FieldSize::EIGHT_BYTES:   return 64;
    case FieldSize::REFERENCE:     return 64;
    case FieldSize::KEY_REFERENCE: return 128;
    case FieldSize::STRUCT:        return -1;
  }
  return -1;  // unreachable for valid enum values
}
inline int byteOffsetForFieldZero(FieldSize s) {
  // For the given field size, get the offset, in bytes, between a struct pointer and the location
  // of the struct's first field, if the struct's first field is of the given type.  We use this
  // to adjust pointers when non-struct lists are converted to struct lists or vice versa.
  switch (s) {
    case FieldSize::BIT:           return 1;
    case FieldSize::BYTE:          return 1;
    case FieldSize::TWO_BYTES:     return 2;
    case FieldSize::FOUR_BYTES:    return 4;
    case FieldSize::EIGHT_BYTES:   return 8;
    case FieldSize::REFERENCE:     return 0;
    case FieldSize::KEY_REFERENCE: return 8;
    case FieldSize::STRUCT:        return 0;
  }
  return 0;  // unreachable for valid enum values
}
struct ListDescriptor {
  // Describes a list.

  Descriptor base;
  // Must be first so that Descriptor* can be reinterpret_cast to ListDescriptor*.

  FieldSize elementSize;
  // Size of each element of the list.  Also determines whether it is a reference list or a data
  // list.

  const Descriptor* elementDescriptor;
  // For a reference list, this is a descriptor of an element.  Otherwise, NULL.

  uint32_t defaultCount;
  // Number of elements in the default value.

  const void* defaultData;
  // For a data list, points to an array of elements representing the default contents of the list.
  // Note that unlike data segments of structs, this pointer points to the first byte of the data.
  // For a reference list, points to an array of descriptor pointers -- use defaultReferences()
  // for type-safety.

  const Descriptor* const* defaultReferences() const {
    // Convenience accessor for reference lists.
    return reinterpret_cast<const Descriptor* const*>(defaultData);
  }
};
static_assert(__builtin_offsetof(ListDescriptor, base) == 0,
    "'base' must be the first member of ListDescriptor to allow reinterpret_cast from "
    "Descriptor to ListDescriptor.");
struct StructDescriptor {
  // Describes a struct.

  Descriptor base;
  // Must be first so that Descriptor* can be reinterpret_cast to StructDescriptor*.

  uint8_t fieldCount;
  // Number of fields in this type -- that we were aware of at compile time, of course.

  uint8_t dataSize;
  // Size of the data segment, in 64-bit words.

  uint8_t referenceCount;
  // Number of references in the reference segment.

  const FieldDescriptor* fields;
  // Array of FieldDescriptors.

  const void* defaultData;
  // Default data.  The pointer actually points to the byte immediately after the end of the data.

  const Descriptor* const* defaultReferences;
  // Array of descriptors describing the references.

  inline uint32_t wordSize() const {
    // Size of the struct in words:  the data segment occupies `dataSize` words
    // and each reference occupies one word.
    // BUG FIX: previously summed fieldCount + referenceCount.  fieldCount (the
    // number of declared fields) is unrelated to storage size -- e.g. eight
    // boolean fields occupy a single data word -- so allocations sized by the
    // old formula were wrong whenever fieldCount != dataSize.
    return static_cast<uint32_t>(dataSize) + static_cast<uint32_t>(referenceCount);
  }
};
static_assert(__builtin_offsetof(StructDescriptor, base) == 0,
    "'base' must be the first member of StructDescriptor to allow reinterpret_cast from "
    "Descriptor to StructDescriptor.");
struct FieldDescriptor {
  // Describes one field of a struct.

  uint8_t requiredDataSize;
  // The minimum size of the data segment of any object which includes this field.  This is always
  // offset * size / 64 bits, rounded up.  This value is useful for validating object references
  // received on the wire -- if dataSize is insufficient to support fieldCount, don't trust it!

  uint8_t requiredReferenceSize;
  // The minimum size of the reference segment of any object which includes this field.  Same deal
  // as with requiredDataSize.

  uint16_t offset;
  // If the field is a data field (size != REFERENCE), then this is the offset within the object's
  // data segment at which the field is positioned, measured in multiples of the field's size.  This
  // offset is intended to be *subtracted* from the object pointer, since the object pointer points
  // to the beginning of the reference segment, which is located immediately after the data segment.
  // Therefore, this offset can never be zero.
  //
  // For size == BIT, the meaning is slightly different:  bits are numbered from zero, starting with
  // the eight bits in the last byte of the data segment, followed by the eight bits in the byte
  // before that, and so on.  Within each byte, bits are numbered from least-significant to
  // most-significant -- i.e. *not* backwards.  This awkward numbering is necessary to allow a
  // list of booleans to be upgraded to a list of structs where field number zero is a boolean --
  // we need the first boolean in either a list or a struct to be located at the same end of its
  // byte.
  //
  // If the field is a reference field (size == REFERENCE), then this is the index within the
  // reference array at which the field is located.

  FieldSize size;
  // Size of this field.

  uint8_t hole32Offset;
  uint16_t hole16Offset;
  uint16_t hole8Offset;
  // In the case that this field is the last one in the object, and thus the object's data segment
  // size is equal to requiredDataSize, then the following offsets indicate locations of "holes" in
  // the data segment which are not occupied by any field.  The packing algorithm guarantees that
  // there can be at most one hole of each size.  An offset of zero indicates that no hole is
  // present.  Each offset is measured in multiples two times the hole size.  E.g. hole32Offset is
  // measured in 64-bit words.  (The packing algorithm guarantees that hole offsets will always be
  // an even multiple of the hole size.)

  uint16_t bitholeOffset;
  // If the object contains boolean fields and the number of booleans is not divisible by 8, then
  // there will also be a hole of 1-7 bits somewhere.  bitholeOffset is the offset, in bits, of the
  // first (most-significant) such missing bit.  All subsequent (less-significant) bits within the
  // same byte are also missing.
};
// Checked downcasts.  The reinterpret_cast is valid because `base` is the
// first member of each "subclass" (see static_asserts above), so the addresses
// coincide.
inline const ListDescriptor* Descriptor::asList() const {
  CAPNPROTO_DEBUG_ASSERT(kind == Kind::LIST, "asList() called on Descriptor that isn't a list.");
  return reinterpret_cast<const ListDescriptor*>(this);
}
inline const StructDescriptor* Descriptor::asStruct() const {
  CAPNPROTO_DEBUG_ASSERT(
      kind == Kind::STRUCT, "asStruct() called on Descriptor that isn't a struct.");
  return reinterpret_cast<const StructDescriptor*>(this);
}
} // namespace internal
} // namespace capnproto
#endif // CAPNPROTO_DESCRIPTOR_H_
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "macros.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <exception>
namespace capnproto {
namespace internal {
class Exception: public std::exception {
  // Exception thrown in case of fatal errors.
public:
  Exception(const char* file, int line, const char* expectation, const char* message);
  virtual ~Exception() noexcept;

  // Accessors for the failure's source location and text.
  // NOTE(review): these could be const member functions -- they do not modify state.
  const char* getFile() { return file; }
  int getLine() { return line; }
  const char* getExpectation() { return expectation; }
  const char* getMessage() { return message; }

  virtual const char* what();
  // NOTE(review): std::exception::what() is declared `const char* what() const
  // noexcept`; this non-const signature *hides* rather than overrides it, so a
  // call through std::exception& reaches the base implementation instead.
  // Consider making this `const noexcept` (whatBuffer would need `mutable`).

private:
  const char* file;         // __FILE__ of the failed assertion
  int line;                 // __LINE__ of the failed assertion
  const char* expectation;  // stringified condition text
  const char* message;      // human-readable description
  char* whatBuffer;         // lazily allocated by what(); freed in the destructor
};
// Stores the failure details and immediately prints them to stderr, so the
// information is visible even if the exception is never caught or what() is
// never called.  The pointed-to strings are borrowed (expected to be string
// literals / static data).
Exception::Exception(
    const char* file, int line, const char* expectation, const char* message)
    : file(file), line(line), expectation(expectation), message(message), whatBuffer(nullptr) {
  // NOTE(review): format string has no trailing '\n' -- presumably `message`
  // ends with one; confirm.
  fprintf(stderr, "Captain Proto debug assertion failed:\n %s:%d: %s\n %s",
          file, line, expectation, message);
}

// Releases the buffer lazily allocated by what(), if any.
Exception::~Exception() noexcept {
  delete [] whatBuffer;
}
const char* Exception::what() {
  // Lazily format and cache the description.
  // BUG FIX: previously every call allocated a fresh buffer and overwrote
  // whatBuffer, leaking the buffer from any prior call (only the last one was
  // freed by the destructor).  Build the string once and reuse it.
  if (whatBuffer == nullptr) {
    // 256 extra bytes comfortably cover the fixed text and the line number.
    whatBuffer = new char[strlen(file) + strlen(expectation) + strlen(message) + 256];
    sprintf(whatBuffer, "Captain Proto debug assertion failed:\n %s:%d: %s\n %s",
            file, line, expectation, message);
  }
  return whatBuffer;
}
// Out-of-line failure handler invoked by the assertion macros in macros.h.
// Declared noreturn there; throwing satisfies that contract.
void assertionFailure(const char* file, int line, const char* expectation, const char* message) {
  throw Exception(file, line, expectation, message);
}
} // namespace internal
} // namespace capnproto
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef CAPNPROTO_MACROS_H_
#define CAPNPROTO_MACROS_H_
namespace capnproto {
namespace internal {
#define CAPNPROTO_EXPECT_TRUE(condition) __builtin_expect(condition, true)
#define CAPNPROTO_EXPECT_FALSE(condition) __builtin_expect(condition, false)
// Branch prediction macros.  Evaluates to the condition given, but also tells the compiler that we
// expect the condition to be true/false enough of the time that it's worth hard-coding branch
// prediction.

#define CAPNPROTO_ALWAYS_INLINE(prototype) inline prototype __attribute__((always_inline))
// Force a function to always be inlined.  Apply only to the prototype, not to the definition.

void assertionFailure(const char* file, int line, const char* expectation, const char* message)
    __attribute__((noreturn));

// CAPNPROTO_ASSERT is just like assert() except it avoids polluting the global namespace with an
// unqualified macro name and it throws an exception (derived from std::exception).
// CAPNPROTO_DEBUG_ASSERT is additionally compiled out under NDEBUG.
// Both use the `if (...); else` shape so they compose safely with a trailing
// semicolon and with surrounding if/else statements.
#ifdef NDEBUG
#define CAPNPROTO_DEBUG_ASSERT(condition, message)
#else
#define CAPNPROTO_DEBUG_ASSERT(condition, message) \
  if (CAPNPROTO_EXPECT_TRUE(condition)); else ::capnproto::internal::assertionFailure(\
      __FILE__, __LINE__, #condition, message)
#endif

// BUG FIX: CAPNPROTO_ASSERT previously expanded to an *unconditional* call to
// assertionFailure, ignoring `condition` entirely -- every use (e.g. the
// checks in initStructReference) would have thrown even when the condition
// held.  It must test the condition, like CAPNPROTO_DEBUG_ASSERT, but in all
// build modes.
#define CAPNPROTO_ASSERT(condition, message) \
  if (CAPNPROTO_EXPECT_TRUE(condition)); else ::capnproto::internal::assertionFailure(\
      __FILE__, __LINE__, #condition, message)
} // namespace internal
// Convert `value` to T using only implicit conversions.  Unlike static_cast,
// this can never accidentally perform a downcast or other unsafe conversion,
// because the conversion happens implicitly at the `return`.
template <typename T, typename U>
T implicit_cast(U value) {
  return value;
}
} // namespace capnproto
#endif // CAPNPROTO_MACROS_H_
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "wire-format.h"
#include "arena.h"
#include "descriptor.h"
namespace capnproto {
namespace internal {
namespace debug {
bool fieldIsStruct(const StructDescriptor* descriptor, int fieldNumber, int refIndex) {
return descriptor->fieldCount > fieldNumber &&
descriptor->fields[fieldNumber].size == FieldSize::REFERENCE &&
descriptor->fields[fieldNumber].offset == refIndex &&
descriptor->defaultReferences[refIndex]->kind == Descriptor::Kind::STRUCT;
}
bool fieldIsList(const StructDescriptor* descriptor, int fieldNumber, int refIndex) {
return descriptor->fieldCount > fieldNumber &&
descriptor->fields[fieldNumber].size == FieldSize::REFERENCE &&
descriptor->fields[fieldNumber].offset == refIndex &&
descriptor->defaultReferences[refIndex]->kind == Descriptor::Kind::LIST;
}
bool fieldIsData(const StructDescriptor* descriptor, int fieldNumber, int dataOffset, int bitSize) {
return descriptor->fieldCount > fieldNumber &&
descriptor->fields[fieldNumber].size != FieldSize::REFERENCE &&
sizeInBits(descriptor->fields[fieldNumber].size) == bitSize &&
descriptor->fields[fieldNumber].offset == dataOffset;
}
// Debug check:  a field of `size` bytes at `dataOffset` (in multiples of the
// field size) fits within the struct's data segment.
bool dataFieldInRange(const StructDescriptor* descriptor, uint32_t dataOffset, uint32_t size) {
  const uint32_t segmentBytes = descriptor->dataSize * sizeof(uint64_t);
  return segmentBytes >= dataOffset * size;
}
// Debug check:  the bit at bit-offset `offset` lies within the struct's data
// segment (dataSize 64-bit words = dataSize * 64 bits).
bool bitFieldInRange(const StructDescriptor* descriptor, uint32_t offset) {
  // BUG FIX: the old check compared the segment size in *bytes*
  // (dataSize * sizeof(uint64_t)) against the offset in *words* (offset / 64),
  // mixing units and accepting bit offsets up to 8x past the end of the
  // segment.  Compare word index against word count instead.
  return descriptor->dataSize > offset / 64;
}
// Debug check:  reference slot `refIndex` exists and its default descriptor is a struct.
bool refFieldIsStruct(const StructDescriptor* descriptor, int refIndex) {
  if (refIndex >= descriptor->referenceCount) return false;
  return descriptor->defaultReferences[refIndex]->kind == Descriptor::Kind::STRUCT;
}
// Debug check:  reference slot `refIndex` exists and its default descriptor is a list.
bool refFieldIsList(const StructDescriptor* descriptor, int refIndex) {
  if (refIndex >= descriptor->referenceCount) return false;
  return descriptor->defaultReferences[refIndex]->kind == Descriptor::Kind::LIST;
}
// Debug check:  the list's elements are inline structs.
bool elementsAreStructs(const ListDescriptor* descriptor) {
  return descriptor->elementSize == FieldSize::STRUCT;
}
bool elementsAreStructs(const ListDescriptor* descriptor, uint32_t wordSize) {
return descriptor->elementSize == FieldSize::STRUCT &&
descriptor->elementDescriptor->asStruct()->wordSize() == wordSize;
}
// Debug check:  the list's elements are themselves references (i.e. a list of
// lists or other pointer-typed elements).
bool elementsAreLists(const ListDescriptor* descriptor) {
  return descriptor->elementSize == FieldSize::REFERENCE;
}
bool elementsAreData(const ListDescriptor* descriptor, int bitSize) {
switch (descriptor->elementSize) {
case FieldSize::REFERENCE:
case FieldSize::KEY_REFERENCE:
case FieldSize::STRUCT:
return false;
default:
return true;
}
}
} // namespace debug
} // namespace internal
// =======================================================================================
struct WireReference {
  // A reference, in exactly the format in which it appears on the wire.

  // Copying and moving is not allowed because the offset would become wrong.
  WireReference(const WireReference& other) = delete;
  WireReference(WireReference&& other) = delete;
  WireReference& operator=(const WireReference& other) = delete;
  WireReference& operator=(WireReference&& other) = delete;

  // Type of object pointed to, stored in the low 3 bits of offsetAndTag.
  enum Tag {
    STRUCT = 0,
    LIST = 1,
    CAPABILITY = 2,
    FAR = 3
  };

  WireValue<uint32_t> offsetAndTag;
  // Word-aligned byte offset in the high bits, Tag in the low 3 bits.

  union {
    struct {
      // Struct layout fields, mirroring StructDescriptor.
      WireValue<uint8_t> fieldCount;
      WireValue<uint8_t> dataSize;
      WireValue<uint8_t> refCount;
      WireValue<uint8_t> reserved0;
    } structRef;
    // Also covers capabilities.

    struct {
      WireValue<uint32_t> elementSizeAndCount;
      // FieldSize in the top 3 bits, element count in the low 29 bits.

      CAPNPROTO_ALWAYS_INLINE(internal::FieldSize elementSize() const) {
        return static_cast<internal::FieldSize>(elementSizeAndCount.get() >> 29);
      }
      CAPNPROTO_ALWAYS_INLINE(uint32_t elementCount() const) {
        return elementSizeAndCount.get() & 0x1fffffffu;
      }
    } listRef;

    struct {
      WireValue<uint32_t> segmentId;
      // Target segment of a far pointer; the offset points at the landing pad there.
    } farRef;
  };

  CAPNPROTO_ALWAYS_INLINE(bool isNull() const) { return offsetAndTag.get() == 0; }
  // NOTE(review): a STRUCT-tagged reference (tag 0) with offset 0 also reads
  // as null -- presumably offset 0 is reserved for null; confirm.
  CAPNPROTO_ALWAYS_INLINE(uint32_t offset() const) { return offsetAndTag.get() & ~7; }
  CAPNPROTO_ALWAYS_INLINE(int tag() const) { return offsetAndTag.get() & 7; }

  CAPNPROTO_ALWAYS_INLINE(void setTagAndOffset(Tag tag, uint32_t offset)) {
    CAPNPROTO_DEBUG_ASSERT((offset & 7) == 0, "Offsets must be word-aligned.");
    offsetAndTag.set(offset | tag);
  }

  CAPNPROTO_ALWAYS_INLINE(void setStruct(
      const internal::StructDescriptor* descriptor, uint32_t offset)) {
    setTagAndOffset(STRUCT, offset);
    structRef.fieldCount.set(descriptor->fieldCount);
    structRef.dataSize.set(descriptor->dataSize);
    structRef.refCount.set(descriptor->referenceCount);
    structRef.reserved0.set(0);
  }

  CAPNPROTO_ALWAYS_INLINE(void setList(
      const internal::ListDescriptor* descriptor, uint32_t elementCount, uint32_t offset)) {
    setTagAndOffset(LIST, offset);
    CAPNPROTO_DEBUG_ASSERT((elementCount >> 29) == 0, "Lists are limited to 2**29 elements.");
    listRef.elementSizeAndCount.set(
        (static_cast<uint32_t>(descriptor->elementSize) << 29) | elementCount);
  }

  CAPNPROTO_ALWAYS_INLINE(void setFar(uint32_t segmentId, uint32_t offset)) {
    setTagAndOffset(FAR, offset);
    farRef.segmentId.set(segmentId);
  }
};
static_assert(sizeof(WireReference) == 8, "Layout of capnproto::WireReference is wrong.");
// =======================================================================================
// Integer ceiling division:  divide `numerator` by `denominator`, rounding up.
// Intended for non-negative values; `numerator + denominator` must not overflow T.
template <typename T>
static CAPNPROTO_ALWAYS_INLINE(T divRoundingUp(T numerator, T denominator));
template <typename T>
static inline T divRoundingUp(T numerator, T denominator) {
  const T biased = numerator + denominator - 1;
  return biased / denominator;
}
// Advance `ptr` by `amount` elements of type T (read-only overload).
template <typename T>
static inline const void* offsetPtr(const void* ptr, int amount) {
  const T* typed = reinterpret_cast<const T*>(ptr);
  typed += amount;
  return typed;
}
// Advance `ptr` by `amount` elements of type T (mutable overload).
template <typename T>
static inline void* offsetPtr(void* ptr, int amount) {
  T* typed = reinterpret_cast<T*>(ptr);
  typed += amount;
  return typed;
}
struct WireHelpers {
// Allocate `size` bytes for the object targeted by `ref`.  If the current
// segment is full, allocates in another segment and converts `ref` into a far
// pointer whose landing pad (a fresh WireReference) precedes the allocation;
// `ref` and `segment` are updated in place to the landing pad and new segment.
static CAPNPROTO_ALWAYS_INLINE(void* allocate(
    WireReference*& ref, Segment*& segment, uint32_t size)) {
  void* ptr = segment->allocate(size);

  if (ptr == nullptr) {
    // Need to allocate in a new segment.  The extra sizeof(WireReference)
    // bytes hold the far-pointer landing pad.

    // Loop here just in case we ever make Segment::allocate() thread-safe -- in this case another
    // thread could have grabbed the space between when we asked the arena for the segment and
    // when we asked the segment to allocate space.
    do {
      segment = segment->getArena()->getSegmentWithAvailable(size + sizeof(WireReference));
      ptr = segment->allocate(size + sizeof(WireReference));
    } while (CAPNPROTO_EXPECT_FALSE(ptr == nullptr));

    // Rewrite the original reference as a far pointer to the landing pad.
    ref->setFar(segment->getSegmentId(), segment->getOffset(ptr));
    ref = reinterpret_cast<WireReference*>(ptr);

    // Allocated space follows new reference.
    return ref + 1;
  } else {
    return ptr;
  }
}
static CAPNPROTO_ALWAYS_INLINE(void followFars(WireReference*& ref, Segment*& segment)) {
if (ref->tag() == WireReference::FAR) {
segment = segment->getArena()->getWritableSegment(ref->farRef.segmentId.get());
ref = reinterpret_cast<WireReference*>(segment->getPtrUnchecked(ref->offset()));
}
}
static CAPNPROTO_ALWAYS_INLINE(bool followFars(
const WireReference*& ref, const Segment*& segment)) {
if (ref->tag() == WireReference::FAR) {
segment = segment->getArena()->getSegment(ref->farRef.segmentId.get());
ref = reinterpret_cast<const WireReference*>(
segment->getPtrChecked(ref->offset(), 0, sizeof(WireReference)));
return CAPNPROTO_EXPECT_TRUE(ref != nullptr);
} else {
return true;
}
}
// Validate that a struct received on the wire can be read using `descriptor`,
// allowing for schema evolution in either direction (sender newer or older).
static CAPNPROTO_ALWAYS_INLINE(bool isStructCompatible(
    const internal::StructDescriptor* descriptor, const WireReference* ref)) {
  if (ref->structRef.fieldCount.get() >= descriptor->fieldCount) {
    // The incoming struct has all of the fields that we know about.
    return ref->structRef.dataSize.get() >= descriptor->dataSize &&
           ref->structRef.refCount.get() >= descriptor->referenceCount;
  } else if (ref->structRef.fieldCount.get() > 0) {
    // We know about more fields than the struct has, and the struct is non-empty.
    // The struct need only be big enough for the last field it actually has.
    const internal::FieldDescriptor* field =
        &descriptor->fields[ref->structRef.fieldCount.get() - 1];
    return ref->structRef.dataSize.get() >= field->requiredDataSize &&
           ref->structRef.refCount.get() >= field->requiredReferenceSize;
  } else {
    // The incoming struct has no fields, so is necessarily compatible.
    return true;
  }
}
// Initialize the struct targeted by `ref` (allocating it if the reference is
// null), returning a StructPtr positioned between its data and reference
// segments.  If the struct already exists, verifies its layout matches.
static CAPNPROTO_ALWAYS_INLINE(StructPtr initStructReference(
    const internal::StructDescriptor* descriptor, WireReference* ref, Segment* segment)) {
  if (ref->isNull()) {
    // Calculate the size of the struct.
    uint32_t size = (descriptor->dataSize + descriptor->referenceCount) * sizeof(uint64_t);

    // Allocate space for the new struct.
    void* ptr = allocate(ref, segment, size);

    // Advance the pointer to point between the data and reference segments.
    ptr = offsetPtr<uint64_t>(ptr, descriptor->dataSize);

    // Initialize the reference.
    ref->setStruct(descriptor, segment->getOffset(ptr));

    // Build the StructPtr.
    return StructPtr(descriptor, segment, ptr);
  } else {
    followFars(ref, segment);

    CAPNPROTO_ASSERT(ref->tag() == WireReference::STRUCT,
        "Called getStruct{Field,Element}() but existing reference is not a struct.");
    // BUG FIX: the following assertions referred to an undeclared identifier
    // `fieldDescriptor` (should be `descriptor`) and compared WireValue
    // wrappers without .get(); neither would compile.
    CAPNPROTO_ASSERT(ref->structRef.fieldCount.get() == descriptor->fieldCount,
        "Trying to update struct with incorrect field count.");
    CAPNPROTO_ASSERT(ref->structRef.dataSize.get() == descriptor->dataSize,
        "Trying to update struct with incorrect data size.");
    CAPNPROTO_ASSERT(ref->structRef.refCount.get() == descriptor->referenceCount,
        "Trying to update struct with incorrect reference count.");

    return StructPtr(descriptor, segment, segment->getPtrUnchecked(ref->offset()));
  }
}
static CAPNPROTO_ALWAYS_INLINE(ListPtr initListReference(
    const internal::ListDescriptor* descriptor, WireReference* ref,
    Segment* segment, uint32_t elementCount)) {
  // Allocates and initializes a new list of `elementCount` elements and points
  // `ref` at it.  Struct lists get an extra leading WireReference describing
  // the element layout; other lists are flat data.
  if (descriptor->elementSize == internal::FieldSize::STRUCT) {
    const internal::StructDescriptor* elementDescriptor =
        descriptor->elementDescriptor->asStruct();
    // Allocate the list, prefixed by a single WireReference.
    WireReference* structRef = reinterpret_cast<WireReference*>(
        allocate(ref, segment, sizeof(WireReference) +
            elementDescriptor->wordSize() * elementCount * sizeof(uint64_t)));
    // Point between the first element's data and reference sections, matching
    // the convention used by StructPtr.
    void* ptr = offsetPtr<uint64_t>(structRef + 1, elementDescriptor->dataSize);
    // Initialize the reference.
    ref->setList(descriptor, elementCount, segment->getOffset(structRef));
    // Initialize the struct reference.
    structRef->setStruct(elementDescriptor, segment->getOffset(ptr));
    // Build the ListPtr.
    return ListPtr(descriptor, segment, ptr, elementCount);
  } else {
    // Calculate size of the list, rounding the bit count up to whole bytes.
    uint32_t size = divRoundingUp<uint32_t>(
        static_cast<uint32_t>(sizeInBits(descriptor->elementSize)) * elementCount, 8);
    // Allocate the list.
    void* ptr = allocate(ref, segment, size);
    // Initialize the reference.
    ref->setList(descriptor, elementCount, segment->getOffset(ptr));
    // Build the ListPtr.
    return ListPtr(descriptor, segment, ptr, elementCount);
  }
}
static CAPNPROTO_ALWAYS_INLINE(ListPtr getWritableListReference(
    const internal::ListDescriptor* descriptor, WireReference* ref,
    Segment* segment)) {
  // Returns a writable pointer to an already-allocated list.  A null
  // reference yields an empty list -- NOT the default value.
  if (ref->isNull()) {
    return ListPtr(descriptor, segment, nullptr, 0);
  }
  followFars(ref, segment);
  CAPNPROTO_ASSERT(ref->tag() == WireReference::LIST,
      "Called getList{Field,Element}() but existing reference is not a list.");
  void* listPtr;
  if (descriptor->elementSize == internal::FieldSize::STRUCT) {
    // A struct list reference points at an extra WireReference which in turn
    // points at the first element.
    WireReference* structRef = reinterpret_cast<WireReference*>(
        segment->getPtrUnchecked(ref->offset()));
    listPtr = segment->getPtrUnchecked(structRef->offset());
  } else {
    listPtr = segment->getPtrUnchecked(ref->offset());
  }
  return ListPtr(descriptor, segment, listPtr, ref->listRef.elementCount());
}
static CAPNPROTO_ALWAYS_INLINE(StructReadPtr readStructReference(
    const internal::StructDescriptor* descriptor, const WireReference* ref,
    const Segment* segment, int recursionLimit)) {
  // Validates and follows a struct reference in an untrusted message.  On any
  // validation failure, reports a parse error and falls through to return a
  // StructReadPtr backed only by the default value (null ptr, zero fields).
  do {
    if (ref == nullptr || ref->isNull()) {
      break;  // Unset field: use the default value.
    }
    if (CAPNPROTO_EXPECT_FALSE(recursionLimit == 0)) {
      segment->getArena()->parseError(
          "Message is too deeply-nested or contains cycles.");
      break;
    }
    if (CAPNPROTO_EXPECT_FALSE(!followFars(ref, segment))) {
      segment->getArena()->parseError(
          "Message contains out-of-bounds far reference.");
      break;
    }
    if (CAPNPROTO_EXPECT_FALSE(ref->tag() != WireReference::STRUCT)) {
      segment->getArena()->parseError(
          "Message contains non-struct reference where struct reference was expected.");
      break;
    }
    if (CAPNPROTO_EXPECT_FALSE(!isStructCompatible(descriptor, ref))) {
      segment->getArena()->parseError(
          "Message contains struct that is too small for its field count.");
      break;
    }
    // Bounds-check the struct's data and reference sections.
    // NOTE(review): dataSize is scaled by sizeof(uint8_t) here but by
    // sizeof(uint64_t) in initStructReference -- the units look inconsistent;
    // verify which one is intended.
    const void* ptr = segment->getPtrChecked(ref->offset(),
        ref->structRef.dataSize.get() * sizeof(uint8_t),
        ref->structRef.refCount.get() * sizeof(WireReference));
    if (CAPNPROTO_EXPECT_FALSE(ptr == nullptr)) {
      segment->getArena()->parseError("Message contained out-of-bounds struct reference.");
      break;
    }
    return StructReadPtr(descriptor, segment, ptr, descriptor->defaultData,
        ref->structRef.fieldCount.get(), 0, recursionLimit - 1);
  } while (false);
  // Error/default path: zero field count means every access reads defaults.
  return StructReadPtr(descriptor, segment, nullptr, descriptor->defaultData, 0, 0, 0);
}
static CAPNPROTO_ALWAYS_INLINE(ListReadPtr readListReference(
    const internal::ListDescriptor* descriptor, const WireReference* ref, const Segment* segment,
    int recursionLimit)) {
  // Validates and follows a list reference in an untrusted message.  Handles
  // the cases where the sender encoded a struct list while we expect a
  // primitive list (or vice versa), which can happen after schema upgrades.
  // On any validation failure, reports a parse error and falls through to
  // return the default value.
  do {
    if (ref == nullptr || ref->isNull()) {
      break;  // Unset field: use the default value.
    }
    if (CAPNPROTO_EXPECT_FALSE(recursionLimit == 0)) {
      segment->getArena()->parseError(
          "Message is too deeply-nested or contains cycles.");
      break;
    }
    if (CAPNPROTO_EXPECT_FALSE(!followFars(ref, segment))) {
      segment->getArena()->parseError(
          "Message contains out-of-bounds far reference.");
      break;
    }
    if (CAPNPROTO_EXPECT_FALSE(ref->tag() != WireReference::LIST)) {
      segment->getArena()->parseError(
          "Message contains non-list reference where list reference was expected.");
      break;
    }
    if (ref->listRef.elementSize() == internal::FieldSize::STRUCT) {
      // A struct list reference actually points to a struct reference which in turn points to the
      // first struct in the list.
      const void* ptrPtr =
          segment->getPtrChecked(ref->offset(), 0, sizeof(WireReference));
      if (CAPNPROTO_EXPECT_FALSE(ptrPtr == nullptr)) {
        segment->getArena()->parseError(
            "Message contains out-of-bounds list reference.");
        break;
      }
      uint32_t size = ref->listRef.elementCount();
      // From here on, `ref` is the embedded struct reference.
      ref = reinterpret_cast<const WireReference*>(ptrPtr);
      if (CAPNPROTO_EXPECT_FALSE(ref->tag() != WireReference::STRUCT)) {
        segment->getArena()->parseError(
            "Message contains struct list reference that does not point to a struct reference.");
        break;
      }
      // Distance between consecutive elements.
      // NOTE(review): scaled by sizeof(uint8_t) -- see the unit question noted
      // in readStructReference; confirm dataSize/refCount units.
      int step = (ref->structRef.dataSize.get() + ref->structRef.refCount.get()) *
          sizeof(uint8_t);
      // Bounds-check the entire list: first element's sections plus
      // (size - 1) additional strides.
      const void* ptr = segment->getPtrChecked(ref->offset(),
          ref->structRef.dataSize.get() * sizeof(uint8_t),
          ref->structRef.refCount.get() * sizeof(WireReference) +
          step * (size - 1));
      if (CAPNPROTO_EXPECT_FALSE(ptr == nullptr)) {
        segment->getArena()->parseError(
            "Message contains out-of-bounds struct list reference.");
        break;
      }
      // If a struct list was not expected, then presumably a non-struct list was upgraded to a
      // struct list. We need to manipulate the pointer to point at the first field of the
      // struct. Together with the "stepBits", this will allow the struct list to be accessed as
      // if it were a primitive list without branching.
      ptr = offsetPtr<uint8_t>(ptr, -byteOffsetForFieldZero(descriptor->elementSize));
      // Check whether the size is compatible.
      bool compatible = true;
      switch (descriptor->elementSize) {
        case internal::FieldSize::BIT:
        case internal::FieldSize::BYTE:
        case internal::FieldSize::TWO_BYTES:
        case internal::FieldSize::FOUR_BYTES:
        case internal::FieldSize::EIGHT_BYTES:
          // Expecting a data element: the struct must have a data section.
          compatible = ref->structRef.dataSize.get() > 0;
          break;
        case internal::FieldSize::REFERENCE:
          // Expecting a reference element: the struct must have references.
          compatible = ref->structRef.refCount.get() > 0;
          break;
        case internal::FieldSize::KEY_REFERENCE:
          compatible = ref->structRef.dataSize.get() > 0 &&
              ref->structRef.refCount.get() > 0;
          break;
        case internal::FieldSize::STRUCT: {
          compatible = isStructCompatible(descriptor->elementDescriptor->asStruct(), ref);
          break;
        }
      }
      if (CAPNPROTO_EXPECT_FALSE(!compatible)) {
        segment->getArena()->parseError("A list had incompatible element type.");
        break;
      }
      // step is in bytes; ListReadPtr wants bits.
      return ListReadPtr(descriptor, segment, ptr, size, step * 8,
          ref->structRef.fieldCount.get(), recursionLimit - 1);
    } else {
      // The elements of the list are NOT structs.
      int step = sizeInBits(ref->listRef.elementSize());
      const void* ptr = segment->getPtrChecked(ref->offset(), 0,
          divRoundingUp<uint64_t>(
              implicit_cast<uint64_t>(ref->listRef.elementCount()) * step, 8));
      if (CAPNPROTO_EXPECT_FALSE(ptr == nullptr)) {
        segment->getArena()->parseError("Message contained out-of-bounds list reference.");
        break;
      }
      if (descriptor->elementSize == ref->listRef.elementSize()) {
        // Element size matches exactly; read directly.
        return ListReadPtr(descriptor, segment, ptr, ref->listRef.elementCount(),
            sizeInBits(ref->listRef.elementSize()), 0, recursionLimit);
      } else if (descriptor->elementSize == internal::FieldSize::STRUCT) {
        // We were expecting a struct, but we received a list of some other type. Perhaps a
        // non-struct list was recently upgraded to a struct list, but the sender is using the
        // old version of the protocol. We need to verify that the struct's first field matches
        // what the sender sent us.
        const internal::StructDescriptor* elementDescriptor =
            descriptor->elementDescriptor->asStruct();
        if (CAPNPROTO_EXPECT_FALSE(
            elementDescriptor->fieldCount == 0 ||
            elementDescriptor->fields[0].size != ref->listRef.elementSize())) {
          segment->getArena()->parseError("A list had incompatible element type.");
          break;
        }
        // Adjust the pointer to point where we expect it for a struct.
        ptr = offsetPtr<uint8_t>(ptr, byteOffsetForFieldZero(descriptor->elementSize));
        // Each "struct" is reported as having one field: the one the sender encoded.
        return ListReadPtr(descriptor, segment, ptr, ref->listRef.elementCount(),
            sizeInBits(ref->listRef.elementSize()), 1, recursionLimit);
      } else {
        segment->getArena()->parseError("A list had incompatible element type.");
        break;
      }
    }
  } while (false);
  // Error/default path: return the descriptor's default list.
  switch (descriptor->elementSize) {
    case internal::FieldSize::REFERENCE:
    case internal::FieldSize::KEY_REFERENCE:
    case internal::FieldSize::STRUCT:
      // Pointer-bearing defaults are resolved lazily via defaultReferences.
      return ListReadPtr(descriptor, segment, nullptr, descriptor->defaultCount, 0, 0,
          recursionLimit - 1);
    default:
      return ListReadPtr(descriptor, segment, descriptor->defaultData, descriptor->defaultCount,
          internal::sizeInBits(descriptor->elementSize), 0, recursionLimit - 1);
  }
}
};
// =======================================================================================
StructPtr StructPtr::getStructFieldInternal(int refIndex) const {
  // Locate the reference slot within this struct's reference section, then
  // delegate to WireHelpers (allocating the target struct if necessary).
  WireReference* ref = reinterpret_cast<WireReference*>(ptr) + refIndex;
  const internal::StructDescriptor* fieldDescriptor =
      descriptor->defaultReferences[refIndex]->asStruct();
  return WireHelpers::initStructReference(fieldDescriptor, ref, segment);
}
ListPtr StructPtr::initListFieldInternal(int refIndex, uint32_t elementCount) const {
  // Allocate a fresh list for the reference slot at refIndex.
  WireReference* ref = reinterpret_cast<WireReference*>(ptr) + refIndex;
  const internal::ListDescriptor* fieldDescriptor =
      descriptor->defaultReferences[refIndex]->asList();
  return WireHelpers::initListReference(fieldDescriptor, ref, segment, elementCount);
}
ListPtr StructPtr::getListFieldInternal(int refIndex) const {
  // Fetch the already-allocated list in the reference slot at refIndex.
  WireReference* ref = reinterpret_cast<WireReference*>(ptr) + refIndex;
  const internal::ListDescriptor* fieldDescriptor =
      descriptor->defaultReferences[refIndex]->asList();
  return WireHelpers::getWritableListReference(fieldDescriptor, ref, segment);
}
StructReadPtr StructPtr::asReadPtr() const {
  // A writable struct was built locally and is trusted, so hand out a very
  // large recursion budget.
  const int kInitialRecursionLimit = 1 << 30;
  return StructReadPtr(descriptor, segment, ptr, descriptor->defaultData,
                       descriptor->fieldCount, 0, kInitialRecursionLimit);
}
StructReadPtr StructReadPtr::getStructFieldInternal(int fieldNumber, unsigned int refIndex) const {
  // Read a struct field, falling back to the default value (null reference)
  // when the sender's struct does not include this field.
  //
  // BUG FIX: previously cast `ptr` -- the member array `const void* ptr[2]`
  // itself -- to a WireReference pointer, which would read the StructReadPtr's
  // own members as wire data.  The message data pointer is ptr[0].
  const WireReference* ref = nullptr;
  if (fieldNumber < fieldCount) {
    ref = reinterpret_cast<const WireReference*>(ptr[0]) + refIndex;
  }
  return WireHelpers::readStructReference(
      descriptor->defaultReferences[refIndex]->asStruct(), ref, segment, recursionLimit);
}
ListReadPtr StructReadPtr::getListFieldInternal(int fieldNumber, unsigned int refIndex) const {
  // Read a list field, falling back to the default value (null reference)
  // when the sender's struct does not include this field.
  //
  // BUG FIX: previously cast `ptr` -- the member array `const void* ptr[2]`
  // itself -- to a WireReference pointer.  The message data pointer is ptr[0].
  const WireReference* ref = nullptr;
  if (fieldNumber < fieldCount) {
    ref = reinterpret_cast<const WireReference*>(ptr[0]) + refIndex;
  }
  return WireHelpers::readListReference(
      descriptor->defaultReferences[refIndex]->asList(), ref, segment, recursionLimit);
}
StructPtr ListPtr::getStructElementInternal(unsigned int index, uint32_t elementWordSize) const {
  // Struct elements are laid out contiguously, elementWordSize words apart.
  void* elementPtr = offsetPtr<uint64_t>(ptr, elementWordSize * index);
  return StructPtr(descriptor->elementDescriptor->asStruct(), segment, elementPtr);
}
ListPtr ListPtr::initListElementInternal(unsigned int index, uint32_t size) const {
  // Each element of a list-of-lists is a WireReference; allocate the inner
  // list into the slot at `index`.
  WireReference* elementRef = reinterpret_cast<WireReference*>(ptr) + index;
  return WireHelpers::initListReference(
      descriptor->elementDescriptor->asList(), elementRef, segment, size);
}
ListPtr ListPtr::getListElementInternal(unsigned int index) const {
  // Fetch the already-allocated inner list at the slot `index`.
  WireReference* elementRef = reinterpret_cast<WireReference*>(ptr) + index;
  return WireHelpers::getWritableListReference(
      descriptor->elementDescriptor->asList(), elementRef, segment);
}
ListReadPtr ListPtr::asReadPtr() const {
  // Local lists are trusted, so use a very large recursion budget.  Struct
  // lists report each element's field count; other element types have none.
  int fieldCount = 0;
  if (descriptor->elementSize == internal::FieldSize::STRUCT) {
    fieldCount = descriptor->elementDescriptor->asStruct()->fieldCount;
  }
  return ListReadPtr(descriptor, segment, ptr, elementCount,
                     internal::sizeInBits(descriptor->elementSize), fieldCount, 1 << 30);
}
StructReadPtr ListReadPtr::getStructElementInternal(unsigned int index) const {
  // Reads the struct element at `index`.  If this ListReadPtr is backed by the
  // default value (ptr == nullptr) or the recursion limit is exhausted, falls
  // through to the error return at the bottom (null data, zero fields).
  const internal::StructDescriptor* elementDescriptor;
  if (ptr == nullptr) {
    // Default-valued list: element descriptors come from defaultReferences.
    elementDescriptor = descriptor->defaultReferences()[index]->asStruct();
  } else {
    elementDescriptor = descriptor->elementDescriptor->asStruct();
    if (CAPNPROTO_EXPECT_FALSE(recursionLimit == 0)) {
      segment->getArena()->parseError(
          "Message is too deeply-nested or contains cycles.");
    } else {
      // Elements are stepBits apart; indexBit % 8 becomes the bit-0 offset so
      // that bit lists upgraded to struct lists still read correctly.
      uint64_t indexBit = static_cast<uint64_t>(index) * stepBits;
      return StructReadPtr(
          elementDescriptor, segment, offsetPtr<uint8_t>(ptr, indexBit / 8),
          descriptor->defaultData, structFieldCount, indexBit % 8, recursionLimit - 1);
    }
  }
  // Default / error path.
  return StructReadPtr(elementDescriptor, segment, nullptr, descriptor->defaultData, 0, 0, 0);
}
ListReadPtr ListReadPtr::getListElementInternal(unsigned int index, uint32_t size) const {
  // Reads the inner list at `index`, or its default value if this list is
  // itself default-valued.
  if (ptr == nullptr) {
    return WireHelpers::readListReference(
        descriptor->defaultReferences()[index]->asList(),
        nullptr, segment, recursionLimit);
  }
  // Presumably stepBits is a multiple of 64 for reference elements -- the
  // word-granular stride below relies on that.  TODO confirm.
  const WireReference* elementRef = reinterpret_cast<const WireReference*>(
      offsetPtr<uint64_t>(ptr, index * (stepBits / 64)));
  return WireHelpers::readListReference(
      descriptor->elementDescriptor->asList(), elementRef, segment, recursionLimit);
}
} // namespace capnproto
// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef CAPNPROTO_WIRE_FORMAT_H_
#define CAPNPROTO_WIRE_FORMAT_H_
#include <inttypes.h>
#include "macros.h"
namespace capnproto {
class Arena;
class Segment;
class StructPtr;
class StructReadPtr;
class ListPtr;
class ListReadPtr;
class Capability;
struct WireReference;
struct WireHelpers;
namespace internal {
class Descriptor;
class StructDescriptor;
class ListDescriptor;
namespace debug {
// These functions are only called inside debug asserts. They are defined out-of-line so that
// we don't have to #include descriptor.h from here.
bool fieldIsStruct(const StructDescriptor* descriptor, int fieldNumber, int refIndex);
bool fieldIsList(const StructDescriptor* descriptor, int fieldNumber, int refIndex);
bool fieldIsData(const StructDescriptor* descriptor, int fieldNumber, int dataOffset,
int bitSize);
bool dataFieldInRange(const StructDescriptor* descriptor, uint32_t dataOffset, uint32_t size);
bool bitFieldInRange(const StructDescriptor* descriptor, uint32_t offset);
bool refFieldIsStruct(const StructDescriptor* descriptor, int refIndex);
bool refFieldIsList(const StructDescriptor* descriptor, int refIndex);
bool elementsAreStructs(const ListDescriptor* descriptor);
bool elementsAreStructs(const ListDescriptor* descriptor, uint32_t wordSize);
bool elementsAreLists(const ListDescriptor* descriptor);
bool elementsAreData(const ListDescriptor* descriptor, int bitSize);
} // namespace debug
} // namespace internal
// -------------------------------------------------------------------
class StructPtr {
  // A writable pointer to a struct within a message under construction.
  // Cheap value type; does not own the underlying memory.
public:
  template <typename T>
  inline T getDataField(unsigned int offset) const;
  // Get the data field value of the given type at the given offset. The offset is measured in
  // multiples of the field size, determined by the type.
  template <typename T>
  inline void setDataField(unsigned int offset, T value) const;
  // Set the data field value at the given offset. Be careful to use the correct type.
  CAPNPROTO_ALWAYS_INLINE(StructPtr getStructField(int refIndex) const);
  // Get the struct field at the given index in the reference segment. Allocates space for the
  // struct if necessary.
  CAPNPROTO_ALWAYS_INLINE(ListPtr initListField(int refIndex, uint32_t size) const);
  // Allocate a new list of the given size for the field at the given index in the reference
  // segment, and return a pointer to it.
  CAPNPROTO_ALWAYS_INLINE(ListPtr getListField(int refIndex) const);
  // Get the already-allocated list field for the given reference index. Returns an empty list --
  // NOT necessarily the default value -- if the field is not initialized.
  StructReadPtr asReadPtr() const;
  // Get a StructReadPtr pointing at the same memory.
private:
  const internal::StructDescriptor* descriptor; // Descriptor for the struct.
  Segment* segment; // Memory segment in which the struct resides.
  void* ptr; // Pointer to the location between the struct's data and reference segments.
  inline StructPtr(const internal::StructDescriptor* descriptor, Segment* segment, void* ptr)
      : descriptor(descriptor), segment(segment), ptr(ptr) {}
  StructPtr getStructFieldInternal(int refIndex) const;
  ListPtr initListFieldInternal(int refIndex, uint32_t size) const;
  ListPtr getListFieldInternal(int refIndex) const;
  // The public methods are inlined and simply wrap these "Internal" methods after doing debug
  // asserts. This way, debugging is enabled by the caller's compiler flags rather than
  // libcapnproto's debug flags.
  friend class ListPtr;
  friend struct WireHelpers;
};
class StructReadPtr {
  // A read-only pointer to a struct in a received (untrusted) message.
  // Falls back to default values for fields the sender did not encode.
public:
  template <typename T>
  inline T getDataField(int fieldNumber, unsigned int offset) const;
  // Get the data field value of the given type at the given offset. The offset is measured in
  // multiples of the field size, determined by the type.
  CAPNPROTO_ALWAYS_INLINE(
      StructReadPtr getStructField(int fieldNumber, unsigned int refIndex) const);
  // Get the struct field at the given index in the reference segment, or the default value if not
  // initialized.
  CAPNPROTO_ALWAYS_INLINE(ListReadPtr getListField(int fieldNumber, unsigned int refIndex) const);
  // Get the list field at the given index in the reference segment, or the default value if not
  // initialized.
private:
  const internal::StructDescriptor* descriptor; // Descriptor for the struct.
  const Segment* segment; // Memory segment in which the struct resides.
  const void* ptr[2];
  // ptr[0] points to the location between the struct's data and reference segments.
  // ptr[1] points to the end of the *default* data segment.
  // We put these in an array so we can choose between them without a branch:
  // ptr[fieldNumber >= fieldCount] selects message data vs. default data.
  int fieldCount; // Number of fields the struct is reported to have.
  int bit0Offset;
  // A special hack: When accessing a boolean with field number zero, pretend its offset is this
  // instead of the usual zero. This is needed to allow a boolean list to be upgraded to a list
  // of structs.
  int recursionLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.
  inline StructReadPtr(const internal::StructDescriptor* descriptor, const Segment* segment,
                       const void* ptr, const void* defaultData, int fieldCount, int bit0Offset,
                       int recursionLimit)
      : descriptor(descriptor), segment(segment), ptr{ptr, defaultData}, fieldCount(fieldCount),
        bit0Offset(bit0Offset), recursionLimit(recursionLimit) {}
  StructReadPtr getStructFieldInternal(int fieldNumber, unsigned int refIndex) const;
  ListReadPtr getListFieldInternal(int fieldNumber, unsigned int refIndex) const;
  // The public methods are inlined and simply wrap these "Internal" methods after doing debug
  // asserts. This way, debugging is enabled by the caller's compiler flags rather than
  // libcapnproto's debug flags.
  friend class ListReadPtr;
  friend class StructPtr;
  friend struct WireHelpers;
};
// -------------------------------------------------------------------
class ListPtr {
  // A writable pointer to a list within a message under construction.
public:
  inline uint32_t size();
  // The number of elements in the list.
  template <typename T>
  CAPNPROTO_ALWAYS_INLINE(T getDataElement(unsigned int index) const);
  // Get the element of the given type at the given index.
  template <typename T>
  CAPNPROTO_ALWAYS_INLINE(void setDataElement(unsigned int index, T value) const);
  // Set the element at the given index. Be careful to use the correct type.
  CAPNPROTO_ALWAYS_INLINE(
      StructPtr getStructElement(unsigned int index, uint32_t elementWordSize) const);
  // Get the struct element at the given index. elementWordSize is the size, in 64-bit words, of
  // each element.
  CAPNPROTO_ALWAYS_INLINE(ListPtr initListElement(unsigned int index, uint32_t size) const);
  // Create a new list element of the given size at the given index.
  CAPNPROTO_ALWAYS_INLINE(ListPtr getListElement(unsigned int index) const);
  // Get the existing list element at the given index.
  ListReadPtr asReadPtr() const;
  // Get a ListReadPtr pointing at the same memory.
private:
  const internal::ListDescriptor* descriptor; // Descriptor for the list.
  Segment* segment; // Memory segment in which the list resides.
  void* ptr; // Pointer to the beginning of the list.
  uint32_t elementCount; // Number of elements in the list.
  inline ListPtr(const internal::ListDescriptor* descriptor, Segment* segment,
                 void* ptr, uint32_t size)
      : descriptor(descriptor), segment(segment), ptr(ptr), elementCount(size) {}
  StructPtr getStructElementInternal(unsigned int index, uint32_t elementWordSize) const;
  ListPtr initListElementInternal(unsigned int index, uint32_t size) const;
  ListPtr getListElementInternal(unsigned int index) const;
  // The public methods are inlined and simply wrap these "Internal" methods after doing debug
  // asserts. This way, debugging is enabled by the caller's compiler flags rather than
  // libcapnproto's debug flags.
  friend class StructPtr;
  friend struct WireHelpers;
};
class ListReadPtr {
  // A read-only pointer to a list in a received (untrusted) message.
public:
  inline uint32_t size();
  // The number of elements in the list.
  template <typename T>
  CAPNPROTO_ALWAYS_INLINE(T getDataElement(unsigned int index) const);
  // Get the element of the given type at the given index.
  CAPNPROTO_ALWAYS_INLINE(StructReadPtr getStructElement(unsigned int index) const);
  // Get the struct element at the given index.
  CAPNPROTO_ALWAYS_INLINE(ListReadPtr getListElement(unsigned int index, uint32_t size) const);
  // Get the list element at the given index.
private:
  const internal::ListDescriptor* descriptor; // Descriptor for the list.
  const Segment* segment; // Memory segment in which the list resides.
  const void* ptr;
  // Pointer to the data. If NULL, use defaultReferences. (Never NULL for data lists.)
  uint32_t elementCount; // Number of elements in the list.
  unsigned int stepBits;
  // The distance between elements, in bits. This is usually the element size, but can be larger
  // if the sender upgraded a data list to a struct list. It will always be aligned properly for
  // the type. Unsigned so that division by a constant power of 2 is efficient.
  int structFieldCount;
  // If the elements are structs, the number of fields in each struct.
  int recursionLimit;
  // Limits the depth of message structures to guard against stack-overflow-based DoS attacks.
  // Once this reaches zero, further pointers will be pruned.
  inline ListReadPtr(const internal::ListDescriptor* descriptor, const Segment* segment,
                     const void* ptr, uint32_t size, int stepBits, int structFieldCount,
                     int recursionLimit)
      : descriptor(descriptor), segment(segment), ptr(ptr), elementCount(size), stepBits(stepBits),
        structFieldCount(structFieldCount), recursionLimit(recursionLimit) {}
  StructReadPtr getStructElementInternal(unsigned int index) const;
  ListReadPtr getListElementInternal(unsigned int index, uint32_t size) const;
  // The public methods are inlined and simply wrap these "Internal" methods after doing debug
  // asserts. This way, debugging is enabled by the caller's compiler flags rather than
  // libcapnproto's debug flags.
  friend class StructReadPtr;
  friend class ListPtr;
  friend struct WireHelpers;
};
// =======================================================================================
// Internal implementation details...
template <typename T>
class WireValue {
  // Wraps a primitive value as it appears on the wire. Namely, values are little-endian on the
  // wire, because little-endian is the most common endianness in modern CPUs.
  //
  // TODO: On big-endian systems, inject byte-swapping here. Most big-endian CPUs implement
  // dedicated instructions for this, so use those rather than writing a bunch of shifts and
  // masks. Note that GCC has e.g. __builtin__bswap32() for this.
  //
  // Note: In general, code that depends cares about byte ordering is bad. See:
  // http://commandcenter.blogspot.com/2012/04/byte-order-fallacy.html
  // Cap'n Proto is special because it is essentially doing compiler-like things, fussing over
  // allocation and layout of memory, in order to squeeze out every last drop of performance.
public:
  CAPNPROTO_ALWAYS_INLINE(WireValue()) {}  // Deliberately uninitialized (wire data overwrites it).
  CAPNPROTO_ALWAYS_INLINE(WireValue(T value)): value(value) {}
  CAPNPROTO_ALWAYS_INLINE(T get() const) { return value; }
  CAPNPROTO_ALWAYS_INLINE(void set(T newValue)) { value = newValue; }
private:
  T value;
};
// -------------------------------------------------------------------
template <typename T>
inline T StructPtr::getDataField(unsigned int offset) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::dataFieldInRange(descriptor, offset, sizeof(T)),
      "StructPtr::getDataField() type mismatch.");
  // ptr points past the data section, so data is indexed with a negative
  // offset.  (Negating an unsigned offset relies on pointer-arithmetic
  // wraparound -- presumably intentional; same pattern used throughout.)
  return reinterpret_cast<WireValue<T>*>(ptr)[-offset].get();
}
template <>
inline bool StructPtr::getDataField<bool>(unsigned int offset) const {
  // Booleans are packed one per bit; offset is measured in bits.
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::bitFieldInRange(descriptor, offset),
      "StructPtr::getDataField<bool>() type mismatch.");
  uint8_t byte = *(reinterpret_cast<uint8_t*>(ptr) - (offset / 8) - 1);
  return (byte & (1 << (offset % 8))) != 0;
}
template <typename T>
inline void StructPtr::setDataField(unsigned int offset, T value) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::dataFieldInRange(descriptor, offset, sizeof(T)),
      "StructPtr::setDataField() type mismatch.");
  // Data section lives below ptr; see getDataField() for the indexing scheme.
  reinterpret_cast<WireValue<T>*>(ptr)[-offset].set(value);
}
template <>
inline void StructPtr::setDataField<bool>(unsigned int offset, bool value) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::bitFieldInRange(descriptor, offset),
      "StructPtr::setDataField<bool>() type mismatch.");
  // Read-modify-write the containing byte, touching only the target bit.
  uint8_t* byte = reinterpret_cast<uint8_t*>(ptr) - (offset / 8) - 1;
  *byte = (*byte & ~(1 << (offset % 8)))
        | (static_cast<uint8_t>(value) << (offset % 8));
}
inline StructPtr StructPtr::getStructField(int refIndex) const {
  // Debug-assert the field type, then delegate to the out-of-line helper.
  CAPNPROTO_DEBUG_ASSERT(internal::debug::refFieldIsStruct(descriptor, refIndex),
                         "StructPtr::getStructField() type mismatch.");
  return getStructFieldInternal(refIndex);
}
inline ListPtr StructPtr::initListField(int refIndex, uint32_t elementCount) const {
  // Debug-assert the field type, then delegate to the out-of-line helper.
  CAPNPROTO_DEBUG_ASSERT(internal::debug::refFieldIsList(descriptor, refIndex),
                         "StructPtr::initListField() type mismatch.");
  return initListFieldInternal(refIndex, elementCount);
}
inline ListPtr StructPtr::getListField(int refIndex) const {
  // Debug-assert the field type, then delegate to the out-of-line helper.
  // BUG FIX: the assert message previously said "initListField" (copy-paste
  // from the method above), which would mislead anyone debugging a failure.
  CAPNPROTO_DEBUG_ASSERT(internal::debug::refFieldIsList(descriptor, refIndex),
                         "StructPtr::getListField() type mismatch.");
  return getListFieldInternal(refIndex);
}
// -------------------------------------------------------------------
template <typename T>
T StructReadPtr::getDataField(int fieldNumber, unsigned int offset) const {
CAPNPROTO_DEBUG_ASSERT(
internal::debug::fieldIsData(descriptor, fieldNumber, offset, sizeof(T) * 8),
"StructReadPtr::getDataField() type mismatch.");
const void* dataPtr = ptr[fieldNumber >= fieldCount];
return reinterpret_cast<WireValue<T>*>(dataPtr)[-offset].get();
}
template <>
inline bool StructReadPtr::getDataField<bool>(int fieldNumber, unsigned int offset) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::fieldIsData(descriptor, fieldNumber, offset, 1),
      "StructReadPtr::getDataField<bool>() type mismatch.");
  // This branch should always be optimized away when inlining.
  // bit0Offset supports bool lists that were upgraded to struct lists.
  if (offset == 0) offset = bit0Offset;
  // Branch-free default fallback; see the ptr[2] member comment.
  const void* dataPtr = ptr[fieldNumber >= fieldCount];
  uint8_t byte = *(reinterpret_cast<const uint8_t*>(dataPtr) - (offset / 8) - 1);
  return (byte & (1 << (offset % 8))) != 0;
}
inline StructReadPtr StructReadPtr::getStructField(int fieldNumber, unsigned int refIndex) const {
  // Debug-assert the field type, then delegate to the out-of-line helper.
  CAPNPROTO_DEBUG_ASSERT(internal::debug::fieldIsStruct(descriptor, fieldNumber, refIndex),
                         "StructReadPtr::getStructField() type mismatch.");
  return getStructFieldInternal(fieldNumber, refIndex);
}
inline ListReadPtr StructReadPtr::getListField(int fieldNumber, unsigned int refIndex) const {
  // Debug-assert the field type, then delegate to the out-of-line helper.
  CAPNPROTO_DEBUG_ASSERT(internal::debug::fieldIsList(descriptor, fieldNumber, refIndex),
                         "StructReadPtr::getListField() type mismatch.");
  return getListFieldInternal(fieldNumber, refIndex);
}
// -------------------------------------------------------------------
// Returns the number of elements in the list.
inline uint32_t ListPtr::size() { return elementCount; }
template <typename T>
inline T ListPtr::getDataElement(unsigned int index) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, sizeof(T) * 8),
      "ListPtr::getDataElement() type mismatch.");
  // Writable data lists are densely packed: plain positive indexing.
  return reinterpret_cast<WireValue<T>*>(ptr)[index].get();
}
template <>
inline bool ListPtr::getDataElement<bool>(unsigned int index) const {
  // Booleans are packed one per bit.
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, 1),
      "ListPtr::getDataElement<bool>() type mismatch.");
  uint8_t byte = *(reinterpret_cast<uint8_t*>(ptr) + (index / 8));
  return (byte & (1 << (index % 8))) != 0;
}
template <typename T>
inline void ListPtr::setDataElement(unsigned int index, T value) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, sizeof(T) * 8),
      "ListPtr::setDataElement() type mismatch.");
  // Writable data lists are densely packed: plain positive indexing.
  reinterpret_cast<WireValue<T>*>(ptr)[index].set(value);
}
template <>
inline void ListPtr::setDataElement<bool>(unsigned int index, bool value) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, 1),
      "ListPtr::setDataElement<bool>() type mismatch.");
  // Read-modify-write the containing byte, touching only the target bit.
  uint8_t* byte = reinterpret_cast<uint8_t*>(ptr) + (index / 8);
  *byte = (*byte & ~(1 << (index % 8)))
        | (static_cast<uint8_t>(value) << (index % 8));
}
inline StructPtr ListPtr::getStructElement(unsigned int index, uint32_t elementWordSize) const {
  // Debug-assert bounds and element type, then delegate.
  CAPNPROTO_DEBUG_ASSERT(index < elementCount, "List index out of range.");
  CAPNPROTO_DEBUG_ASSERT(internal::debug::elementsAreStructs(descriptor, elementWordSize),
                         "ListPtr::getStructElement() type mismatch.");
  return getStructElementInternal(index, elementWordSize);
}
inline ListPtr ListPtr::initListElement(unsigned int index, uint32_t size) const {
  // Debug-assert bounds and element type, then delegate.
  CAPNPROTO_DEBUG_ASSERT(index < elementCount, "List index out of range.");
  CAPNPROTO_DEBUG_ASSERT(internal::debug::elementsAreLists(descriptor),
                         "ListPtr::initListElement() type mismatch.");
  return initListElementInternal(index, size);
}
inline ListPtr ListPtr::getListElement(unsigned int index) const {
  // Debug-assert bounds and element type, then delegate.
  CAPNPROTO_DEBUG_ASSERT(index < elementCount, "List index out of range.");
  CAPNPROTO_DEBUG_ASSERT(internal::debug::elementsAreLists(descriptor),
                         "ListPtr::getListElement() type mismatch.");
  return getListElementInternal(index);
}
// -------------------------------------------------------------------
// Returns the number of elements in the list.
inline uint32_t ListReadPtr::size() { return elementCount; }
template <typename T>
inline T ListReadPtr::getDataElement(unsigned int index) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, sizeof(T) * 8),
      "ListReadPtr::getDataElement() type mismatch.");
  // Elements are stepBits apart (stepBits may exceed the element size if the
  // sender upgraded this to a struct list); stride in bytes here.
  return *reinterpret_cast<const T*>(
      reinterpret_cast<const uint8_t*>(ptr) + index * (stepBits / 8));
}
template <>
inline bool ListReadPtr::getDataElement<bool>(unsigned int index) const {
  CAPNPROTO_DEBUG_ASSERT(
      internal::debug::elementsAreData(descriptor, 1),
      "ListReadPtr::getDataElement<bool>() type mismatch.");
  // Bit-granular stride: supports both packed bool lists and bool lists
  // upgraded to struct lists.
  unsigned int bitIndex = index * stepBits;
  uint8_t byte = *(reinterpret_cast<const uint8_t*>(ptr) + (bitIndex / 8));
  return (byte & (1 << (bitIndex % 8))) != 0;
}
inline StructReadPtr ListReadPtr::getStructElement(unsigned int index) const {
  // Debug-assert bounds and element type, then delegate.
  CAPNPROTO_DEBUG_ASSERT(index < elementCount, "List index out of range.");
  CAPNPROTO_DEBUG_ASSERT(internal::debug::elementsAreStructs(descriptor),
                         "ListReadPtr::getStructElement() type mismatch.");
  return getStructElementInternal(index);
}
inline ListReadPtr ListReadPtr::getListElement(unsigned int index, uint32_t size) const {
  // Debug-assert bounds and element type, then delegate.
  CAPNPROTO_DEBUG_ASSERT(index < elementCount, "List index out of range.");
  CAPNPROTO_DEBUG_ASSERT(internal::debug::elementsAreLists(descriptor),
                         "ListReadPtr::getListElement() type mismatch.");
  return getListElementInternal(index, size);
}
} // namespace capnproto
#endif // CAPNPROTO_WIRE_FORMAT_H_
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment