Commit dec6bafa authored by Kenton Varda, committed by GitHub

Merge pull request #437 from sandstorm-io/overflow-safe-2017

Actually implement integer overflow guards
Parents: 0c93d0ae 34102ec9
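
For readers skimming the diff below: the guard strategy is to replace raw uint/size_t arithmetic with kj's bounded quantities (bounded(), unbound(), assertMaxBits<bits>(), and unit constants such as WORDS, POINTERS, and ELEMENTS), so that each value's maximum is tracked in the type system and untrusted sizes are checked exactly once at the boundary. The following is a minimal, self-contained sketch of that pattern, not the real kj/units.h API; SimpleBounded and checkedBound are hypothetical stand-ins.

// A minimal sketch of the bounded-arithmetic idea, NOT the real kj/units.h API;
// SimpleBounded and checkedBound are hypothetical stand-ins. The real types
// additionally carry units (WORDS, ELEMENTS, POINTERS) as seen throughout the diff.
#include <cstdint>
#include <stdexcept>

template <uint64_t maxN>
struct SimpleBounded {
  uint64_t value;  // invariant: value <= maxN, established by checkedBound() below
};

template <uint64_t maxA, uint64_t maxB>
constexpr SimpleBounded<maxA * maxB> operator*(SimpleBounded<maxA> a, SimpleBounded<maxB> b) {
  // The product's bound is computed at compile time; refuse to instantiate if that
  // bound itself cannot be represented, so silent wraparound is ruled out statically.
  static_assert(maxA == 0 || maxB <= UINT64_MAX / maxA, "bound would overflow");
  return SimpleBounded<maxA * maxB>{a.value * b.value};
}

template <uint64_t maxN>
SimpleBounded<maxN> checkedBound(uint64_t value) {
  // Untrusted runtime values (e.g. sizes read off the wire) enter the bounded world
  // through one explicit check, analogous to assertMaxBits<...>() in the diff below.
  if (value > maxN) throw std::overflow_error("integer overflow guard tripped");
  return SimpleBounded<maxN>{value};
}

int main() {
  auto elementCount = checkedBound<(1ull << 29) - 1>(1000);   // e.g. a list element count
  auto wordsPerElement = checkedBound<(1ull << 16) - 1>(2);   // e.g. a struct data size
  auto totalWords = elementCount * wordsPerElement;           // bound tracked in the type
  return totalWords.value == 2000 ? 0 : 1;
}

In the real API, compile-time constants such as ::capnp::bounded<0>() and ZERO are intended to compile away in the generated accessors and struct-size math, while unbound() re-enters plain integer land only where a bound has already been enforced.
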
......@@ -45,7 +45,7 @@ kj::Own<ClientHook> AnyPointer::Reader::getPipelinedCap(
break;
case PipelineOp::Type::GET_POINTER_FIELD:
pointer = pointer.getStruct(nullptr).getPointerField(op.pointerIndex * POINTERS);
pointer = pointer.getStruct(nullptr).getPointerField(bounded(op.pointerIndex) * POINTERS);
break;
}
}
......
......@@ -208,9 +208,9 @@ struct AnyPointer {
// Note: Does not accept INLINE_COMPOSITE for elementSize.
inline List<AnyStruct>::Builder initAsListOfAnyStruct(
uint dataWordCount, uint pointerCount, uint elementCount);
uint16_t dataWordCount, uint16_t pointerCount, uint elementCount);
inline AnyStruct::Builder initAsAnyStruct(uint dataWordCount, uint pointerCount);
inline AnyStruct::Builder initAsAnyStruct(uint16_t dataWordCount, uint16_t pointerCount);
template <typename T>
inline void setAs(ReaderFor<T> value);
......@@ -398,10 +398,10 @@ struct List<AnyPointer, Kind::OTHER> {
inline Reader(): reader(ElementSize::POINTER) {}
inline explicit Reader(_::ListReader reader): reader(reader) {}
inline uint size() const { return reader.size() / ELEMENTS; }
inline uint size() const { return unbound(reader.size() / ELEMENTS); }
inline AnyPointer::Reader operator[](uint index) const {
KJ_IREQUIRE(index < size());
return AnyPointer::Reader(reader.getPointerElement(index * ELEMENTS));
return AnyPointer::Reader(reader.getPointerElement(bounded(index) * ELEMENTS));
}
typedef _::IndexingIterator<const Reader, typename AnyPointer::Reader> Iterator;
......@@ -430,10 +430,10 @@ struct List<AnyPointer, Kind::OTHER> {
inline operator Reader() const { return Reader(builder.asReader()); }
inline Reader asReader() const { return Reader(builder.asReader()); }
inline uint size() const { return builder.size() / ELEMENTS; }
inline uint size() const { return unbound(builder.size() / ELEMENTS); }
inline AnyPointer::Builder operator[](uint index) {
KJ_IREQUIRE(index < size());
return AnyPointer::Builder(builder.getPointerElement(index * ELEMENTS));
return AnyPointer::Builder(builder.getPointerElement(bounded(index) * ELEMENTS));
}
typedef _::IndexingIterator<Builder, typename AnyPointer::Builder> Iterator;
......@@ -563,10 +563,10 @@ public:
inline Reader(): reader(ElementSize::INLINE_COMPOSITE) {}
inline explicit Reader(_::ListReader reader): reader(reader) {}
inline uint size() const { return reader.size() / ELEMENTS; }
inline uint size() const { return unbound(reader.size() / ELEMENTS); }
inline AnyStruct::Reader operator[](uint index) const {
KJ_IREQUIRE(index < size());
return AnyStruct::Reader(reader.getStructElement(index * ELEMENTS));
return AnyStruct::Reader(reader.getStructElement(bounded(index) * ELEMENTS));
}
typedef _::IndexingIterator<const Reader, typename AnyStruct::Reader> Iterator;
......@@ -595,10 +595,10 @@ public:
inline operator Reader() const { return Reader(builder.asReader()); }
inline Reader asReader() const { return Reader(builder.asReader()); }
inline uint size() const { return builder.size() / ELEMENTS; }
inline uint size() const { return unbound(builder.size() / ELEMENTS); }
inline AnyStruct::Builder operator[](uint index) {
KJ_IREQUIRE(index < size());
return AnyStruct::Builder(builder.getStructElement(index * ELEMENTS));
return AnyStruct::Builder(builder.getStructElement(bounded(index) * ELEMENTS));
}
typedef _::IndexingIterator<Builder, typename AnyStruct::Builder> Iterator;
......@@ -628,7 +628,7 @@ public:
#endif
inline ElementSize getElementSize() { return _reader.getElementSize(); }
inline uint size() { return _reader.size() / ELEMENTS; }
inline uint size() { return unbound(_reader.size() / ELEMENTS); }
inline kj::ArrayPtr<const byte> getRawBytes() { return _reader.asRawBytes(); }
......@@ -664,7 +664,7 @@ public:
#endif
inline ElementSize getElementSize() { return _builder.getElementSize(); }
inline uint size() { return _builder.size() / ELEMENTS; }
inline uint size() { return unbound(_builder.size() / ELEMENTS); }
Equality equals(AnyList::Reader right);
inline bool operator==(AnyList::Reader right) {
......@@ -781,18 +781,21 @@ inline BuilderFor<T> AnyPointer::Builder::initAs(uint elementCount) {
inline AnyList::Builder AnyPointer::Builder::initAsAnyList(
ElementSize elementSize, uint elementCount) {
return AnyList::Builder(builder.initList(elementSize, elementCount * ELEMENTS));
return AnyList::Builder(builder.initList(elementSize, bounded(elementCount) * ELEMENTS));
}
inline List<AnyStruct>::Builder AnyPointer::Builder::initAsListOfAnyStruct(
uint dataWordCount, uint pointerCount, uint elementCount) {
return List<AnyStruct>::Builder(builder.initStructList(elementCount * ELEMENTS,
_::StructSize(dataWordCount * WORDS, pointerCount * POINTERS)));
uint16_t dataWordCount, uint16_t pointerCount, uint elementCount) {
return List<AnyStruct>::Builder(builder.initStructList(bounded(elementCount) * ELEMENTS,
_::StructSize(bounded(dataWordCount) * WORDS,
bounded(pointerCount) * POINTERS)));
}
inline AnyStruct::Builder AnyPointer::Builder::initAsAnyStruct(uint dataWordCount, uint pointerCount) {
inline AnyStruct::Builder AnyPointer::Builder::initAsAnyStruct(
uint16_t dataWordCount, uint16_t pointerCount) {
return AnyStruct::Builder(builder.initStruct(
_::StructSize(dataWordCount * WORDS, pointerCount * POINTERS)));
_::StructSize(bounded(dataWordCount) * WORDS,
bounded(pointerCount) * POINTERS)));
}
template <typename T>
......@@ -960,15 +963,16 @@ struct PointerHelpers<AnyStruct, Kind::OTHER> {
PointerBuilder builder, const word* defaultValue = nullptr) {
// TODO(someday): Allow specifying the size somehow?
return AnyStruct::Builder(builder.getStruct(
_::StructSize(0 * WORDS, 0 * POINTERS), defaultValue));
_::StructSize(ZERO * WORDS, ZERO * POINTERS), defaultValue));
}
static inline void set(PointerBuilder builder, AnyStruct::Reader value) {
builder.setStruct(value._reader);
}
static inline AnyStruct::Builder init(
PointerBuilder builder, uint dataWordCount, uint pointerCount) {
PointerBuilder builder, uint16_t dataWordCount, uint16_t pointerCount) {
return AnyStruct::Builder(builder.initStruct(
StructSize(dataWordCount * WORDS, pointerCount * POINTERS)));
StructSize(bounded(dataWordCount) * WORDS,
bounded(pointerCount) * POINTERS)));
}
// TODO(soon): implement these
......@@ -991,12 +995,15 @@ struct PointerHelpers<AnyList, Kind::OTHER> {
}
static inline AnyList::Builder init(
PointerBuilder builder, ElementSize elementSize, uint elementCount) {
return AnyList::Builder(builder.initList(elementSize, elementCount * ELEMENTS));
return AnyList::Builder(builder.initList(
elementSize, bounded(elementCount) * ELEMENTS));
}
static inline AnyList::Builder init(
PointerBuilder builder, uint dataWordCount, uint pointerCount, uint elementCount) {
PointerBuilder builder, uint16_t dataWordCount, uint16_t pointerCount, uint elementCount) {
return AnyList::Builder(builder.initStructList(
elementCount * ELEMENTS, StructSize(dataWordCount * WORDS, pointerCount * POINTERS)));
bounded(elementCount) * ELEMENTS,
StructSize(bounded(dataWordCount) * WORDS,
bounded(pointerCount) * POINTERS)));
}
// TODO(soon): implement these
......@@ -1007,13 +1014,13 @@ struct PointerHelpers<AnyList, Kind::OTHER> {
template <>
struct OrphanGetImpl<AnyStruct, Kind::OTHER> {
static inline AnyStruct::Builder apply(_::OrphanBuilder& builder) {
return AnyStruct::Builder(builder.asStruct(_::StructSize(0 * WORDS, 0 * POINTERS)));
return AnyStruct::Builder(builder.asStruct(_::StructSize(ZERO * WORDS, ZERO * POINTERS)));
}
static inline AnyStruct::Reader applyReader(const _::OrphanBuilder& builder) {
return AnyStruct::Reader(builder.asStructReader(_::StructSize(0 * WORDS, 0 * POINTERS)));
return AnyStruct::Reader(builder.asStructReader(_::StructSize(ZERO * WORDS, ZERO * POINTERS)));
}
static inline void truncateListOf(_::OrphanBuilder& builder, ElementCount size) {
builder.truncate(size, _::StructSize(0 * WORDS, 0 * POINTERS));
builder.truncate(size, _::StructSize(ZERO * WORDS, ZERO * POINTERS));
}
};
......
......@@ -42,7 +42,7 @@ void ReadLimiter::unread(WordCount64 amount) {
// the limit value was not updated correctly for one or more reads, and therefore unread() could
// overflow it even if it is only unreading bytes that were actually read.
uint64_t oldValue = limit;
uint64_t newValue = oldValue + amount / WORDS;
uint64_t newValue = oldValue + unbound(amount / WORDS);
if (newValue > oldValue) {
limit = newValue;
}
......@@ -57,10 +57,24 @@ void SegmentBuilder::throwNotWritable() {
// =======================================================================================
ReaderArena::ReaderArena(MessageReader* message)
static SegmentWordCount verifySegmentSize(size_t size) {
auto gsize = bounded(size) * WORDS;
return assertMaxBits<SEGMENT_WORD_COUNT_BITS>(gsize, [&]() {
KJ_FAIL_REQUIRE("segment is too large", size);
});
}
inline ReaderArena::ReaderArena(MessageReader* message, const word* firstSegment,
SegmentWordCount firstSegmentSize)
: message(message),
readLimiter(message->getOptions().traversalLimitInWords * WORDS),
segment0(this, SegmentId(0), message->getSegment(0), &readLimiter) {}
readLimiter(bounded(message->getOptions().traversalLimitInWords) * WORDS),
segment0(this, SegmentId(0), firstSegment, firstSegmentSize, &readLimiter) {}
inline ReaderArena::ReaderArena(MessageReader* message, kj::ArrayPtr<const word> firstSegment)
: ReaderArena(message, firstSegment.begin(), verifySegmentSize(firstSegment.size())) {}
ReaderArena::ReaderArena(MessageReader* message)
: ReaderArena(message, message->getSegment(0)) {}
ReaderArena::~ReaderArena() noexcept(false) {}
......@@ -89,6 +103,8 @@ SegmentReader* ReaderArena::tryGetSegment(SegmentId id) {
return nullptr;
}
SegmentWordCount newSegmentSize = verifySegmentSize(newSegment.size());
if (*lock == nullptr) {
// OK, the segment exists, so allocate the map.
auto s = kj::heap<SegmentMap>();
......@@ -96,7 +112,8 @@ SegmentReader* ReaderArena::tryGetSegment(SegmentId id) {
*lock = kj::mv(s);
}
auto segment = kj::heap<SegmentReader>(this, id, newSegment, &readLimiter);
auto segment = kj::heap<SegmentReader>(
this, id, newSegment.begin(), newSegmentSize, &readLimiter);
SegmentReader* result = segment;
segments->insert(std::make_pair(id.value, mv(segment)));
return result;
......@@ -116,14 +133,17 @@ BuilderArena::BuilderArena(MessageBuilder* message)
BuilderArena::BuilderArena(MessageBuilder* message,
kj::ArrayPtr<MessageBuilder::SegmentInit> segments)
: message(message),
segment0(this, SegmentId(0), segments[0].space, &this->dummyLimiter, segments[0].wordsUsed) {
segment0(this, SegmentId(0), segments[0].space.begin(),
verifySegmentSize(segments[0].space.size()),
&this->dummyLimiter, verifySegmentSize(segments[0].wordsUsed)) {
if (segments.size() > 1) {
kj::Vector<kj::Own<SegmentBuilder>> builders(segments.size() - 1);
uint i = 1;
for (auto& segment: segments.slice(1, segments.size())) {
builders.add(kj::heap<SegmentBuilder>(
this, SegmentId(i++), segment.space, &this->dummyLimiter, segment.wordsUsed));
this, SegmentId(i++), segment.space.begin(), verifySegmentSize(segment.space.size()),
&this->dummyLimiter, verifySegmentSize(segment.wordsUsed)));
}
kj::Vector<kj::ArrayPtr<const word>> forOutput;
......@@ -155,15 +175,16 @@ SegmentBuilder* BuilderArena::getSegment(SegmentId id) {
}
}
BuilderArena::AllocateResult BuilderArena::allocate(WordCount amount) {
BuilderArena::AllocateResult BuilderArena::allocate(SegmentWordCount amount) {
if (segment0.getArena() == nullptr) {
// We're allocating the first segment.
kj::ArrayPtr<word> ptr = message->allocateSegment(amount / WORDS);
kj::ArrayPtr<word> ptr = message->allocateSegment(unbound(amount / WORDS));
auto actualSize = verifySegmentSize(ptr.size());
// Re-allocate segment0 in-place. This is a bit of a hack, but we have not returned any
// pointers to this segment yet, so it should be fine.
kj::dtor(segment0);
kj::ctor(segment0, this, SegmentId(0), ptr, &this->dummyLimiter);
kj::ctor(segment0, this, SegmentId(0), ptr.begin(), actualSize, &this->dummyLimiter);
segmentWithSpace = &segment0;
return AllocateResult { &segment0, segment0.allocate(amount) };
......@@ -183,7 +204,7 @@ BuilderArena::AllocateResult BuilderArena::allocate(WordCount amount) {
}
// Need to allocate a new segment.
SegmentBuilder* result = addSegmentInternal(message->allocateSegment(amount / WORDS));
SegmentBuilder* result = addSegmentInternal(message->allocateSegment(unbound(amount / WORDS)));
// Check this new segment first the next time we need to allocate.
segmentWithSpace = result;
......@@ -204,6 +225,8 @@ SegmentBuilder* BuilderArena::addSegmentInternal(kj::ArrayPtr<T> content) {
KJ_REQUIRE(segment0.getArena() != nullptr,
"Can't allocate external segments before allocating the root segment.");
auto contentSize = verifySegmentSize(content.size());
MultiSegmentState* segmentState;
KJ_IF_MAYBE(s, moreSegments) {
segmentState = *s;
......@@ -214,7 +237,8 @@ SegmentBuilder* BuilderArena::addSegmentInternal(kj::ArrayPtr<T> content) {
}
kj::Own<SegmentBuilder> newBuilder = kj::heap<SegmentBuilder>(
this, SegmentId(segmentState->builders.size() + 1), content, &this->dummyLimiter);
this, SegmentId(segmentState->builders.size() + 1),
content.begin(), contentSize, &this->dummyLimiter);
SegmentBuilder* result = newBuilder.get();
segmentState->builders.add(kj::mv(newBuilder));
......
......@@ -34,6 +34,7 @@
#include <kj/mutex.h>
#include <kj/exception.h>
#include <kj/vector.h>
#include <kj/units.h>
#include "common.h"
#include "message.h"
#include "layout.h"
......@@ -85,7 +86,7 @@ public:
inline void reset(WordCount64 limit);
KJ_ALWAYS_INLINE(bool canRead(WordCount amount, Arena* arena));
KJ_ALWAYS_INLINE(bool canRead(WordCount64 amount, Arena* arena));
void unread(WordCount64 amount);
// Adds back some words to the limit. Useful when the caller knows they are double-reading
......@@ -113,7 +114,7 @@ public:
class SegmentReader {
public:
inline SegmentReader(Arena* arena, SegmentId id, kj::ArrayPtr<const word> ptr,
inline SegmentReader(Arena* arena, SegmentId id, const word* ptr, SegmentWordCount size,
ReadLimiter* readLimiter);
KJ_ALWAYS_INLINE(bool containsInterval(const void* from, const void* to));
......@@ -129,8 +130,8 @@ public:
inline SegmentId getSegmentId();
inline const word* getStartPtr();
inline WordCount getOffsetTo(const word* ptr);
inline WordCount getSize();
inline SegmentWordCount getOffsetTo(const word* ptr);
inline SegmentWordCount getSize();
inline kj::ArrayPtr<const word> getArray();
......@@ -140,7 +141,7 @@ public:
private:
Arena* arena;
SegmentId id;
kj::ArrayPtr<const word> ptr;
kj::ArrayPtr<const word> ptr; // size guaranteed to fit in SEGMENT_WORD_COUNT_BITS bits
ReadLimiter* readLimiter;
KJ_DISALLOW_COPY(SegmentReader);
......@@ -150,19 +151,19 @@ private:
class SegmentBuilder: public SegmentReader {
public:
inline SegmentBuilder(BuilderArena* arena, SegmentId id, kj::ArrayPtr<word> ptr,
ReadLimiter* readLimiter, size_t wordsUsed = 0);
inline SegmentBuilder(BuilderArena* arena, SegmentId id, kj::ArrayPtr<const word> ptr,
inline SegmentBuilder(BuilderArena* arena, SegmentId id, word* ptr, SegmentWordCount size,
ReadLimiter* readLimiter, SegmentWordCount wordsUsed = ZERO * WORDS);
inline SegmentBuilder(BuilderArena* arena, SegmentId id, const word* ptr, SegmentWordCount size,
ReadLimiter* readLimiter);
inline SegmentBuilder(BuilderArena* arena, SegmentId id, decltype(nullptr),
ReadLimiter* readLimiter);
KJ_ALWAYS_INLINE(word* allocate(WordCount amount));
KJ_ALWAYS_INLINE(word* allocate(SegmentWordCount amount));
KJ_ALWAYS_INLINE(void checkWritable());
// Throw an exception if the segment is read-only (meaning it is a reference to external data).
KJ_ALWAYS_INLINE(word* getPtrUnchecked(WordCount offset));
KJ_ALWAYS_INLINE(word* getPtrUnchecked(SegmentWordCount offset));
// Get a writable pointer into the segment. Throws an exception if the segment is read-only (i.e.
// a reference to external immutable data).
......@@ -210,7 +211,7 @@ public:
class ReaderArena final: public Arena {
public:
ReaderArena(MessageReader* message);
explicit ReaderArena(MessageReader* message);
~ReaderArena() noexcept(false);
KJ_DISALLOW_COPY(ReaderArena);
......@@ -234,6 +235,9 @@ private:
// TODO(perf): Thread-local thing instead? Some kind of lockless map? Or do sharing of data
// in a different way, where you have to construct a new MessageReader in each thread (but
// possibly backed by the same data)?
ReaderArena(MessageReader* message, kj::ArrayPtr<const word> firstSegment);
ReaderArena(MessageReader* message, const word* firstSegment, SegmentWordCount firstSegmentSize);
};
class BuilderArena final: public Arena {
......@@ -277,7 +281,7 @@ public:
word* words;
};
AllocateResult allocate(WordCount amount);
AllocateResult allocate(SegmentWordCount amount);
// Find a segment with at least the given amount of space available and allocate the space.
// Note that allocating directly from a particular segment is much faster, but allocating from
// the arena is guaranteed to succeed. Therefore callers should try to allocate from a specific
......@@ -339,34 +343,37 @@ private:
inline ReadLimiter::ReadLimiter()
: limit(kj::maxValue) {}
inline ReadLimiter::ReadLimiter(WordCount64 limit): limit(limit / WORDS) {}
inline ReadLimiter::ReadLimiter(WordCount64 limit): limit(unbound(limit / WORDS)) {}
inline void ReadLimiter::reset(WordCount64 limit) { this->limit = limit / WORDS; }
inline void ReadLimiter::reset(WordCount64 limit) { this->limit = unbound(limit / WORDS); }
inline bool ReadLimiter::canRead(WordCount amount, Arena* arena) {
inline bool ReadLimiter::canRead(WordCount64 amount, Arena* arena) {
// Be careful not to store an underflowed value into `limit`, even if multiple threads are
// decrementing it.
uint64_t current = limit;
if (KJ_UNLIKELY(amount / WORDS > current)) {
if (KJ_UNLIKELY(unbound(amount / WORDS) > current)) {
arena->reportReadLimitReached();
return false;
} else {
limit = current - amount / WORDS;
limit = current - unbound(amount / WORDS);
return true;
}
}
// -------------------------------------------------------------------
inline SegmentReader::SegmentReader(Arena* arena, SegmentId id, kj::ArrayPtr<const word> ptr,
ReadLimiter* readLimiter)
: arena(arena), id(id), ptr(ptr), readLimiter(readLimiter) {}
inline SegmentReader::SegmentReader(Arena* arena, SegmentId id, const word* ptr,
SegmentWordCount size, ReadLimiter* readLimiter)
: arena(arena), id(id), ptr(kj::arrayPtr(ptr, unbound(size / WORDS))),
readLimiter(readLimiter) {}
inline bool SegmentReader::containsInterval(const void* from, const void* to) {
return from >= this->ptr.begin() && to <= this->ptr.end() && from <= to &&
readLimiter->canRead(
intervalLength(reinterpret_cast<const byte*>(from),
reinterpret_cast<const byte*>(to)) / BYTES_PER_WORD,
reinterpret_cast<const byte*>(to),
MAX_SEGMENT_WORDS * BYTES_PER_WORD)
/ BYTES_PER_WORD,
arena);
}
......@@ -377,32 +384,37 @@ inline bool SegmentReader::amplifiedRead(WordCount virtualAmount) {
inline Arena* SegmentReader::getArena() { return arena; }
inline SegmentId SegmentReader::getSegmentId() { return id; }
inline const word* SegmentReader::getStartPtr() { return ptr.begin(); }
inline WordCount SegmentReader::getOffsetTo(const word* ptr) {
return intervalLength(this->ptr.begin(), ptr);
inline SegmentWordCount SegmentReader::getOffsetTo(const word* ptr) {
KJ_IREQUIRE(this->ptr.begin() <= ptr && ptr <= this->ptr.end());
return intervalLength(this->ptr.begin(), ptr, MAX_SEGMENT_WORDS);
}
inline SegmentWordCount SegmentReader::getSize() {
return assumeBits<SEGMENT_WORD_COUNT_BITS>(ptr.size()) * WORDS;
}
inline WordCount SegmentReader::getSize() { return ptr.size() * WORDS; }
inline kj::ArrayPtr<const word> SegmentReader::getArray() { return ptr; }
inline void SegmentReader::unread(WordCount64 amount) { readLimiter->unread(amount); }
// -------------------------------------------------------------------
inline SegmentBuilder::SegmentBuilder(
BuilderArena* arena, SegmentId id, kj::ArrayPtr<word> ptr, ReadLimiter* readLimiter,
size_t wordsUsed)
: SegmentReader(arena, id, ptr, readLimiter), pos(ptr.begin() + wordsUsed), readOnly(false) {}
BuilderArena* arena, SegmentId id, word* ptr, SegmentWordCount size,
ReadLimiter* readLimiter, SegmentWordCount wordsUsed)
: SegmentReader(arena, id, ptr, size, readLimiter),
pos(ptr + wordsUsed), readOnly(false) {}
inline SegmentBuilder::SegmentBuilder(
BuilderArena* arena, SegmentId id, kj::ArrayPtr<const word> ptr, ReadLimiter* readLimiter)
: SegmentReader(arena, id, ptr, readLimiter),
BuilderArena* arena, SegmentId id, const word* ptr, SegmentWordCount size,
ReadLimiter* readLimiter)
: SegmentReader(arena, id, ptr, size, readLimiter),
// const_cast is safe here because the member won't ever be dereferenced because it appears
// to point to the end of the segment anyway.
pos(const_cast<word*>(ptr.end())),
readOnly(true) {}
pos(const_cast<word*>(ptr + size)), readOnly(true) {}
inline SegmentBuilder::SegmentBuilder(BuilderArena* arena, SegmentId id, decltype(nullptr),
ReadLimiter* readLimiter)
: SegmentReader(arena, id, nullptr, readLimiter), pos(nullptr), readOnly(false) {}
: SegmentReader(arena, id, nullptr, ZERO * WORDS, readLimiter),
pos(nullptr), readOnly(false) {}
inline word* SegmentBuilder::allocate(WordCount amount) {
if (intervalLength(pos, ptr.end()) < amount) {
inline word* SegmentBuilder::allocate(SegmentWordCount amount) {
if (intervalLength(pos, ptr.end(), MAX_SEGMENT_WORDS) < amount) {
// Not enough space in the segment for this allocation.
return nullptr;
} else {
......@@ -417,7 +429,7 @@ inline void SegmentBuilder::checkWritable() {
if (KJ_UNLIKELY(readOnly)) throwNotWritable();
}
inline word* SegmentBuilder::getPtrUnchecked(WordCount offset) {
inline word* SegmentBuilder::getPtrUnchecked(SegmentWordCount offset) {
return const_cast<word*>(ptr.begin() + offset);
}
......@@ -432,7 +444,7 @@ inline kj::ArrayPtr<const word> SegmentBuilder::currentlyAllocated() {
}
inline void SegmentBuilder::reset() {
word* start = getPtrUnchecked(0 * WORDS);
word* start = getPtrUnchecked(ZERO * WORDS);
memset(start, 0, (pos - start) * sizeof(word));
pos = start;
}
......
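
As a usage note for the ReadLimiter and traversal-limit plumbing above: the WordCount64 limit that canRead() decrements originates in ReaderOptions. A hedged sketch follows; segmentWords is an assumed kj::ArrayPtr<const capnp::word>, and the call shapes are the stock capnp reader API.

#include <capnp/serialize.h>

void readWithTighterLimit(kj::ArrayPtr<const capnp::word> segmentWords) {
  capnp::ReaderOptions options;
  options.traversalLimitInWords = 1024 * 1024;  // tighten the 8Mi-word (64 MiB) default
  capnp::FlatArrayMessageReader reader(segmentWords, options);
  // Reads past the limit make the arena report "read limit reached", as seen above.
  // ... use reader.getRoot<SomeStruct>() as usual ...
}
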
......@@ -650,10 +650,11 @@ struct List<T, Kind::INTERFACE> {
Reader() = default;
inline explicit Reader(_::ListReader reader): reader(reader) {}
inline uint size() const { return reader.size() / ELEMENTS; }
inline uint size() const { return unbound(reader.size() / ELEMENTS); }
inline typename T::Client operator[](uint index) const {
KJ_IREQUIRE(index < size());
return typename T::Client(reader.getPointerElement(index * ELEMENTS).getCapability());
return typename T::Client(reader.getPointerElement(
bounded(index) * ELEMENTS).getCapability());
}
typedef _::IndexingIterator<const Reader, typename T::Client> Iterator;
......@@ -682,22 +683,23 @@ struct List<T, Kind::INTERFACE> {
inline operator Reader() const { return Reader(builder.asReader()); }
inline Reader asReader() const { return Reader(builder.asReader()); }
inline uint size() const { return builder.size() / ELEMENTS; }
inline uint size() const { return unbound(builder.size() / ELEMENTS); }
inline typename T::Client operator[](uint index) {
KJ_IREQUIRE(index < size());
return typename T::Client(builder.getPointerElement(index * ELEMENTS).getCapability());
return typename T::Client(builder.getPointerElement(
bounded(index) * ELEMENTS).getCapability());
}
inline void set(uint index, typename T::Client value) {
KJ_IREQUIRE(index < size());
builder.getPointerElement(index * ELEMENTS).setCapability(kj::mv(value.hook));
builder.getPointerElement(bounded(index) * ELEMENTS).setCapability(kj::mv(value.hook));
}
inline void adopt(uint index, Orphan<T>&& value) {
KJ_IREQUIRE(index < size());
builder.getPointerElement(index * ELEMENTS).adopt(kj::mv(value));
builder.getPointerElement(bounded(index) * ELEMENTS).adopt(kj::mv(value));
}
inline Orphan<T> disown(uint index) {
KJ_IREQUIRE(index < size());
return Orphan<T>(builder.getPointerElement(index * ELEMENTS).disown());
return Orphan<T>(builder.getPointerElement(bounded(index) * ELEMENTS).disown());
}
typedef _::IndexingIterator<Builder, typename T::Client> Iterator;
......@@ -713,7 +715,7 @@ struct List<T, Kind::INTERFACE> {
private:
inline static _::ListBuilder initPointer(_::PointerBuilder builder, uint size) {
return builder.initList(ElementSize::POINTER, size * ELEMENTS);
return builder.initList(ElementSize::POINTER, bounded(size) * ELEMENTS);
}
inline static _::ListBuilder getFromPointer(_::PointerBuilder builder, const word* defaultValue) {
return builder.getList(ElementSize::POINTER, defaultValue);
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -369,7 +369,7 @@ public:
inline ListSchema getSchema() const { return schema; }
inline uint size() const { return reader.size() / ELEMENTS; }
inline uint size() const { return unbound(reader.size() / ELEMENTS); }
DynamicValue::Reader operator[](uint index) const;
typedef _::IndexingIterator<const Reader, DynamicValue::Reader> Iterator;
......@@ -411,7 +411,7 @@ public:
inline ListSchema getSchema() const { return schema; }
inline uint size() const { return builder.size() / ELEMENTS; }
inline uint size() const { return unbound(builder.size() / ELEMENTS); }
DynamicValue::Builder operator[](uint index);
void set(uint index, const DynamicValue::Reader& value);
DynamicValue::Builder init(uint index, uint size);
......
......@@ -311,8 +311,8 @@ inline constexpr uint sizeInWords() {
// Return the size, in words, of a Struct type, if allocated free-standing (not in a list).
// May be useful for pre-computing space needed in order to precisely allocate messages.
return (WordCount32(_::structSize<T>().data) +
_::structSize<T>().pointers * WORDS_PER_POINTER) / WORDS;
return unbound((upgradeBound<uint>(_::structSize<T>().data) +
_::structSize<T>().pointers * WORDS_PER_POINTER) / WORDS);
}
} // namespace capnp
......
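
One usage note for the sizeInWords() change above: callers typically use it to pre-size a message's first segment. A hedged sketch, where MyStruct stands in for any capnp-generated struct type:

#include <capnp/message.h>

void buildExactlySized() {
  // MyStruct is hypothetical; include its generated header in real code. One extra
  // word covers the root pointer; this is exact only while MyStruct's own pointer
  // fields stay null, since pointed-to objects need additional space.
  capnp::MallocMessageBuilder message(capnp::sizeInWords<MyStruct>() + 1);
  auto root = message.initRoot<MyStruct>();
  // ... set data fields on root ...
}
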
......@@ -29,7 +29,18 @@
namespace kj {
template <typename T, typename U>
String KJ_STRINGIFY(kj::Quantity<T, U> value) {
return kj::str(value / kj::unit<kj::Quantity<T, U>>());
return kj::str(unboundAs<uint64_t>(value / kj::unit<kj::Quantity<T, U>>()));
}
// Hack: Allow direct comparisons and multiplications so that we don't have to rewrite the code
// below.
template <uint64_t maxN, typename T>
inline constexpr Bounded<65535, T> operator*(uint a, Bounded<maxN, T> b) {
return assumeBits<16>(a * unbound(b));
}
template <uint b>
inline constexpr Bounded<65535, uint> operator*(uint a, BoundedConst<b>) {
return assumeBits<16>(a * b);
}
}
#endif
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -135,7 +135,7 @@ _::SegmentBuilder* MessageBuilder::getRootSegment() {
KJ_ASSERT(allocation.segment->getSegmentId() == _::SegmentId(0),
"First allocated word of new arena was not in segment ID 0.");
KJ_ASSERT(allocation.words == allocation.segment->getPtrUnchecked(0 * WORDS),
KJ_ASSERT(allocation.words == allocation.segment->getPtrUnchecked(ZERO * WORDS),
"First allocated word of new arena was not the first word in its segment.");
return allocation.segment;
}
......@@ -144,7 +144,7 @@ _::SegmentBuilder* MessageBuilder::getRootSegment() {
AnyPointer::Builder MessageBuilder::getRootInternal() {
_::SegmentBuilder* rootSegment = getRootSegment();
return AnyPointer::Builder(_::PointerBuilder::getRoot(
rootSegment, arena()->getLocalCapTable(), rootSegment->getPtrUnchecked(0 * WORDS)));
rootSegment, arena()->getLocalCapTable(), rootSegment->getPtrUnchecked(ZERO * WORDS)));
}
kj::ArrayPtr<const kj::ArrayPtr<const word>> MessageBuilder::getSegmentsForOutput() {
......
......@@ -307,17 +307,17 @@ inline ReaderFor<T> Orphan<T>::getReader() const {
template <typename T>
inline void Orphan<T>::truncate(uint size) {
_::OrphanGetImpl<ListElementType<T>>::truncateListOf(builder, size * ELEMENTS);
_::OrphanGetImpl<ListElementType<T>>::truncateListOf(builder, bounded(size) * ELEMENTS);
}
template <>
inline void Orphan<Text>::truncate(uint size) {
builder.truncateText(size * ELEMENTS);
builder.truncateText(bounded(size) * ELEMENTS);
}
template <>
inline void Orphan<Data>::truncate(uint size) {
builder.truncate(size * ELEMENTS, ElementSize::BYTE);
builder.truncate(bounded(size) * ELEMENTS, ElementSize::BYTE);
}
template <typename T>
......@@ -350,7 +350,7 @@ struct Orphanage::NewOrphanListImpl<List<T, k>> {
static inline _::OrphanBuilder apply(
_::BuilderArena* arena, _::CapTableBuilder* capTable, uint size) {
return _::OrphanBuilder::initList(
arena, capTable, size * ELEMENTS, _::ElementSizeForType<T>::value);
arena, capTable, bounded(size) * ELEMENTS, _::ElementSizeForType<T>::value);
}
};
......@@ -359,7 +359,7 @@ struct Orphanage::NewOrphanListImpl<List<T, Kind::STRUCT>> {
static inline _::OrphanBuilder apply(
_::BuilderArena* arena, _::CapTableBuilder* capTable, uint size) {
return _::OrphanBuilder::initStructList(
arena, capTable, size * ELEMENTS, _::structSize<T>());
arena, capTable, bounded(size) * ELEMENTS, _::structSize<T>());
}
};
......@@ -367,7 +367,7 @@ template <>
struct Orphanage::NewOrphanListImpl<Text> {
static inline _::OrphanBuilder apply(
_::BuilderArena* arena, _::CapTableBuilder* capTable, uint size) {
return _::OrphanBuilder::initText(arena, capTable, size * BYTES);
return _::OrphanBuilder::initText(arena, capTable, bounded(size) * BYTES);
}
};
......@@ -375,7 +375,7 @@ template <>
struct Orphanage::NewOrphanListImpl<Data> {
static inline _::OrphanBuilder apply(
_::BuilderArena* arena, _::CapTableBuilder* capTable, uint size) {
return _::OrphanBuilder::initData(arena, capTable, size * BYTES);
return _::OrphanBuilder::initData(arena, capTable, bounded(size) * BYTES);
}
};
......
This diff is collapsed.
......@@ -113,12 +113,12 @@ struct PointerHelpers<T, Kind::BLOB> {
static inline typename T::Reader get(PointerReader reader,
const void* defaultValue = nullptr,
uint defaultBytes = 0) {
return reader.getBlob<T>(defaultValue, defaultBytes * BYTES);
return reader.getBlob<T>(defaultValue, bounded(defaultBytes) * BYTES);
}
static inline typename T::Builder get(PointerBuilder builder,
const void* defaultValue = nullptr,
uint defaultBytes = 0) {
return builder.getBlob<T>(defaultValue, defaultBytes * BYTES);
return builder.getBlob<T>(defaultValue, bounded(defaultBytes) * BYTES);
}
static inline void set(PointerBuilder builder, typename T::Reader value) {
builder.setBlob<T>(value);
......@@ -127,7 +127,7 @@ struct PointerHelpers<T, Kind::BLOB> {
builder.setBlob<T>(value);
}
static inline typename T::Builder init(PointerBuilder builder, uint size) {
return builder.initBlob<T>(size * BYTES);
return builder.initBlob<T>(bounded(size) * BYTES);
}
static inline void adopt(PointerBuilder builder, Orphan<T>&& value) {
builder.adopt(kj::mv(value.builder));
......
......@@ -600,119 +600,121 @@ private:
inline ::capnp::rpc::twoparty::Side VatId::Reader::getSide() const {
return _reader.getDataField< ::capnp::rpc::twoparty::Side>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline ::capnp::rpc::twoparty::Side VatId::Builder::getSide() {
return _builder.getDataField< ::capnp::rpc::twoparty::Side>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline void VatId::Builder::setSide( ::capnp::rpc::twoparty::Side value) {
_builder.setDataField< ::capnp::rpc::twoparty::Side>(
0 * ::capnp::ELEMENTS, value);
::capnp::bounded<0>() * ::capnp::ELEMENTS, value);
}
inline ::uint32_t ProvisionId::Reader::getJoinId() const {
return _reader.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline ::uint32_t ProvisionId::Builder::getJoinId() {
return _builder.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline void ProvisionId::Builder::setJoinId( ::uint32_t value) {
_builder.setDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS, value);
::capnp::bounded<0>() * ::capnp::ELEMENTS, value);
}
inline ::uint32_t JoinKeyPart::Reader::getJoinId() const {
return _reader.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline ::uint32_t JoinKeyPart::Builder::getJoinId() {
return _builder.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline void JoinKeyPart::Builder::setJoinId( ::uint32_t value) {
_builder.setDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS, value);
::capnp::bounded<0>() * ::capnp::ELEMENTS, value);
}
inline ::uint16_t JoinKeyPart::Reader::getPartCount() const {
return _reader.getDataField< ::uint16_t>(
2 * ::capnp::ELEMENTS);
::capnp::bounded<2>() * ::capnp::ELEMENTS);
}
inline ::uint16_t JoinKeyPart::Builder::getPartCount() {
return _builder.getDataField< ::uint16_t>(
2 * ::capnp::ELEMENTS);
::capnp::bounded<2>() * ::capnp::ELEMENTS);
}
inline void JoinKeyPart::Builder::setPartCount( ::uint16_t value) {
_builder.setDataField< ::uint16_t>(
2 * ::capnp::ELEMENTS, value);
::capnp::bounded<2>() * ::capnp::ELEMENTS, value);
}
inline ::uint16_t JoinKeyPart::Reader::getPartNum() const {
return _reader.getDataField< ::uint16_t>(
3 * ::capnp::ELEMENTS);
::capnp::bounded<3>() * ::capnp::ELEMENTS);
}
inline ::uint16_t JoinKeyPart::Builder::getPartNum() {
return _builder.getDataField< ::uint16_t>(
3 * ::capnp::ELEMENTS);
::capnp::bounded<3>() * ::capnp::ELEMENTS);
}
inline void JoinKeyPart::Builder::setPartNum( ::uint16_t value) {
_builder.setDataField< ::uint16_t>(
3 * ::capnp::ELEMENTS, value);
::capnp::bounded<3>() * ::capnp::ELEMENTS, value);
}
inline ::uint32_t JoinResult::Reader::getJoinId() const {
return _reader.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline ::uint32_t JoinResult::Builder::getJoinId() {
return _builder.getDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS);
::capnp::bounded<0>() * ::capnp::ELEMENTS);
}
inline void JoinResult::Builder::setJoinId( ::uint32_t value) {
_builder.setDataField< ::uint32_t>(
0 * ::capnp::ELEMENTS, value);
::capnp::bounded<0>() * ::capnp::ELEMENTS, value);
}
inline bool JoinResult::Reader::getSucceeded() const {
return _reader.getDataField<bool>(
32 * ::capnp::ELEMENTS);
::capnp::bounded<32>() * ::capnp::ELEMENTS);
}
inline bool JoinResult::Builder::getSucceeded() {
return _builder.getDataField<bool>(
32 * ::capnp::ELEMENTS);
::capnp::bounded<32>() * ::capnp::ELEMENTS);
}
inline void JoinResult::Builder::setSucceeded(bool value) {
_builder.setDataField<bool>(
32 * ::capnp::ELEMENTS, value);
::capnp::bounded<32>() * ::capnp::ELEMENTS, value);
}
inline bool JoinResult::Reader::hasCap() const {
return !_reader.getPointerField(0 * ::capnp::POINTERS).isNull();
return !_reader.getPointerField(
::capnp::bounded<0>() * ::capnp::POINTERS).isNull();
}
inline bool JoinResult::Builder::hasCap() {
return !_builder.getPointerField(0 * ::capnp::POINTERS).isNull();
return !_builder.getPointerField(
::capnp::bounded<0>() * ::capnp::POINTERS).isNull();
}
inline ::capnp::AnyPointer::Reader JoinResult::Reader::getCap() const {
return ::capnp::AnyPointer::Reader(
_reader.getPointerField(0 * ::capnp::POINTERS));
return ::capnp::AnyPointer::Reader(_reader.getPointerField(
::capnp::bounded<0>() * ::capnp::POINTERS));
}
inline ::capnp::AnyPointer::Builder JoinResult::Builder::getCap() {
return ::capnp::AnyPointer::Builder(
_builder.getPointerField(0 * ::capnp::POINTERS));
return ::capnp::AnyPointer::Builder(_builder.getPointerField(
::capnp::bounded<0>() * ::capnp::POINTERS));
}
inline ::capnp::AnyPointer::Builder JoinResult::Builder::initCap() {
auto result = ::capnp::AnyPointer::Builder(
_builder.getPointerField(0 * ::capnp::POINTERS));
auto result = ::capnp::AnyPointer::Builder(_builder.getPointerField(
::capnp::bounded<0>() * ::capnp::POINTERS));
result.clear();
return result;
}
......
This diff is collapsed.
This diff is collapsed.
......@@ -37,6 +37,14 @@ KJ_TEST("kj::size() on native arrays") {
KJ_EXPECT(expected == 4u);
}
struct ImplicitToInt {
int i;
operator int() const {
return i;
}
};
TEST(Common, Maybe) {
{
Maybe<int> m = 123;
......@@ -166,6 +174,23 @@ TEST(Common, Maybe) {
EXPECT_EQ(0, *v); // avoid unused warning
}
}
{
// Test a case where an implicit conversion didn't used to happen correctly.
Maybe<ImplicitToInt> m(ImplicitToInt { 123 });
Maybe<uint> m2(m);
Maybe<uint> m3(kj::mv(m));
KJ_IF_MAYBE(v, m2) {
EXPECT_EQ(123, *v);
} else {
ADD_FAILURE();
}
KJ_IF_MAYBE(v, m3) {
EXPECT_EQ(123, *v);
} else {
ADD_FAILURE();
}
}
}
TEST(Common, MaybeConstness) {
......@@ -445,5 +470,19 @@ TEST(Common, ArrayAsBytes) {
}
}
KJ_TEST("kj::range()") {
uint expected = 5;
for (uint i: range(5, 10)) {
KJ_EXPECT(i == expected++);
}
KJ_EXPECT(expected == 10);
expected = 0;
for (uint i: range(0, 8)) {
KJ_EXPECT(i == expected++);
}
KJ_EXPECT(expected == 8);
}
} // namespace
} // namespace kj
This diff is collapsed.
......@@ -158,7 +158,7 @@ private:
template <typename T>
class Locked {
// Return type for `MutexGuarded<T>::lock()`. `Locked<T>` provides access to the guarded object
// Return type for `MutexGuarded<T>::lock()`. `Locked<T>` provides access to the bounded object
// and unlocks the mutex when it goes out of scope.
public:
......@@ -208,7 +208,7 @@ private:
template <typename T>
class MutexGuarded {
// An object of type T, guarded by a mutex. In order to access the object, you must lock it.
// An object of type T, bounded by a mutex. In order to access the object, you must lock it.
//
// Write locks are not "recursive" -- trying to lock again in a thread that already holds a lock
// will deadlock. Recursive write locks are usually a sign of bad design.
......@@ -223,7 +223,7 @@ class MutexGuarded {
public:
template <typename... Params>
explicit MutexGuarded(Params&&... params);
// Initialize the mutex-guarded object by passing the given parameters to its constructor.
// Initialize the mutex-bounded object by passing the given parameters to its constructor.
Locked<T> lockExclusive() const;
// Exclusively locks the object and returns it. The returned `Locked<T>` can be passed by
......
This diff is collapsed.
......@@ -20,7 +20,12 @@
// THE SOFTWARE.
#include "units.h"
#include "debug.h"
namespace kj {
void ThrowOverflow::operator()() const {
KJ_FAIL_REQUIRE("integer overflow");
}
} // namespace kj
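
For orientation: ThrowOverflow is a zero-argument functor, so the checked helpers in kj/units.h can accept either a lambda (as verifySegmentSize() does earlier in this diff) or an instance of this type as the overflow handler. A hedged sketch of the contract only, not a specific kj call site:

#include <kj/units.h>

inline uint64_t requireBelow(uint64_t value, uint64_t limit) {
  if (value > limit) kj::ThrowOverflow()();  // fails via KJ_FAIL_REQUIRE("integer overflow")
  return value;
}
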
This diff is collapsed.