Commit 850af66a authored by Kenton Varda

Add KJ_ prefix to logging/assert macros.

parent a7933e07
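The diff below is a rename of the logging/assert macros at their call sites: LOG, ASSERT, REQUIRE, DASSERT, DREQUIRE, FAIL_ASSERT, FAIL_REQUIRE, CONTEXT, and SYSCALL all gain a KJ_ prefix. As a minimal sketch of what that means for calling code, assuming the macros come from the KJ logging header (the header path and the setCount() function here are illustrative only, not part of this commit):

    #include <kj/logging.h>  // assumed location of KJ_REQUIRE and friends

    void setCount(int count) {
      // Before this commit the precondition check would have been spelled:
      //   REQUIRE(count >= 0, "count must be non-negative", count);
      // After this commit the same check reads:
      KJ_REQUIRE(count >= 0, "count must be non-negative", count);
    }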
@@ -168,7 +168,7 @@ kj::ArrayPtr<const kj::ArrayPtr<const word>> BuilderArena::getSegmentsForOutput(
       return kj::arrayPtr(&segment0ForOutput, 1);
     }
   } else {
-    DASSERT(moreSegments->forOutput.size() == moreSegments->builders.size() + 1,
+    KJ_DASSERT(moreSegments->forOutput.size() == moreSegments->builders.size() + 1,
         "moreSegments->forOutput wasn't resized correctly when the last builder was added.",
         moreSegments->forOutput.size(), moreSegments->builders.size());
@@ -181,7 +181,7 @@ struct UseScratch {
   word* words;
   ScratchSpace() {
-    REQUIRE(scratchCounter < 6, "Too many scratch spaces needed at once.");
+    KJ_REQUIRE(scratchCounter < 6, "Too many scratch spaces needed at once.");
     words = scratchSpace + scratchCounter++ * SCRATCH_SIZE;
   }
   ~ScratchSpace() {
@@ -128,8 +128,8 @@ void TextBlob::allocate(size_t textSize, size_t branchCount,
 }
 void TextBlob::fill(char* textPos, Branch* branchesPos) {
-  ASSERT(textPos == text.end(), textPos - text.end());
-  ASSERT(branchesPos == branches.end(), branchesPos - branches.end());
+  KJ_ASSERT(textPos == text.end(), textPos - text.end());
+  KJ_ASSERT(branchesPos == branches.end(), branchesPos - branches.end());
 }
 template <typename First, typename... Rest>
@@ -217,7 +217,7 @@ Text::Reader getUnqualifiedName(Schema schema) {
       return nested.getName();
     }
   }
-  FAIL_REQUIRE("A schema Node's supposed scope did not contain the node as a NestedNode.");
+  KJ_FAIL_REQUIRE("A schema Node's supposed scope did not contain the node as a NestedNode.");
   return "(?)";
 }
@@ -365,22 +365,22 @@ TextBlob genValue(schema::Type::Reader type, schema::Value::Reader value, Schema
     case schema::Value::Body::TEXT_VALUE: return text(DynamicValue::Reader(body.getTextValue()));
     case schema::Value::Body::DATA_VALUE: return text(DynamicValue::Reader(body.getDataValue()));
     case schema::Value::Body::LIST_VALUE: {
-      REQUIRE(type.getBody().which() == schema::Type::Body::LIST_TYPE, "type/value mismatch");
+      KJ_REQUIRE(type.getBody().which() == schema::Type::Body::LIST_TYPE, "type/value mismatch");
       auto value = body.getListValue<DynamicList>(
           ListSchema::of(type.getBody().getListType(), scope));
       return text(value);
     }
     case schema::Value::Body::ENUM_VALUE: {
-      REQUIRE(type.getBody().which() == schema::Type::Body::ENUM_TYPE, "type/value mismatch");
+      KJ_REQUIRE(type.getBody().which() == schema::Type::Body::ENUM_TYPE, "type/value mismatch");
       auto enumNode = scope.getDependency(type.getBody().getEnumType()).asEnum().getProto();
       auto enumType = enumNode.getBody().getEnumNode();
       auto enumerants = enumType.getEnumerants();
-      REQUIRE(body.getEnumValue() < enumerants.size(),
+      KJ_REQUIRE(body.getEnumValue() < enumerants.size(),
           "Enum value out-of-range.", body.getEnumValue(), enumNode.getDisplayName());
       return text(enumerants[body.getEnumValue()].getName());
     }
     case schema::Value::Body::STRUCT_VALUE: {
-      REQUIRE(type.getBody().which() == schema::Type::Body::STRUCT_TYPE, "type/value mismatch");
+      KJ_REQUIRE(type.getBody().which() == schema::Type::Body::STRUCT_TYPE, "type/value mismatch");
       auto value = body.getStructValue<DynamicStruct>(
           scope.getDependency(type.getBody().getStructType()).asStruct());
       return text(value);
@@ -400,7 +400,7 @@ TextBlob genAnnotation(schema::Annotation::Reader annotation,
     const char* prefix = " ", const char* suffix = "") {
   auto decl = schemaLoader.get(annotation.getId());
   auto body = decl.getProto().getBody();
-  REQUIRE(body.which() == schema::Node::Body::ANNOTATION_NODE);
+  KJ_REQUIRE(body.which() == schema::Node::Body::ANNOTATION_NODE);
   auto annDecl = body.getAnnotationNode();
   return text(prefix, "$", nodeName(decl, scope), "(",
@@ -468,12 +468,12 @@ TextBlob genDecl(Schema schema, Text::Reader name, uint64_t scopeId, Indent inde
   auto proto = schema.getProto();
   if (proto.getScopeId() != scopeId) {
     // This appears to be an alias for something declared elsewhere.
-    FAIL_REQUIRE("Aliases not implemented.");
+    KJ_FAIL_REQUIRE("Aliases not implemented.");
   }
   switch (proto.getBody().which()) {
     case schema::Node::Body::FILE_NODE:
-      FAIL_REQUIRE("Encountered nested file node.");
+      KJ_FAIL_REQUIRE("Encountered nested file node.");
       break;
     case schema::Node::Body::STRUCT_NODE: {
       auto body = proto.getBody().getStructNode();
@@ -578,7 +578,7 @@ TextBlob genNestedDecls(Schema schema, Indent indent) {
 TextBlob genFile(Schema file) {
   auto proto = file.getProto();
   auto body = proto.getBody();
-  REQUIRE(body.which() == schema::Node::Body::FILE_NODE, "Expected a file node.",
+  KJ_REQUIRE(body.which() == schema::Node::Body::FILE_NODE, "Expected a file node.",
       (uint)body.which());
   return text(
This diff is collapsed.
@@ -279,7 +279,7 @@ UnionState initUnion(Func&& initializer) {
   initializer(builder.getRoot<StructType>());
   kj::ArrayPtr<const word> segment = builder.getSegmentsForOutput()[0];
-  ASSERT(segment.size() > 2, segment.size());
+  KJ_ASSERT(segment.size() > 2, segment.size());
   // Find the offset of the first set bit after the union discriminants.
   int offset = 0;
@@ -102,12 +102,12 @@ struct WirePointer {
   }
   KJ_ALWAYS_INLINE(WordCount farPositionInSegment() const) {
-    DREQUIRE(kind() == FAR,
+    KJ_DREQUIRE(kind() == FAR,
         "positionInSegment() should only be called on FAR pointers.");
     return (offsetAndKind.get() >> 3) * WORDS;
   }
   KJ_ALWAYS_INLINE(bool isDoubleFar() const) {
-    DREQUIRE(kind() == FAR,
+    KJ_DREQUIRE(kind() == FAR,
         "isDoubleFar() should only be called on FAR pointers.");
     return (offsetAndKind.get() >> 2) & 1;
   }
@@ -155,12 +155,12 @@ struct WirePointer {
   }
   KJ_ALWAYS_INLINE(void set(FieldSize es, ElementCount ec)) {
-    DREQUIRE(ec < (1 << 29) * ELEMENTS, "Lists are limited to 2**29 elements.");
+    KJ_DREQUIRE(ec < (1 << 29) * ELEMENTS, "Lists are limited to 2**29 elements.");
     elementSizeAndCount.set(((ec / ELEMENTS) << 3) | static_cast<int>(es));
   }
   KJ_ALWAYS_INLINE(void setInlineComposite(WordCount wc)) {
-    DREQUIRE(wc < (1 << 29) * WORDS, "Inline composite lists are limited to 2**29 words.");
+    KJ_DREQUIRE(wc < (1 << 29) * WORDS, "Inline composite lists are limited to 2**29 words.");
     elementSizeAndCount.set(((wc / WORDS) << 3) |
         static_cast<int>(FieldSize::INLINE_COMPOSITE));
   }
@@ -380,7 +380,7 @@ struct WireHelpers {
       case FieldSize::INLINE_COMPOSITE: {
         WirePointer* elementTag = reinterpret_cast<WirePointer*>(ptr);
-        ASSERT(elementTag->kind() == WirePointer::STRUCT,
+        KJ_ASSERT(elementTag->kind() == WirePointer::STRUCT,
             "Don't know how to handle non-STRUCT inline composite.");
         WordCount dataSize = elementTag->structRef.dataSize.get();
         WirePointerCount pointerCount = elementTag->structRef.ptrCount.get();
@@ -553,6 +553,7 @@ struct WireHelpers {
   }
   // -----------------------------------------------------------------
+  // Copy from an unchecked message.
   static KJ_ALWAYS_INLINE(
       void copyStruct(SegmentBuilder* segment, word* dst, const word* src,
@@ -640,7 +641,7 @@ struct WireHelpers {
         const word* srcElement = srcPtr + POINTER_SIZE_IN_WORDS;
         word* dstElement = dstPtr + POINTER_SIZE_IN_WORDS;
-        ASSERT(srcTag->kind() == WirePointer::STRUCT,
+        KJ_ASSERT(srcTag->kind() == WirePointer::STRUCT,
            "INLINE_COMPOSITE of lists is not yet supported.");
         uint n = srcTag->inlineCompositeListElementCount() / ELEMENTS;
@@ -656,7 +657,7 @@ struct WireHelpers {
         break;
       }
       default:
-        FAIL_REQUIRE("Copy source message contained unexpected kind.");
+        KJ_FAIL_REQUIRE("Copy source message contained unexpected kind.");
         break;
     }
@@ -689,7 +690,7 @@ struct WireHelpers {
       // Darn, need a double-far.
       SegmentBuilder* farSegment = srcSegment->getArena()->getSegmentWithAvailable(2 * WORDS);
       landingPad = reinterpret_cast<WirePointer*>(farSegment->allocate(2 * WORDS));
-      DASSERT(landingPad != nullptr,
+      KJ_DASSERT(landingPad != nullptr,
           "getSegmentWithAvailable() returned segment without space available.");
       landingPad[0].setFar(false, srcSegment->getOffsetTo(src->target()));
@@ -796,7 +797,7 @@ struct WireHelpers {
   static KJ_ALWAYS_INLINE(ListBuilder initListPointer(
       WirePointer* ref, SegmentBuilder* segment, ElementCount elementCount,
       FieldSize elementSize)) {
-    DREQUIRE(elementSize != FieldSize::INLINE_COMPOSITE,
+    KJ_DREQUIRE(elementSize != FieldSize::INLINE_COMPOSITE,
         "Should have called initStructListPointer() instead.");
     BitCount dataSize = dataBitsPerElement(elementSize) * ELEMENTS;
@@ -848,7 +849,7 @@ struct WireHelpers {
   static KJ_ALWAYS_INLINE(ListBuilder getWritableListPointer(
       WirePointer* origRef, SegmentBuilder* origSegment, FieldSize elementSize,
       const word* defaultValue)) {
-    DREQUIRE(elementSize != FieldSize::INLINE_COMPOSITE,
+    KJ_DREQUIRE(elementSize != FieldSize::INLINE_COMPOSITE,
        "Use getStructList{Element,Field}() for structs.");
     if (origRef->isNull()) {
@@ -886,7 +887,7 @@ struct WireHelpers {
       // Read the tag to get the actual element count.
       WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
-      REQUIRE(tag->kind() == WirePointer::STRUCT,
+      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
       ptr += POINTER_SIZE_IN_WORDS;
@@ -915,7 +916,7 @@ struct WireHelpers {
         break;
       case FieldSize::INLINE_COMPOSITE:
-        FAIL_ASSERT("Can't get here.");
+        KJ_FAIL_ASSERT("Can't get here.");
         break;
     }
@@ -1148,11 +1149,11 @@ struct WireHelpers {
     } else {
       // If oldSize were POINTER or EIGHT_BYTES then the preferred size must be
       // INLINE_COMPOSITE because any other compatible size would not require an upgrade.
-      ASSERT(oldSize < FieldSize::EIGHT_BYTES);
+      KJ_ASSERT(oldSize < FieldSize::EIGHT_BYTES);
       // If the preferred size were BIT then oldSize must be VOID, but we handled that case
       // above.
-      ASSERT(elementSize.preferredListEncoding >= FieldSize::BIT);
+      KJ_ASSERT(elementSize.preferredListEncoding >= FieldSize::BIT);
       // OK, so the expected list elements are all data and between 1 byte and 1 word each,
       // and the old element are data between 1 bit and 4 bytes. We're upgrading from one
@@ -1226,9 +1227,9 @@ struct WireHelpers {
     } else {
       word* ptr = followFars(ref, segment);
-      REQUIRE(ref->kind() == WirePointer::LIST,
+      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getText{Field,Element}() but existing pointer is not a list.");
-      REQUIRE(ref->listRef.elementSize() == FieldSize::BYTE,
+      KJ_REQUIRE(ref->listRef.elementSize() == FieldSize::BYTE,
          "Called getText{Field,Element}() but existing list pointer is not byte-sized.");
       // Subtract 1 from the size for the NUL terminator.
@@ -1263,9 +1264,9 @@ struct WireHelpers {
     } else {
       word* ptr = followFars(ref, segment);
-      REQUIRE(ref->kind() == WirePointer::LIST,
+      KJ_REQUIRE(ref->kind() == WirePointer::LIST,
          "Called getData{Field,Element}() but existing pointer is not a list.");
-      REQUIRE(ref->listRef.elementSize() == FieldSize::BYTE,
+      KJ_REQUIRE(ref->listRef.elementSize() == FieldSize::BYTE,
          "Called getData{Field,Element}() but existing list pointer is not byte-sized.");
       return Data::Builder(reinterpret_cast<char*>(ptr), ref->listRef.elementCount() / ELEMENTS);
@@ -1291,7 +1292,7 @@ struct WireHelpers {
     if (ref->listRef.elementSize() == FieldSize::INLINE_COMPOSITE) {
       // Read the tag to get the actual element count.
       WirePointer* tag = reinterpret_cast<WirePointer*>(ptr);
-      REQUIRE(tag->kind() == WirePointer::STRUCT,
+      KJ_REQUIRE(tag->kind() == WirePointer::STRUCT,
          "INLINE_COMPOSITE list with non-STRUCT elements not supported.");
       // First list element is at tag + 1 pointer.
@@ -1364,7 +1365,7 @@ struct WireHelpers {
       case 32: elementSize = FieldSize::FOUR_BYTES; break;
       case 64: elementSize = FieldSize::EIGHT_BYTES; break;
       default:
-        FAIL_ASSERT("invalid list step size", value.step * ELEMENTS / BITS);
+        KJ_FAIL_ASSERT("invalid list step size", value.step * ELEMENTS / BITS);
         break;
     }
@@ -1951,7 +1952,7 @@ ObjectReader StructReader::getObjectField(
 }
 const word* StructReader::getUncheckedPointer(WirePointerCount ptrIndex) const {
-  REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
+  KJ_REQUIRE(segment == nullptr, "getUncheckedPointer() only allowed on unchecked messages.");
   return reinterpret_cast<const word*>(pointers + ptrIndex);
 }
@@ -2142,7 +2143,7 @@ StructReader ListReader::getStructElement(ElementCount index) const {
       reinterpret_cast<const WirePointer*>(structData + structDataSize / BITS_PER_BYTE);
   // This check should pass if there are no bugs in the list pointer validation code.
-  DASSERT(structPointerCount == 0 * POINTERS ||
+  KJ_DASSERT(structPointerCount == 0 * POINTERS ||
      (uintptr_t)structPointers % sizeof(WirePointer) == 0,
      "Pointer segment of struct list element not aligned.");
@@ -2153,7 +2154,7 @@ StructReader ListReader::getStructElement(ElementCount index) const {
 }
 static const WirePointer* checkAlignment(const void* ptr) {
-  DASSERT((uintptr_t)ptr % sizeof(WirePointer) == 0,
+  KJ_DASSERT((uintptr_t)ptr % sizeof(WirePointer) == 0,
      "Pointer segment of struct list element not aligned.");
   return reinterpret_cast<const WirePointer*>(ptr);
 }
@@ -80,10 +80,10 @@ internal::SegmentBuilder* MessageBuilder::getRootSegment() {
   WordCount ptrSize = 1 * POINTERS * WORDS_PER_POINTER;
   internal::SegmentBuilder* segment = arena()->getSegmentWithAvailable(ptrSize);
-  ASSERT(segment->getSegmentId() == internal::SegmentId(0),
+  KJ_ASSERT(segment->getSegmentId() == internal::SegmentId(0),
      "First allocated word of new arena was not in segment ID 0.");
   word* location = segment->allocate(ptrSize);
-  ASSERT(location == segment->getPtrUnchecked(0 * WORDS),
+  KJ_ASSERT(location == segment->getPtrUnchecked(0 * WORDS),
      "First allocated word of new arena was not the first word in its segment.");
   return segment;
 }
@@ -146,10 +146,10 @@ MallocMessageBuilder::MallocMessageBuilder(
     kj::ArrayPtr<word> firstSegment, AllocationStrategy allocationStrategy)
     : nextSize(firstSegment.size()), allocationStrategy(allocationStrategy),
       ownFirstSegment(false), returnedFirstSegment(false), firstSegment(firstSegment.begin()) {
-  REQUIRE(firstSegment.size() > 0, "First segment size must be non-zero.");
+  KJ_REQUIRE(firstSegment.size() > 0, "First segment size must be non-zero.");
   // Checking just the first word should catch most cases of failing to zero the segment.
-  REQUIRE(*reinterpret_cast<uint64_t*>(firstSegment.begin()) == 0,
+  KJ_REQUIRE(*reinterpret_cast<uint64_t*>(firstSegment.begin()) == 0,
      "First segment must be zeroed.");
 }
@@ -161,7 +161,7 @@ MallocMessageBuilder::~MallocMessageBuilder() {
     // Must zero first segment.
     kj::ArrayPtr<const kj::ArrayPtr<const word>> segments = getSegmentsForOutput();
     if (segments.size() > 0) {
-      ASSERT(segments[0].begin() == firstSegment,
+      KJ_ASSERT(segments[0].begin() == firstSegment,
          "First segment in getSegmentsForOutput() is not the first segment allocated?");
       memset(firstSegment, 0, segments[0].size() * sizeof(word));
     }
@@ -218,12 +218,12 @@ FlatMessageBuilder::FlatMessageBuilder(kj::ArrayPtr<word> array): array(array),
 FlatMessageBuilder::~FlatMessageBuilder() {}
 void FlatMessageBuilder::requireFilled() {
-  REQUIRE(getSegmentsForOutput()[0].end() == array.end(),
+  KJ_REQUIRE(getSegmentsForOutput()[0].end() == array.end(),
      "FlatMessageBuilder's buffer was too large.");
 }
 kj::ArrayPtr<word> FlatMessageBuilder::allocateSegment(uint minimumSize) {
-  REQUIRE(!allocated, "FlatMessageBuilder's buffer was not large enough.");
+  KJ_REQUIRE(!allocated, "FlatMessageBuilder's buffer was not large enough.");
   allocated = true;
   return array;
 }
@@ -84,7 +84,7 @@ public:
     nodeName = node.getDisplayName();
     dependencies.clear();
-    CONTEXT("validating schema node", nodeName, (uint)node.getBody().which());
+    KJ_CONTEXT("validating schema node", nodeName, (uint)node.getBody().which());
     switch (node.getBody().which()) {
       case schema::Node::Body::FILE_NODE:
@@ -119,7 +119,7 @@ public:
     for (auto& dep: dependencies) {
       result[pos++] = dep.second;
     }
-    DASSERT(pos == *count);
+    KJ_DASSERT(pos == *count);
     return result;
   }
@@ -131,7 +131,7 @@ public:
     for (auto& member: members) {
       result[pos++] = internal::RawSchema::MemberInfo(member.first.first, member.second);
     }
-    DASSERT(pos == *count);
+    KJ_DASSERT(pos == *count);
     return result;
   }
@@ -218,7 +218,7 @@ private:
     uint index = 0;
     for (auto member: members) {
-      CONTEXT("validating struct member", member.getName());
+      KJ_CONTEXT("validating struct member", member.getName());
       validate(member, sawCodeOrder, sawOrdinal, dataSizeInBits, pointerCount, 0, index++);
     }
   }
@@ -270,7 +270,7 @@ private:
     uint subIndex = 0;
     for (auto uMember: uMembers) {
-      CONTEXT("validating union member", uMember.getName());
+      KJ_CONTEXT("validating union member", uMember.getName());
       VALIDATE_SCHEMA(
           uMember.getBody().which() == schema::StructNode::Member::Body::FIELD_MEMBER,
           "Union members must be fields.");
@@ -307,7 +307,7 @@ private:
     uint index = 0;
     for (auto method: methods) {
-      CONTEXT("validating method", method.getName());
+      KJ_CONTEXT("validating method", method.getName());
       validateMemberName(method.getName(), 0, index++);
       VALIDATE_SCHEMA(method.getCodeOrder() < methods.size() &&
@@ -317,7 +317,7 @@ private:
       auto params = method.getParams();
       for (auto param: params) {
-        CONTEXT("validating parameter", param.getName());
+        KJ_CONTEXT("validating parameter", param.getName());
         uint dummy1;
         bool dummy2;
         validate(param.getType(), param.getDefaultValue(), &dummy1, &dummy2);
@@ -444,10 +444,10 @@ public:
   bool shouldReplace(schema::Node::Reader existingNode, schema::Node::Reader replacement,
                      bool replacementIsNative) {
-    CONTEXT("checking compatibility with previously-loaded node of the same id",
+    KJ_CONTEXT("checking compatibility with previously-loaded node of the same id",
         existingNode.getDisplayName());
-    DREQUIRE(existingNode.getId() == replacement.getId());
+    KJ_DREQUIRE(existingNode.getId() == replacement.getId());
     nodeName = existingNode.getDisplayName();
     compatibility = EQUIVALENT;
@@ -593,7 +593,7 @@ private:
   void checkCompatibility(schema::StructNode::Member::Reader member,
                           schema::StructNode::Member::Reader replacement) {
-    CONTEXT("comparing struct member", member.getName());
+    KJ_CONTEXT("comparing struct member", member.getName());
     switch (member.getBody().which()) {
       case schema::StructNode::Member::Body::FIELD_MEMBER: {
@@ -665,7 +665,7 @@ private:
   void checkCompatibility(schema::InterfaceNode::Method::Reader method,
                           schema::InterfaceNode::Method::Reader replacement) {
-    CONTEXT("comparing method", method.getName());
+    KJ_CONTEXT("comparing method", method.getName());
     auto params = method.getParams();
     auto replacementParams = replacement.getParams();
@@ -681,7 +681,7 @@ private:
       auto param = params[i];
       auto replacementParam = replacementParams[i];
-      CONTEXT("comparing parameter", param.getName());
+      KJ_CONTEXT("comparing parameter", param.getName());
       checkCompatibility(param.getType(), replacementParam.getType(),
                          NO_UPGRADE_TO_STRUCT);
@@ -1026,7 +1026,7 @@ internal::RawSchema* SchemaLoader::Impl::loadNative(const internal::RawSchema* n
   if (slot == nullptr) {
     slot = allocate<internal::RawSchema>();
   } else if (slot->canCastTo != nullptr) {
-    REQUIRE(slot->canCastTo == nativeSchema,
+    KJ_REQUIRE(slot->canCastTo == nativeSchema,
        "two different compiled-in type have the same type ID",
        reader.getId(), reader.getDisplayName(),
        readMessageUnchecked<schema::Node>(slot->canCastTo->encodedNode).getDisplayName());
@@ -1079,7 +1079,7 @@ internal::RawSchema* SchemaLoader::Impl::loadEmpty(
     case schema::Node::Body::FILE_NODE:
     case schema::Node::Body::CONST_NODE:
     case schema::Node::Body::ANNOTATION_NODE:
-      FAIL_REQUIRE("Not a type.");
+      KJ_FAIL_REQUIRE("Not a type.");
       break;
   }
@@ -1111,7 +1111,7 @@ SchemaLoader::~SchemaLoader() {}
 Schema SchemaLoader::get(uint64_t id) const {
   internal::RawSchema* raw = impl->tryGet(id);
-  REQUIRE(raw != nullptr, "no schema node loaded for id", id);
+  KJ_REQUIRE(raw != nullptr, "no schema node loaded for id", id);
   return Schema(raw);
 }
@@ -51,33 +51,33 @@ Schema Schema::getDependency(uint64_t id) const {
     }
   }
-  FAIL_REQUIRE("Requested ID not found in dependency table.", id);
+  KJ_FAIL_REQUIRE("Requested ID not found in dependency table.", id);
   return Schema();
 }
 StructSchema Schema::asStruct() const {
-  REQUIRE(getProto().getBody().which() == schema::Node::Body::STRUCT_NODE,
+  KJ_REQUIRE(getProto().getBody().which() == schema::Node::Body::STRUCT_NODE,
      "Tried to use non-struct schema as a struct.",
      getProto().getDisplayName());
   return StructSchema(raw);
 }
 EnumSchema Schema::asEnum() const {
-  REQUIRE(getProto().getBody().which() == schema::Node::Body::ENUM_NODE,
+  KJ_REQUIRE(getProto().getBody().which() == schema::Node::Body::ENUM_NODE,
      "Tried to use non-enum schema as an enum.",
      getProto().getDisplayName());
   return EnumSchema(raw);
 }
 InterfaceSchema Schema::asInterface() const {
-  REQUIRE(getProto().getBody().which() == schema::Node::Body::INTERFACE_NODE,
+  KJ_REQUIRE(getProto().getBody().which() == schema::Node::Body::INTERFACE_NODE,
      "Tried to use non-interface schema as an interface.",
      getProto().getDisplayName());
   return InterfaceSchema(raw);
 }
 void Schema::requireUsableAs(const internal::RawSchema* expected) {
-  REQUIRE(raw == expected ||
+  KJ_REQUIRE(raw == expected ||
      (raw != nullptr && expected != nullptr && raw->canCastTo == expected),
      "This schema is not compatible with the requested native type.");
 }
@@ -132,7 +132,7 @@ StructSchema::Member StructSchema::getMemberByName(Text::Reader name) const {
   KJ_IF_MAYBE(member, findMemberByName(name)) {
     return *member;
   } else {
-    FAIL_REQUIRE("struct has no such member", name);
+    KJ_FAIL_REQUIRE("struct has no such member", name);
   }
 }
@@ -142,7 +142,7 @@ kj::Maybe<StructSchema::Union> StructSchema::Member::getContainingUnion() const
 }
 StructSchema::Union StructSchema::Member::asUnion() const {
-  REQUIRE(proto.getBody().which() == schema::StructNode::Member::Body::UNION_MEMBER,
+  KJ_REQUIRE(proto.getBody().which() == schema::StructNode::Member::Body::UNION_MEMBER,
      "Tried to use non-union struct member as a union.",
      parent.getProto().getDisplayName(), proto.getName());
   return Union(*this);
@@ -160,7 +160,7 @@ StructSchema::Member StructSchema::Union::getMemberByName(Text::Reader name) con
   KJ_IF_MAYBE(member, findMemberByName(name)) {
     return *member;
   } else {
-    FAIL_REQUIRE("union has no such member", name);
+    KJ_FAIL_REQUIRE("union has no such member", name);
   }
 }
@@ -178,7 +178,7 @@ EnumSchema::Enumerant EnumSchema::getEnumerantByName(Text::Reader name) const {
   KJ_IF_MAYBE(enumerant, findEnumerantByName(name)) {
     return *enumerant;
   } else {
-    FAIL_REQUIRE("enum has no such enumerant", name);
+    KJ_FAIL_REQUIRE("enum has no such enumerant", name);
   }
 }
@@ -196,7 +196,7 @@ InterfaceSchema::Method InterfaceSchema::getMethodByName(Text::Reader name) cons
   KJ_IF_MAYBE(method, findMethodByName(name)) {
     return *method;
   } else {
-    FAIL_REQUIRE("interface has no such method", name);
+    KJ_FAIL_REQUIRE("interface has no such method", name);
   }
 }
@@ -224,11 +224,11 @@ ListSchema ListSchema::of(schema::Type::Body::Which primitiveType) {
     case schema::Type::Body::ENUM_TYPE:
     case schema::Type::Body::INTERFACE_TYPE:
     case schema::Type::Body::LIST_TYPE:
-      FAIL_REQUIRE("Must use one of the other ListSchema::of() overloads for complex types.");
+      KJ_FAIL_REQUIRE("Must use one of the other ListSchema::of() overloads for complex types.");
      break;
     case schema::Type::Body::OBJECT_TYPE:
-      FAIL_REQUIRE("List(Object) not supported.");
+      KJ_FAIL_REQUIRE("List(Object) not supported.");
      break;
   }
@@ -267,7 +267,7 @@ ListSchema ListSchema::of(schema::Type::Reader elementType, Schema context) {
       return of(of(body.getListType(), context));
     case schema::Type::Body::OBJECT_TYPE:
-      FAIL_REQUIRE("List(Object) not supported.");
+      KJ_FAIL_REQUIRE("List(Object) not supported.");
      return ListSchema();
   }
@@ -276,31 +276,31 @@ ListSchema ListSchema::of(schema::Type::Reader elementType, Schema context) {
 }
 StructSchema ListSchema::getStructElementType() const {
-  REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::STRUCT_TYPE,
+  KJ_REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::STRUCT_TYPE,
      "ListSchema::getStructElementType(): The elements are not structs.");
   return elementSchema.asStruct();
 }
 EnumSchema ListSchema::getEnumElementType() const {
-  REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::ENUM_TYPE,
+  KJ_REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::ENUM_TYPE,
      "ListSchema::getEnumElementType(): The elements are not enums.");
   return elementSchema.asEnum();
 }
 InterfaceSchema ListSchema::getInterfaceElementType() const {
-  REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::INTERFACE_TYPE,
+  KJ_REQUIRE(nestingDepth == 0 && elementType == schema::Type::Body::INTERFACE_TYPE,
      "ListSchema::getInterfaceElementType(): The elements are not interfaces.");
   return elementSchema.asInterface();
 }
 ListSchema ListSchema::getListElementType() const {
-  REQUIRE(nestingDepth > 0,
+  KJ_REQUIRE(nestingDepth > 0,
      "ListSchema::getListElementType(): The elements are not lists.");
   return ListSchema(elementType, nestingDepth - 1, elementSchema);
 }
 void ListSchema::requireUsableAs(ListSchema expected) {
-  REQUIRE(elementType == expected.elementType && nestingDepth == expected.nestingDepth,
+  KJ_REQUIRE(elementType == expected.elementType && nestingDepth == expected.nestingDepth,
      "This schema is not compatible with the requested native type.");
   elementSchema.requireUsableAs(expected.elementSchema.raw);
 }
@@ -62,7 +62,7 @@ public:
   }
   size_t read(void* buffer, size_t minBytes, size_t maxBytes) override {
-    ASSERT(maxBytes <= data.size() - readPos, "Overran end of stream.");
+    KJ_ASSERT(maxBytes <= data.size() - readPos, "Overran end of stream.");
     size_t amount = std::min(maxBytes, std::max(minBytes, preferredReadSize));
     memcpy(buffer, data.data() + readPos, amount);
     readPos += amount;
@@ -70,7 +70,7 @@ public:
   }
   void skip(size_t bytes) override {
-    ASSERT(bytes <= data.size() - readPos, "Overran end of stream.");
+    KJ_ASSERT(bytes <= data.size() - readPos, "Overran end of stream.");
     readPos += bytes;
   }
@@ -39,8 +39,8 @@ size_t PackedInputStream::read(void* dst, size_t minBytes, size_t maxBytes) {
     return 0;
   }
-  DREQUIRE(minBytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
-  DREQUIRE(maxBytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
+  KJ_DREQUIRE(minBytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
+  KJ_DREQUIRE(maxBytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
   uint8_t* __restrict__ out = reinterpret_cast<uint8_t*>(dst);
   uint8_t* const outEnd = reinterpret_cast<uint8_t*>(dst) + maxBytes;
@@ -66,7 +66,7 @@ size_t PackedInputStream::read(void* dst, size_t minBytes, size_t maxBytes) {
   for (;;) {
     uint8_t tag;
-    DASSERT((out - reinterpret_cast<uint8_t*>(dst)) % sizeof(word) == 0,
+    KJ_DASSERT((out - reinterpret_cast<uint8_t*>(dst)) % sizeof(word) == 0,
        "Output pointer should always be aligned here.");
     if (BUFFER_REMAINING < 10) {
@@ -122,7 +122,7 @@ size_t PackedInputStream::read(void* dst, size_t minBytes, size_t maxBytes) {
     }
     if (tag == 0) {
-      DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
+      KJ_DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
       uint runLength = *in++ * sizeof(word);
@@ -134,7 +134,7 @@ size_t PackedInputStream::read(void* dst, size_t minBytes, size_t maxBytes) {
       out += runLength;
     } else if (tag == 0xffu) {
-      DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
+      KJ_DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
       uint runLength = *in++ * sizeof(word);
@@ -177,8 +177,8 @@ size_t PackedInputStream::read(void* dst, size_t minBytes, size_t maxBytes) {
     }
   }
-  FAIL_ASSERT("Can't get here.");
-  return 0; // GCC knows FAIL_ASSERT doesn't return, but Eclipse CDT still warns...
+  KJ_FAIL_ASSERT("Can't get here.");
+  return 0; // GCC knows KJ_FAIL_ASSERT doesn't return, but Eclipse CDT still warns...
 #undef REFRESH_BUFFER
 }
@@ -190,7 +190,7 @@ void PackedInputStream::skip(size_t bytes) {
     return;
   }
-  DREQUIRE(bytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
+  KJ_DREQUIRE(bytes % sizeof(word) == 0, "PackedInputStream reads must be word-aligned.");
   kj::ArrayPtr<const byte> buffer = inner.getReadBuffer();
   const uint8_t* __restrict__ in = reinterpret_cast<const uint8_t*>(buffer.begin());
@@ -248,7 +248,7 @@ void PackedInputStream::skip(size_t bytes) {
     }
     if (tag == 0) {
-      DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
+      KJ_DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
       uint runLength = *in++ * sizeof(word);
@@ -260,7 +260,7 @@ void PackedInputStream::skip(size_t bytes) {
       bytes -= runLength;
     } else if (tag == 0xffu) {
-      DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
+      KJ_DASSERT(BUFFER_REMAINING > 0, "Should always have non-empty buffer here.");
       uint runLength = *in++ * sizeof(word);
@@ -298,7 +298,7 @@ void PackedInputStream::skip(size_t bytes) {
     }
   }
-  FAIL_ASSERT("Can't get here.");
+  KJ_FAIL_ASSERT("Can't get here.");
 }
 // -------------------------------------------------------------------
@@ -96,7 +96,7 @@ public:
   }
   size_t read(void* buffer, size_t minBytes, size_t maxBytes) override {
-    ASSERT(maxBytes <= data.size() - readPos, "Overran end of stream.");
+    KJ_ASSERT(maxBytes <= data.size() - readPos, "Overran end of stream.");
     size_t amount = std::min(maxBytes, std::max(minBytes, preferredReadSize));
     memcpy(buffer, data.data() + readPos, amount);
     readPos += amount;
@@ -104,7 +104,7 @@ public:
   }
   void skip(size_t bytes) override {
-    ASSERT(bytes <= data.size() - readPos, "Overran end of stream.");
+    KJ_ASSERT(bytes <= data.size() - readPos, "Overran end of stream.");
     readPos += bytes;
   }
@@ -40,7 +40,7 @@ public:
   // implements snappy::Source ---------------------------------------
   size_t Available() const override {
-    FAIL_ASSERT("Snappy doesn't actually call this.");
+    KJ_FAIL_ASSERT("Snappy doesn't actually call this.");
     return 0;
   }
@@ -121,7 +121,7 @@ void SnappyInputStream::refill() {
 SnappyOutputStream::SnappyOutputStream(
     OutputStream& inner, kj::ArrayPtr<byte> buffer, kj::ArrayPtr<byte> compressedBuffer)
     : inner(inner) {
-  DASSERT(SNAPPY_COMPRESSED_BUFFER_SIZE >= snappy::MaxCompressedLength(snappy::kBlockSize),
+  KJ_DASSERT(SNAPPY_COMPRESSED_BUFFER_SIZE >= snappy::MaxCompressedLength(snappy::kBlockSize),
      "snappy::MaxCompressedLength() changed?");
   if (buffer.size() < SNAPPY_BUFFER_SIZE) {
@@ -159,7 +159,7 @@ void SnappyOutputStream::flush() {
     snappy::UncheckedByteArraySink sink(reinterpret_cast<char*>(compressedBuffer.begin()));
     size_t n = snappy::Compress(&source, &sink);
-    ASSERT(n <= compressedBuffer.size(),
+    KJ_ASSERT(n <= compressedBuffer.size(),
        "Critical security bug: Snappy compression overran its output buffer.");
     inner.write(compressedBuffer.begin(), n);
@@ -104,7 +104,7 @@ public:
   ~TestInputStream() {}
   size_t read(void* buffer, size_t minBytes, size_t maxBytes) override {
-    ASSERT(maxBytes <= size_t(end - pos), "Overran end of stream.");
+    KJ_ASSERT(maxBytes <= size_t(end - pos), "Overran end of stream.");
     size_t amount = lazy ? minBytes : maxBytes;
     memcpy(buffer, pos, amount);
     pos += amount;
@@ -88,7 +88,7 @@ kj::ArrayPtr<const word> FlatArrayMessageReader::getSegment(uint id) {
 }
 kj::Array<word> messageToFlatArray(kj::ArrayPtr<const kj::ArrayPtr<const word>> segments) {
-  REQUIRE(segments.size() > 0, "Tried to serialize uninitialized message.");
+  KJ_REQUIRE(segments.size() > 0, "Tried to serialize uninitialized message.");
   size_t totalSize = segments.size() / 2 + 1;
@@ -122,7 +122,7 @@ kj::Array<word> messageToFlatArray(kj::ArrayPtr<const kj::ArrayPtr<const word>>
     dst += segment.size();
   }
-  DASSERT(dst == result.end(), "Buffer overrun/underrun bug in code above.");
+  KJ_DASSERT(dst == result.end(), "Buffer overrun/underrun bug in code above.");
   return kj::mv(result);
 }
@@ -237,7 +237,7 @@ kj::ArrayPtr<const word> InputStreamMessageReader::getSegment(uint id) {
 // -------------------------------------------------------------------
 void writeMessage(kj::OutputStream& output, kj::ArrayPtr<const kj::ArrayPtr<const word>> segments) {
-  REQUIRE(segments.size() > 0, "Tried to serialize uninitialized message.");
+  KJ_REQUIRE(segments.size() > 0, "Tried to serialize uninitialized message.");
   internal::WireValue<uint32_t> table[(segments.size() + 2) & ~size_t(1)];
@@ -33,11 +33,11 @@ namespace {
 struct TestObject {
   TestObject() {
     index = count;
-    ASSERT(index != throwAt);
+    KJ_ASSERT(index != throwAt);
     ++count;
   }
   TestObject(const TestObject& other) {
-    ASSERT(other.index != throwAt);
+    KJ_ASSERT(other.index != throwAt);
     index = -1;
     copiedCount++;
   }
@@ -47,7 +47,7 @@ struct TestObject {
     } else {
       --count;
       EXPECT_EQ(index, count);
-      ASSERT(count != throwAt);
+      KJ_ASSERT(count != throwAt);
     }
   }
@@ -125,8 +125,8 @@ void inlineRequireFailure(
 #define KJ_IREQUIRE(condition, ...) \
   if (KJ_EXPECT_TRUE(condition)); else ::kj::internal::inlineRequireFailure( \
       __FILE__, __LINE__, #condition, #__VA_ARGS__, ##__VA_ARGS__)
-// Version of REQUIRE() which is safe to use in headers that are #included by users. Used to check
-// preconditions inside inline methods. KJ_INLINE_DPRECOND is particularly useful in that
+// Version of KJ_REQUIRE() which is safe to use in headers that are #included by users. Used to
+// check preconditions inside inline methods. KJ_INLINE_DPRECOND is particularly useful in that
 // it will be enabled depending on whether the application is compiled in debug mode rather than
 // whether libkj is.
 #endif
@@ -217,7 +217,7 @@ void ArrayOutputStream::write(const void* src, size_t size) {
     // Oh goody, the caller wrote directly into our buffer.
     fillPos += size;
   } else {
-    REQUIRE(size <= (size_t)(array.end() - fillPos),
+    KJ_REQUIRE(size <= (size_t)(array.end() - fillPos),
        "ArrayOutputStream's backing array was not large enough for the data written.");
     memcpy(fillPos, src, size);
     fillPos += size;
@@ -240,7 +240,7 @@ size_t FdInputStream::read(void* buffer, size_t minBytes, size_t maxBytes) {
   byte* max = pos + maxBytes;
   while (pos < min) {
-    ssize_t n = SYSCALL(::read(fd, pos, max - pos), fd);
+    ssize_t n = KJ_SYSCALL(::read(fd, pos, max - pos), fd);
     VALIDATE_INPUT(n > 0, "Premature EOF") {
       return minBytes;
     }
@@ -256,8 +256,8 @@ void FdOutputStream::write(const void* buffer, size_t size) {
   const char* pos = reinterpret_cast<const char*>(buffer);
   while (size > 0) {
-    ssize_t n = SYSCALL(::write(fd, pos, size), fd);
-    ASSERT(n > 0, "write() returned zero.");
+    ssize_t n = KJ_SYSCALL(::write(fd, pos, size), fd);
+    KJ_ASSERT(n > 0, "write() returned zero.");
     pos += n;
     size -= n;
   }
@@ -280,8 +280,8 @@ void FdOutputStream::write(ArrayPtr<const ArrayPtr<const byte>> pieces) {
   }
   while (current < iov.end()) {
-    ssize_t n = SYSCALL(::writev(fd, current, iov.end() - current), fd);
-    ASSERT(n > 0, "writev() returned zero.");
+    ssize_t n = KJ_SYSCALL(::writev(fd, current, iov.end() - current), fd);
+    KJ_ASSERT(n > 0, "writev() returned zero.");
     while (static_cast<size_t>(n) >= current->iov_len) {
       n -= current->iov_len;
@@ -88,7 +88,7 @@ TEST(Logging, Log) {
   MockExceptionCallback::ScopedRegistration reg(mockCallback);
   int line;
-  LOG(WARNING, "Hello world!"); line = __LINE__;
+  KJ_LOG(WARNING, "Hello world!"); line = __LINE__;
   EXPECT_EQ("log message: warning: " + fileLine(__FILE__, line) + ": Hello world!\n",
       mockCallback.text);
   mockCallback.text.clear();
@@ -96,13 +96,13 @@ TEST(Logging, Log) {
   int i = 123;
   const char* str = "foo";
-  LOG(ERROR, i, str); line = __LINE__;
+  KJ_LOG(ERROR, i, str); line = __LINE__;
   EXPECT_EQ("log message: error: " + fileLine(__FILE__, line) + ": i = 123; str = foo\n",
       mockCallback.text);
   mockCallback.text.clear();
-  ASSERT(1 == 1);
-  EXPECT_THROW(ASSERT(1 == 2), MockException); line = __LINE__;
+  KJ_ASSERT(1 == 1);
+  EXPECT_THROW(KJ_ASSERT(1 == 2), MockException); line = __LINE__;
   EXPECT_EQ("fatal exception: " + fileLine(__FILE__, line) + ": bug in code: expected "
       "1 == 2\n", mockCallback.text);
   mockCallback.text.clear();
@@ -118,17 +118,17 @@ TEST(Logging, Log) {
   EXPECT_TRUE(recovered);
   mockCallback.text.clear();
-  EXPECT_THROW(ASSERT(1 == 2, i, "hi", str), MockException); line = __LINE__;
+  EXPECT_THROW(KJ_ASSERT(1 == 2, i, "hi", str), MockException); line = __LINE__;
   EXPECT_EQ("fatal exception: " + fileLine(__FILE__, line) + ": bug in code: expected "
       "1 == 2; i = 123; hi; str = foo\n", mockCallback.text);
   mockCallback.text.clear();
-  EXPECT_THROW(REQUIRE(1 == 2, i, "hi", str), MockException); line = __LINE__;
+  EXPECT_THROW(KJ_REQUIRE(1 == 2, i, "hi", str), MockException); line = __LINE__;
   EXPECT_EQ("fatal exception: " + fileLine(__FILE__, line) + ": precondition not met: expected "
       "1 == 2; i = 123; hi; str = foo\n", mockCallback.text);
   mockCallback.text.clear();
-  EXPECT_THROW(ASSERT(false, "foo"), MockException); line = __LINE__;
+  EXPECT_THROW(KJ_ASSERT(false, "foo"), MockException); line = __LINE__;
   EXPECT_EQ("fatal exception: " + fileLine(__FILE__, line) + ": bug in code: foo\n",
       mockCallback.text);
   mockCallback.text.clear();
@@ -142,9 +142,9 @@ TEST(Logging, Syscall) {
   int i = 123;
   const char* str = "foo";
-  int fd = SYSCALL(dup(STDIN_FILENO));
-  SYSCALL(close(fd));
-  EXPECT_THROW(SYSCALL(close(fd), i, "bar", str), MockException); line = __LINE__;
+  int fd = KJ_SYSCALL(dup(STDIN_FILENO));
+  KJ_SYSCALL(close(fd));
+  EXPECT_THROW(KJ_SYSCALL(close(fd), i, "bar", str), MockException); line = __LINE__;
   EXPECT_EQ("fatal exception: " + fileLine(__FILE__, line) + ": error from OS: close(fd): "
       + strerror(EBADF) + "; i = 123; bar; str = foo\n", mockCallback.text);
   mockCallback.text.clear();
@@ -163,8 +163,8 @@ TEST(Logging, Context) {
   MockExceptionCallback::ScopedRegistration reg(mockCallback);
   {
-    CONTEXT("foo"); int cline = __LINE__;
+    KJ_CONTEXT("foo"); int cline = __LINE__;
EXPECT_THROW(FAIL_ASSERT("bar"), MockException); int line = __LINE__; EXPECT_THROW(KJ_FAIL_ASSERT("bar"), MockException); int line = __LINE__;
EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n" EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n"
+ fileLine(__FILE__, line) + ": bug in code: bar\n", + fileLine(__FILE__, line) + ": bug in code: bar\n",
...@@ -174,8 +174,8 @@ TEST(Logging, Context) { ...@@ -174,8 +174,8 @@ TEST(Logging, Context) {
{ {
int i = 123; int i = 123;
const char* str = "qux"; const char* str = "qux";
CONTEXT("baz", i, "corge", str); int cline2 = __LINE__; KJ_CONTEXT("baz", i, "corge", str); int cline2 = __LINE__;
EXPECT_THROW(FAIL_ASSERT("bar"), MockException); line = __LINE__; EXPECT_THROW(KJ_FAIL_ASSERT("bar"), MockException); line = __LINE__;
EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n" EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n"
+ fileLine(__FILE__, cline2) + ": context: baz; i = 123; corge; str = qux\n" + fileLine(__FILE__, cline2) + ": context: baz; i = 123; corge; str = qux\n"
...@@ -185,8 +185,8 @@ TEST(Logging, Context) { ...@@ -185,8 +185,8 @@ TEST(Logging, Context) {
} }
{ {
CONTEXT("grault"); int cline2 = __LINE__; KJ_CONTEXT("grault"); int cline2 = __LINE__;
EXPECT_THROW(FAIL_ASSERT("bar"), MockException); line = __LINE__; EXPECT_THROW(KJ_FAIL_ASSERT("bar"), MockException); line = __LINE__;
EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n" EXPECT_EQ("fatal exception: " + fileLine(__FILE__, cline) + ": context: foo\n"
+ fileLine(__FILE__, cline2) + ": context: grault\n" + fileLine(__FILE__, cline2) + ": context: grault\n"
......
...@@ -24,7 +24,7 @@ ...@@ -24,7 +24,7 @@
// This file declares convenient macros for debug logging and error handling. The macros make // This file declares convenient macros for debug logging and error handling. The macros make
// it excessively easy to extract useful context information from code. Example: // it excessively easy to extract useful context information from code. Example:
// //
// ASSERT(a == b, a, b, "a and b must be the same."); // KJ_ASSERT(a == b, a, b, "a and b must be the same.");
// //
// On failure, this will throw an exception whose description looks like: // On failure, this will throw an exception whose description looks like:
// //
...@@ -34,35 +34,35 @@ ...@@ -34,35 +34,35 @@
// //
// The macros available are: // The macros available are:
// //
// * `LOG(severity, ...)`: Just writes a log message, to stderr by default (but you can intercept // * `KJ_LOG(severity, ...)`: Just writes a log message, to stderr by default (but you can
// messages by implementing an ExceptionCallback). `severity` is `INFO`, `WARNING`, `ERROR`, or // intercept messages by implementing an ExceptionCallback). `severity` is `INFO`, `WARNING`,
// `FATAL`. If the severity is not higher than the global logging threshold, nothing will be // `ERROR`, or `FATAL`. If the severity is not higher than the global logging threshold, nothing
// written and in fact the log message won't even be evaluated. // will be written and in fact the log message won't even be evaluated.
// //
// * `ASSERT(condition, ...)`: Throws an exception if `condition` is false, or aborts if exceptions // * `KJ_ASSERT(condition, ...)`: Throws an exception if `condition` is false, or aborts if
// are disabled. This macro should be used to check for bugs in the surrounding code and its // exceptions are disabled. This macro should be used to check for bugs in the surrounding code
// dependencies, but NOT to check for invalid input. // and its dependencies, but NOT to check for invalid input.
// //
// * `REQUIRE(condition, ...)`: Like `ASSERT` but used to check preconditions -- e.g. to validate // * `KJ_REQUIRE(condition, ...)`: Like `KJ_ASSERT` but used to check preconditions -- e.g. to
// parameters passed from a caller. A failure indicates that the caller is buggy. // validate parameters passed from a caller. A failure indicates that the caller is buggy.
// //
// * `RECOVERABLE_ASSERT(condition, ...) { ... }`: Like `ASSERT` except that if exceptions are // * `RECOVERABLE_ASSERT(condition, ...) { ... }`: Like `KJ_ASSERT` except that if exceptions are
// disabled, instead of aborting, the following code block will be executed. This block should // disabled, instead of aborting, the following code block will be executed. This block should
// do whatever it can to fill in dummy values so that the code can continue executing, even if // do whatever it can to fill in dummy values so that the code can continue executing, even if
// this means the eventual output will be garbage. // this means the eventual output will be garbage.
// //
// * `RECOVERABLE_REQUIRE(condition, ...) { ... }`: Like `RECOVERABLE_ASSERT` and `REQUIRE`. // * `RECOVERABLE_REQUIRE(condition, ...) { ... }`: Like `RECOVERABLE_ASSERT` and `KJ_REQUIRE`.
// //
// * `VALIDATE_INPUT(condition, ...) { ... }`: Like `RECOVERABLE_PRECOND` but used to validate // * `VALIDATE_INPUT(condition, ...) { ... }`: Like `RECOVERABLE_PRECOND` but used to validate
// input that may have come from the user or some other untrusted source. Recoverability is // input that may have come from the user or some other untrusted source. Recoverability is
// required in this case. // required in this case.
// //
// * `SYSCALL(code, ...)`: Executes `code` assuming it makes a system call. A negative return // * `KJ_SYSCALL(code, ...)`: Executes `code` assuming it makes a system call. A negative return
// value is considered an error. EINTR is handled by retrying. Other errors are handled by // value is considered an error. EINTR is handled by retrying. Other errors are handled by
// throwing an exception. The macro also returns the call's result. For example, the following // throwing an exception. The macro also returns the call's result. For example, the following
// calls `open()` and includes the file name in any error message: // calls `open()` and includes the file name in any error message:
// //
// int fd = SYSCALL(open(filename, O_RDONLY), filename); // int fd = KJ_SYSCALL(open(filename, O_RDONLY), filename);
// //
// * `RECOVERABLE_SYSCALL(code, ...) { ... }`: Like `RECOVERABLE_ASSERT` and `SYSCALL`. Note that // * `RECOVERABLE_SYSCALL(code, ...) { ... }`: Like `RECOVERABLE_ASSERT` and `SYSCALL`. Note that
// unfortunately this macro cannot return a value since it implements control flow, but you can // unfortunately this macro cannot return a value since it implements control flow, but you can
...@@ -74,13 +74,13 @@ ...@@ -74,13 +74,13 @@
// fd = SYSCALL(open("/dev/null", O_RDONLY)); // fd = SYSCALL(open("/dev/null", O_RDONLY));
// } // }
// //
// * `CONTEXT(...)`: Notes additional contextual information relevant to any exceptions thrown // * `KJ_CONTEXT(...)`: Notes additional contextual information relevant to any exceptions thrown
// from within the current scope. That is, until control exits the block in which CONTEXT() // from within the current scope. That is, until control exits the block in which KJ_CONTEXT()
// is used, if any exception is generated, it will contain the given information in its context // is used, if any exception is generated, it will contain the given information in its context
// chain. This is helpful because it can otherwise be very difficult to come up with error // chain. This is helpful because it can otherwise be very difficult to come up with error
// messages that make sense within low-level helper code. Note that the parameters to CONTEXT() // messages that make sense within low-level helper code. Note that the parameters to
// are only evaluated if an exception is thrown. This means that any variables used must remain // KJ_CONTEXT() are only evaluated if an exception is thrown. This implies that any variables
// valid until the end of the scope. // used must remain valid until the end of the scope.
// //
// Notes: // Notes:
// * Do not write expressions with side-effects in the message content part of the macro, as the // * Do not write expressions with side-effects in the message content part of the macro, as the
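A short usage sketch pulling the documented macros together, modeled on the examples in the comment block above; the header path and the helper's name and signature are assumptions, not part of this commit.

    #include <fcntl.h>
    #include <unistd.h>
    #include "kj/logging.h"   // assumed install path

    // Copy up to `size` bytes from the start of `filename` into `buffer`, returning
    // how many bytes were read.
    size_t readPrefix(const char* filename, void* buffer, size_t size) {
      KJ_REQUIRE(buffer != nullptr, "caller must supply a buffer");  // caller bug if this fails
      KJ_CONTEXT("reading file prefix", filename, size);             // attached to any exception below

      int fd = KJ_SYSCALL(open(filename, O_RDONLY), filename);
      ssize_t n = KJ_SYSCALL(read(fd, buffer, size), filename);
      KJ_SYSCALL(close(fd), filename);

      KJ_ASSERT(static_cast<size_t>(n) <= size);                     // bug in this code if it fails
      if (static_cast<size_t>(n) < size) {
        KJ_LOG(WARNING, "file shorter than requested prefix", filename, n, size);
      }
      return static_cast<size_t>(n);
    }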
...@@ -210,12 +210,12 @@ private: ...@@ -210,12 +210,12 @@ private:
ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity); ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity);
#define LOG(severity, ...) \ #define KJ_LOG(severity, ...) \
if (!::kj::Log::shouldLog(::kj::Log::Severity::severity)) {} else \ if (!::kj::Log::shouldLog(::kj::Log::Severity::severity)) {} else \
::kj::Log::log(__FILE__, __LINE__, ::kj::Log::Severity::severity, \ ::kj::Log::log(__FILE__, __LINE__, ::kj::Log::Severity::severity, \
#__VA_ARGS__, __VA_ARGS__) #__VA_ARGS__, __VA_ARGS__)
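Because KJ_LOG expands to an `if (!shouldLog(severity)) {} else log(...)` statement, the argument list is never evaluated when the severity does not pass the global threshold. A small sketch, with a hypothetical expensiveDigest() standing in for costly work and the header path assumed:

    #include "kj/logging.h"   // assumed install path

    static int expensiveDigest() {            // hypothetical stand-in for costly work
      int sum = 0;
      for (int i = 0; i < 1000000; i++) sum += i;
      return sum;
    }

    void checkpoint() {
      // expensiveDigest() is only called if INFO is higher than the global
      // logging threshold; otherwise the whole argument list is skipped.
      KJ_LOG(INFO, "checkpoint", expensiveDigest());
    }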
#define FAULT(nature, cond, ...) \ #define KJ_FAULT(nature, cond, ...) \
if (KJ_EXPECT_TRUE(cond)) {} else \ if (KJ_EXPECT_TRUE(cond)) {} else \
::kj::Log::fatalFault(__FILE__, __LINE__, \ ::kj::Log::fatalFault(__FILE__, __LINE__, \
::kj::Exception::Nature::nature, #cond, #__VA_ARGS__, ##__VA_ARGS__) ::kj::Exception::Nature::nature, #cond, #__VA_ARGS__, ##__VA_ARGS__)
...@@ -226,19 +226,19 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity); ...@@ -226,19 +226,19 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity);
::kj::Exception::Nature::nature, #cond, #__VA_ARGS__, ##__VA_ARGS__), false) {} \ ::kj::Exception::Nature::nature, #cond, #__VA_ARGS__, ##__VA_ARGS__), false) {} \
else else
#define ASSERT(...) FAULT(LOCAL_BUG, __VA_ARGS__) #define KJ_ASSERT(...) KJ_FAULT(LOCAL_BUG, __VA_ARGS__)
#define RECOVERABLE_ASSERT(...) RECOVERABLE_FAULT(LOCAL_BUG, __VA_ARGS__) #define RECOVERABLE_ASSERT(...) RECOVERABLE_FAULT(LOCAL_BUG, __VA_ARGS__)
#define REQUIRE(...) FAULT(PRECONDITION, __VA_ARGS__) #define KJ_REQUIRE(...) KJ_FAULT(PRECONDITION, __VA_ARGS__)
#define RECOVERABLE_REQUIRE(...) RECOVERABLE_FAULT(PRECONDITION, __VA_ARGS__) #define RECOVERABLE_REQUIRE(...) RECOVERABLE_FAULT(PRECONDITION, __VA_ARGS__)
#define VALIDATE_INPUT(...) RECOVERABLE_FAULT(INPUT, __VA_ARGS__) #define VALIDATE_INPUT(...) RECOVERABLE_FAULT(INPUT, __VA_ARGS__)
#define FAIL_ASSERT(...) ASSERT(false, ##__VA_ARGS__) #define KJ_FAIL_ASSERT(...) KJ_ASSERT(false, ##__VA_ARGS__)
#define FAIL_RECOVERABLE_ASSERT(...) RECOVERABLE_ASSERT(false, ##__VA_ARGS__) #define FAIL_RECOVERABLE_ASSERT(...) RECOVERABLE_ASSERT(false, ##__VA_ARGS__)
#define FAIL_REQUIRE(...) REQUIRE(false, ##__VA_ARGS__) #define KJ_FAIL_REQUIRE(...) KJ_REQUIRE(false, ##__VA_ARGS__)
#define FAIL_RECOVERABLE_REQUIRE(...) RECOVERABLE_REQUIRE(false, ##__VA_ARGS__) #define FAIL_RECOVERABLE_REQUIRE(...) RECOVERABLE_REQUIRE(false, ##__VA_ARGS__)
#define FAIL_VALIDATE_INPUT(...) VALIDATE_INPUT(false, ##__VA_ARGS__) #define FAIL_VALIDATE_INPUT(...) VALIDATE_INPUT(false, ##__VA_ARGS__)
#define SYSCALL(call, ...) \ #define KJ_SYSCALL(call, ...) \
::kj::Log::syscall( \ ::kj::Log::syscall( \
[&](){return (call);}, __FILE__, __LINE__, #call, #__VA_ARGS__, ##__VA_ARGS__) [&](){return (call);}, __FILE__, __LINE__, #call, #__VA_ARGS__, ##__VA_ARGS__)
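The lambda in KJ_SYSCALL's expansion lets ::kj::Log::syscall() re-invoke the call after EINTR, so callers only have to loop over short writes themselves. A sketch mirroring the FdOutputStream::write() loop earlier in this diff, with the header path assumed:

    #include <unistd.h>
    #include "kj/logging.h"   // assumed install path

    void writeAll(int fd, const char* data, size_t size) {
      while (size > 0) {
        ssize_t n = KJ_SYSCALL(::write(fd, data, size), fd);  // retries EINTR internally
        KJ_ASSERT(n > 0, "write() returned zero.");
        data += n;
        size -= static_cast<size_t>(n);
      }
    }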
...@@ -263,7 +263,7 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity); ...@@ -263,7 +263,7 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity);
_errorNumber, __FILE__, __LINE__, #code, #__VA_ARGS__, ##__VA_ARGS__); \ _errorNumber, __FILE__, __LINE__, #code, #__VA_ARGS__, ##__VA_ARGS__); \
} while (false) } while (false)
#define CONTEXT(...) \ #define KJ_CONTEXT(...) \
auto _kjContextFunc = [&](::kj::Exception& exception) { \ auto _kjContextFunc = [&](::kj::Exception& exception) { \
return ::kj::Log::addContextTo(exception, \ return ::kj::Log::addContextTo(exception, \
__FILE__, __LINE__, #__VA_ARGS__, ##__VA_ARGS__); \ __FILE__, __LINE__, #__VA_ARGS__, ##__VA_ARGS__); \
...@@ -271,17 +271,17 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity); ...@@ -271,17 +271,17 @@ ArrayPtr<const char> KJ_STRINGIFY(Log::Severity severity);
::kj::Log::ContextImpl<decltype(_kjContextFunc)> _kjContext(_kjContextFunc) ::kj::Log::ContextImpl<decltype(_kjContextFunc)> _kjContext(_kjContextFunc)
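KJ_CONTEXT declares a local RAII object (_kjContext) holding a by-reference lambda, so the context lasts exactly until the end of the enclosing scope and its arguments are stringified only if an exception is actually thrown. A usage sketch, with the header path and function names assumed:

    #include "kj/logging.h"   // assumed install path

    void parseRecord(const char* recordName, int recordIndex) {
      // recordName and recordIndex are captured by reference and evaluated lazily,
      // so they must remain valid for the whole scope.
      KJ_CONTEXT("parsing record", recordName, recordIndex);

      KJ_REQUIRE(recordIndex >= 0, recordIndex);
      // ... parsing work; any exception thrown here picks up the context above ...
    }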
#ifdef NDEBUG #ifdef NDEBUG
#define DLOG(...) do {} while (false) #define KJ_DLOG(...) do {} while (false)
#define DASSERT(...) do {} while (false) #define KJ_DASSERT(...) do {} while (false)
#define RECOVERABLE_DASSERT(...) do {} while (false) #define KJ_RECOVERABLE_DASSERT(...) do {} while (false)
#define DREQUIRE(...) do {} while (false) #define KJ_DREQUIRE(...) do {} while (false)
#define RECOVERABLE_DREQUIRE(...) do {} while (false) #define KJ_RECOVERABLE_DREQUIRE(...) do {} while (false)
#else #else
#define DLOG LOG #define KJ_DLOG KJ_LOG
#define DASSERT ASSERT #define KJ_DASSERT KJ_ASSERT
#define RECOVERABLE_DASSERT RECOVERABLE_ASSERT #define KJ_RECOVERABLE_DASSERT RECOVERABLE_ASSERT
#define DREQUIRE REQUIRE #define KJ_DREQUIRE KJ_REQUIRE
#define RECOVERABLE_DREQUIRE RECOVERABLE_REQUIRE #define KJ_RECOVERABLE_DREQUIRE RECOVERABLE_REQUIRE
#endif #endif
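Under NDEBUG the KJ_D* macros collapse to `do {} while (false)`, so their arguments must not carry side effects that release builds would silently drop. A tiny sketch, header path assumed:

    #include "kj/logging.h"   // assumed install path

    void recordSample(int sample) {
      // Checked only in debug builds; in release builds this entire line is a no-op.
      KJ_DREQUIRE(sample >= 0, sample);
      // KJ_DASSERT(++sampleCount < limit);  // DON'T: the increment vanishes under NDEBUG
    }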
template <typename... Params> template <typename... Params>
......
...@@ -232,7 +232,7 @@ char* DoubleToBuffer(double value, char* buffer) { ...@@ -232,7 +232,7 @@ char* DoubleToBuffer(double value, char* buffer) {
// The snprintf should never overflow because the buffer is significantly // The snprintf should never overflow because the buffer is significantly
// larger than the precision we asked for. // larger than the precision we asked for.
DASSERT(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize); KJ_DASSERT(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize);
// We need to make parsed_value volatile in order to force the compiler to // We need to make parsed_value volatile in order to force the compiler to
// write it out to the stack. Otherwise, it may keep the value in a // write it out to the stack. Otherwise, it may keep the value in a
...@@ -246,7 +246,7 @@ char* DoubleToBuffer(double value, char* buffer) { ...@@ -246,7 +246,7 @@ char* DoubleToBuffer(double value, char* buffer) {
snprintf(buffer, kDoubleToBufferSize, "%.*g", DBL_DIG+2, value); snprintf(buffer, kDoubleToBufferSize, "%.*g", DBL_DIG+2, value);
// Should never overflow; see above. // Should never overflow; see above.
DASSERT(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize); KJ_DASSERT(snprintf_result > 0 && snprintf_result < kDoubleToBufferSize);
} }
DelocalizeRadix(buffer); DelocalizeRadix(buffer);
...@@ -288,7 +288,7 @@ char* FloatToBuffer(float value, char* buffer) { ...@@ -288,7 +288,7 @@ char* FloatToBuffer(float value, char* buffer) {
// The snprintf should never overflow because the buffer is significantly // The snprintf should never overflow because the buffer is significantly
// larger than the precision we asked for. // larger than the precision we asked for.
DASSERT(snprintf_result > 0 && snprintf_result < kFloatToBufferSize); KJ_DASSERT(snprintf_result > 0 && snprintf_result < kFloatToBufferSize);
float parsed_value; float parsed_value;
if (!safe_strtof(buffer, &parsed_value) || parsed_value != value) { if (!safe_strtof(buffer, &parsed_value) || parsed_value != value) {
...@@ -296,7 +296,7 @@ char* FloatToBuffer(float value, char* buffer) { ...@@ -296,7 +296,7 @@ char* FloatToBuffer(float value, char* buffer) {
snprintf(buffer, kFloatToBufferSize, "%.*g", FLT_DIG+2, value); snprintf(buffer, kFloatToBufferSize, "%.*g", FLT_DIG+2, value);
// Should never overflow; see above. // Should never overflow; see above.
DASSERT(snprintf_result > 0 && snprintf_result < kFloatToBufferSize); KJ_DASSERT(snprintf_result > 0 && snprintf_result < kFloatToBufferSize);
} }
DelocalizeRadix(buffer); DelocalizeRadix(buffer);
......
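The assertions in the DoubleToBuffer/FloatToBuffer hunks above guard a print-then-reparse pattern: format with an economical precision first, and only fall back to DBL_DIG+2 digits when the value does not survive a round trip. A standalone sketch of that idea (the initial precision, and the omission of the snprintf-result check, the volatile reparse, and radix delocalization, are simplifications, not the library's exact code):

    #include <float.h>
    #include <stdio.h>
    #include <stdlib.h>

    static void doubleToShortest(double value, char* buffer, size_t bufferSize) {
      snprintf(buffer, bufferSize, "%.*g", DBL_DIG, value);
      if (strtod(buffer, nullptr) != value) {
        // Shorter form was lossy; use enough digits to round-trip any double.
        snprintf(buffer, bufferSize, "%.*g", DBL_DIG + 2, value);
      }
    }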