Commit c0ec1d2c authored by Kenton Varda's avatar Kenton Varda Committed by GitHub

Merge pull request #449 from dwrensha/canonical-list-padding

canonicalization: make sure that pad bits of primitive lists are zeroed
parents 4b03d3d3 09b28b97
......@@ -332,6 +332,61 @@ KJ_TEST("isCanonical requires truncation of 0-valued struct fields in all list m
KJ_ASSERT(!nonTruncated.isCanonical());
}
KJ_TEST("primitive list with nonzero padding") {
  // Build a message whose root struct points at a list of three byte-sized
  // elements, where the unused fourth byte of the list's data word is nonzero.
  // Such a message must be reported non-canonical, and canonicalize() must
  // produce output with that padding byte zeroed.
  AlignedData<3> nonCanonicalData = {{
    // Root struct with one pointer field.
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    // List pointer: three byte-sized elements.
    0x01, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00,
    // Element data; the byte following the third element is nonzero.
    0x01, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00,
  }};
  kj::ArrayPtr<const word> segmentList[1] = {kj::arrayPtr(nonCanonicalData.words, 3)};
  SegmentArrayMessageReader reader(kj::arrayPtr(segmentList, 1));
  KJ_ASSERT(!reader.isCanonical());
  auto output = canonicalize(reader.getRoot<test::TestAnyPointer>());
  AlignedData<3> expected = {{
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00,
    // Identical to the input except the padding byte has been cleared.
    0x01, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  ASSERT_EQ(output.asBytes(), kj::arrayPtr(expected.bytes, 3 * 8));
}
KJ_TEST("bit list with nonzero padding") {
  // Build a message whose root struct points at a list of eleven bit-sized
  // elements, where a pad bit (the twelfth) is set.  The message must be
  // reported non-canonical, and canonicalize() must clear every pad bit.
  AlignedData<3> nonCanonicalData = {{
    // Root struct with one pointer field.
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    // List pointer: eleven bit-sized elements.
    0x01, 0x00, 0x00, 0x00, 0x59, 0x00, 0x00, 0x00,
    // Element data; bit twelve (past the last element) is set.
    0xee, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  kj::ArrayPtr<const word> segmentList[1] = {kj::arrayPtr(nonCanonicalData.words, 3)};
  SegmentArrayMessageReader reader(kj::arrayPtr(segmentList, 1));
  KJ_ASSERT(!reader.isCanonical());
  auto output = canonicalize(reader.getRoot<test::TestAnyPointer>());
  AlignedData<3> expected = {{
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x59, 0x00, 0x00, 0x00,
    // Identical to the input except the pad bit has been cleared.
    0xee, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  ASSERT_EQ(output.asBytes(), kj::arrayPtr(expected.bytes, 3 * 8));
}
} // namespace
} // namespace _ (private)
} // namespace capnp
......@@ -1811,7 +1811,19 @@ struct WireHelpers {
} else {
// List of data.
ref->listRef.set(value.elementSize, value.elementCount);
copyMemory(ptr, reinterpret_cast<const word*>(value.ptr), totalSize);
auto wholeByteSize =
assertMax<kj::maxValueForBits<SEGMENT_WORD_COUNT_BITS + 3>() - 1>(
upgradeBound<uint64_t>(value.elementCount) * value.step / BITS_PER_BYTE,
[]() { KJ_FAIL_ASSERT("encountered impossibly long data ListReader"); });
copyMemory(reinterpret_cast<byte*>(ptr), value.ptr, wholeByteSize);
auto leftoverBits =
(upgradeBound<uint64_t>(value.elementCount) * value.step) % (BYTES * BITS_PER_BYTE);
if (leftoverBits > ZERO * BITS) {
// We need to copy a partial byte.
uint8_t mask = (1 << unboundAs<uint8_t>(leftoverBits / BITS)) - 1;
*((reinterpret_cast<byte*>(ptr)) + wholeByteSize) = mask & *(value.ptr + wholeByteSize);
}
}
return { segment, ptr };
......@@ -3150,7 +3162,28 @@ bool ListReader::isCanonical(const word **readHead, const WirePointer *ref) {
auto bitSize = upgradeBound<uint64_t>(this->elementCount) *
dataBitsPerElement(this->elementSize);
*readHead += WireHelpers::roundBitsUpToWords(bitSize);
auto truncatedByteSize = bitSize / BITS_PER_BYTE;
auto byteReadHead = reinterpret_cast<const uint8_t*>(*readHead) + truncatedByteSize;
auto readHeadEnd = *readHead + WireHelpers::roundBitsUpToWords(bitSize);
auto leftoverBits = bitSize % (BYTES * BITS_PER_BYTE);
if (leftoverBits > ZERO * BITS) {
auto mask = ~((1 << unboundAs<uint8_t>(leftoverBits / BITS)) - 1);
if (mask & *byteReadHead) {
return false;
}
byteReadHead += 1;
}
while (byteReadHead != reinterpret_cast<const uint8_t*>(readHeadEnd)) {
if (*byteReadHead != 0) {
return false;
}
byteReadHead += 1;
}
*readHead = readHeadEnd;
return true;
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment