Commit fbb5f0f1 authored by David Renshaw

canonicalization: make sure that pad bits of primitive lists are zeroed

parent 83b8dccf
......@@ -332,6 +332,61 @@ KJ_TEST("isCanonical requires truncation of 0-valued struct fields in all list m
KJ_ASSERT(!nonTruncated.isCanonical());
}
KJ_TEST("primitive list with nonzero padding") {
  // A root struct holding one pointer to a list of three byte-sized elements,
  // where one of the five padding bytes in the list's final word is nonzero.
  // Such a message must be reported as non-canonical, and canonicalizing it
  // must zero the offending pad byte.
  AlignedData<3> paddedSegment = {{
    // Root struct: no data section, one pointer.
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    // List pointer: three elements, one byte each.
    0x01, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00,
    // Fourth byte (first pad byte) is non-zero!
    0x01, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00,
  }};
  kj::ArrayPtr<const word> segmentTable[1] = {kj::arrayPtr(paddedSegment.words, 3)};
  SegmentArrayMessageReader reader(kj::arrayPtr(segmentTable, 1));
  KJ_ASSERT(!reader.isCanonical());

  auto rebuiltWords = canonicalize(reader.getRoot<test::TestAnyPointer>());
  AlignedData<3> expectedCanonical = {{
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x1a, 0x00, 0x00, 0x00,
    // The stray pad byte has been zeroed; element bytes are untouched.
    0x01, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  ASSERT_EQ(rebuiltWords.asBytes(), kj::arrayPtr(expectedCanonical.bytes, 3 * 8));
}
KJ_TEST("bit list with nonzero padding") {
  // A root struct pointing at a list of eleven single-bit elements in which a
  // pad bit (the twelfth) is set.  The message must be rejected as
  // non-canonical, and canonicalization must clear every bit past the last
  // element.
  AlignedData<3> paddedSegment = {{
    // Root struct: no data section, one pointer.
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    // List pointer: eleven bit-sized elements.
    0x01, 0x00, 0x00, 0x00, 0x59, 0x00, 0x00, 0x00,
    // Twelfth bit (first pad bit) is non-zero!
    0xee, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  kj::ArrayPtr<const word> segmentTable[1] = {kj::arrayPtr(paddedSegment.words, 3)};
  SegmentArrayMessageReader reader(kj::arrayPtr(segmentTable, 1));
  KJ_ASSERT(!reader.isCanonical());

  auto rebuiltWords = canonicalize(reader.getRoot<test::TestAnyPointer>());
  AlignedData<3> expectedCanonical = {{
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
    0x01, 0x00, 0x00, 0x00, 0x59, 0x00, 0x00, 0x00,
    // Stray pad bit cleared: second byte 0x0f -> 0x07.
    0xee, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  }};
  ASSERT_EQ(rebuiltWords.asBytes(), kj::arrayPtr(expectedCanonical.bytes, 3 * 8));
}
} // namespace
} // namespace _ (private)
} // namespace capnp
......@@ -1811,7 +1811,15 @@ struct WireHelpers {
} else {
// List of data.
// NOTE(review): this is a unified-diff hunk whose +/- markers were lost in
// scraping.  The copyMemory(ptr, reinterpret_cast<const word*>...) line just
// below appears to be the *removed* pre-change line (it copied whole words,
// carrying the source's pad bits into the destination verbatim); the lines
// after it appear to be the replacement.  Confirm against the original
// commit before treating this hunk as compilable code.
ref->listRef.set(value.elementSize, value.elementCount);
copyMemory(ptr, reinterpret_cast<const word*>(value.ptr), totalSize);
// Replacement behavior: copy only the bytes actually covered by list
// elements, leaving the destination's trailing pad bytes untouched
// (presumably the destination was zero-allocated -- confirm at the
// allocation site, which is outside this hunk).
auto wholeByteSize = value.elementCount * value.step / BITS_PER_BYTE;
copyMemory(reinterpret_cast<byte*>(ptr), value.ptr, wholeByteSize);
auto leftoverBits = value.elementCount * value.step % BITS_PER_BYTE;
if (leftoverBits > 0) {
// We need to copy a partial byte.
// Keep only the low `leftoverBits` element bits of the source's final
// partial byte; the masked-off high bits (pad bits) are written as zero.
uint8_t mask = (1 << leftoverBits) - 1;
(reinterpret_cast<byte*>(ptr))[wholeByteSize] = mask & value.ptr[wholeByteSize];
}
}
return { segment, ptr };
......@@ -3150,7 +3158,27 @@ bool ListReader::isCanonical(const word **readHead, const WirePointer *ref) {
// Canonical form requires that all pad bits after the last element of a
// primitive (data) list, up to the end of its final word, are zero.
auto bitSize = upgradeBound<uint64_t>(this->elementCount) *
dataBitsPerElement(this->elementSize);
// NOTE(review): this is a unified-diff hunk with its +/- markers lost in
// scraping.  The `*readHead +=` line immediately below appears to be the
// *removed* pre-change line: keeping it together with the
// `*readHead = readHeadEnd` assignment at the bottom would advance the read
// head twice.  Confirm against the original commit.
*readHead += WireHelpers::roundBitsUpToWords(bitSize);
// Number of bytes fully occupied by element data.
auto truncatedByteSize = bitSize / BITS_PER_BYTE;
// First byte that may contain pad bits.
auto byteReadHead = reinterpret_cast<const uint8_t*>(*readHead) + truncatedByteSize;
// One past the list's final data word.
auto readHeadEnd = *readHead + WireHelpers::roundBitsUpToWords(bitSize);
// NOTE(review): literal 8 here is presumably BITS_PER_BYTE -- using the
// named constant would match the surrounding code.
auto leftoverBits = bitSize % 8;
if (leftoverBits > 0) {
// The boundary byte is shared: its low `leftoverBits` bits belong to the
// last element(s); every bit above them must be zero.
uint8_t mask = ~((1 << leftoverBits) - 1);
if (mask & *byteReadHead) {
return false;
}
byteReadHead += 1;
}
// All remaining pad bytes up to the word boundary must be zero.
while (byteReadHead != reinterpret_cast<const uint8_t*>(readHeadEnd)) {
if (*byteReadHead != 0) {
return false;
}
byteReadHead += 1;
}
// Advance the caller's read head past this list's data words.
*readHead = readHeadEnd;
return true;
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment