capnproto / Commits / 00616e70

Commit 00616e70, authored Jun 06, 2013 by Kenton Varda

Replace all std::unique_ptr<T> with kj::Own<T>.

parent 03974a96
Showing 10 changed files with 173 additions and 58 deletions.

    c++/src/capnproto/arena.c++      +50  -37
    c++/src/capnproto/arena.h         +4   -5
    c++/src/capnproto/message.c++    +10   -5
    c++/src/capnproto/message.h       +2   -3
    c++/src/capnproto/serialize.c++   +2   -1
    c++/src/kj/common-test.c++       +20   -0
    c++/src/kj/common.h               +9   -0
    c++/src/kj/exception.c++          +4   -6
    c++/src/kj/exception.h            +1   -1
    c++/src/kj/memory.h              +71   -0
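The mechanical substitution throughout this commit: heap allocation moves from `std::unique_ptr<T>(new T(...))` to `kj::heap<T>(...)`, which returns an owning `kj::Own<T>`, and nullable owners become `kj::Maybe<kj::Own<T>>` rather than a smart pointer that doubles as "maybe absent". A minimal sketch of the before/after idiom, using a hypothetical `Widget` type rather than anything in this diff:

```cpp
#include <kj/memory.h>
#include <memory>

struct Widget { int size; Widget(int s): size(s) {} };

// Before: a nullable unique_ptr carries both ownership and optionality.
std::unique_ptr<Widget> makeOld() {
  return std::unique_ptr<Widget>(new Widget(4));
}

// After: kj::heap allocates and returns an owning kj::Own<Widget>;
// optionality is expressed separately as kj::Maybe<kj::Own<Widget>>.
kj::Own<Widget> makeNew() {
  return kj::heap<Widget>(4);
}
```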
c++/src/capnproto/arena.c++
```diff
@@ -66,11 +66,13 @@ SegmentReader* ReaderArena::tryGetSegment(SegmentId id) {
   //   first lookup, unlock it before calling getSegment(), then take a writer lock to update the
   //   map.  Bleh, lazy initialization is sad.
 
-  if (moreSegments != nullptr) {
-    auto iter = moreSegments->find(id.value);
-    if (iter != moreSegments->end()) {
-      return iter->second.get();
-    }
-  }
+  SegmentMap* segments = nullptr;
+  KJ_IF_MAYBE(s, moreSegments) {
+    auto iter = s->find(id.value);
+    if (iter != s->end()) {
+      return iter->second.get();
+    }
+    segments = s;
+  }
 
   kj::ArrayPtr<const word> newSegment = message->getSegment(id.value);
```
```diff
@@ -80,12 +82,15 @@ SegmentReader* ReaderArena::tryGetSegment(SegmentId id) {
 
   if (moreSegments == nullptr) {
     // OK, the segment exists, so allocate the map.
-    moreSegments = std::unique_ptr<SegmentMap>(new SegmentMap);
+    auto s = kj::heap<SegmentMap>();
+    segments = s;
+    moreSegments = mv(s);
   }
 
-  std::unique_ptr<SegmentReader>* slot = &(*moreSegments)[id.value];
-  *slot = std::unique_ptr<SegmentReader>(
-      new SegmentReader(this, id, newSegment, &readLimiter));
-  return slot->get();
+  auto segment = kj::heap<SegmentReader>(this, id, newSegment, &readLimiter);
+  SegmentReader* result = segment;
+  segments->insert(std::make_pair(id.value, mv(segment)));
+  return result;
 }
 
 void ReaderArena::reportReadLimitReached() {
```
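Both hunks above lean on KJ_IF_MAYBE, which unwraps a `kj::Maybe` into a plain pointer scoped to the if-block (as the diff shows, `s` is usable directly as a `SegmentMap*`). A minimal sketch of the pattern with a hypothetical cached member, not taken from this commit:

```cpp
#include <kj/common.h>
#include <kj/memory.h>

struct Widget { int size; };

struct Registry {
  kj::Maybe<kj::Own<Widget>> cached;  // hypothetical lazily-built entry

  Widget* find() {
    KJ_IF_MAYBE(w, cached) {
      return w;        // inside the block, w behaves like a plain Widget*
    } else {
      return nullptr;  // the Maybe was empty
    }
  }
};
```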
```diff
@@ -101,11 +106,14 @@ BuilderArena::BuilderArena(MessageBuilder* message)
 BuilderArena::~BuilderArena() {}
 
 SegmentBuilder* BuilderArena::getSegment(SegmentId id) {
-  // This method is allowed to crash if the segment ID is not valid.
+  // This method is allowed to fail if the segment ID is not valid.
   if (id == SegmentId(0)) {
     return &segment0;
-  } else {
-    return moreSegments->builders[id.value - 1].get();
+  } else KJ_IF_MAYBE(s, moreSegments) {
+    KJ_REQUIRE(id.value - 1 < s->builders.size(), "invalid segment id", id.value);
+    return s->builders[id.value - 1].get();
+  } else {
+    KJ_FAIL_REQUIRE("invalid segment id", id.value);
   }
 }
```
```diff
@@ -126,29 +134,33 @@ SegmentBuilder* BuilderArena::getSegmentWithAvailable(WordCount minimumAvailable
     return &segment0;
   }
 
-  if (moreSegments == nullptr) {
-    moreSegments = std::unique_ptr<MultiSegmentState>(new MultiSegmentState());
-  } else {
+  MultiSegmentState* segmentState;
+  KJ_IF_MAYBE(s, moreSegments) {
     // TODO(perf):  Check for available space in more than just the last segment.  We don't
     //   want this to be O(n), though, so we'll need to maintain some sort of table.  Complicating
     //   matters, we want SegmentBuilders::allocate() to be fast, so we can't update any such
     //   table when allocation actually happens.  Instead, we could have a priority queue based
     //   on the last-known available size, and then re-check the size when we pop segments off it
     //   and shove them to the back of the queue if they have become too small.
-    if (moreSegments->builders.back()->available() >= minimumAvailable) {
-      return moreSegments->builders.back().get();
+    if (s->builders.back()->available() >= minimumAvailable) {
+      return s->builders.back().get();
     }
+    segmentState = s;
+  } else {
+    auto newSegmentState = kj::heap<MultiSegmentState>();
+    segmentState = newSegmentState;
+    moreSegments = kj::mv(newSegmentState);
   }
 
-  std::unique_ptr<SegmentBuilder> newBuilder = std::unique_ptr<SegmentBuilder>(
-      new SegmentBuilder(this, SegmentId(moreSegments->builders.size() + 1),
-                         message->allocateSegment(minimumAvailable / WORDS), &this->dummyLimiter));
+  kj::Own<SegmentBuilder> newBuilder = kj::heap<SegmentBuilder>(
+      this, SegmentId(segmentState->builders.size() + 1),
+      message->allocateSegment(minimumAvailable / WORDS), &this->dummyLimiter);
   SegmentBuilder* result = newBuilder.get();
-  moreSegments->builders.push_back(std::move(newBuilder));
+  segmentState->builders.push_back(kj::mv(newBuilder));
 
   // Keep forOutput the right size so that we don't have to re-allocate during
   // getSegmentsForOutput(), which callers might reasonably expect is a thread-safe method.
-  moreSegments->forOutput.resize(moreSegments->builders.size() + 1);
+  segmentState->forOutput.resize(segmentState->builders.size() + 1);
 
   return result;
 }
```
```diff
@@ -160,7 +172,20 @@ kj::ArrayPtr<const kj::ArrayPtr<const word>> BuilderArena::getSegmentsForOutput(
   //   segments is actually changing due to an activity in another thread, then the caller has a
   //   problem regardless of locking here.
 
-  if (moreSegments == nullptr) {
+  KJ_IF_MAYBE(segmentState, moreSegments) {
+    KJ_DASSERT(segmentState->forOutput.size() == segmentState->builders.size() + 1,
+        "segmentState->forOutput wasn't resized correctly when the last builder was added.",
+        segmentState->forOutput.size(), segmentState->builders.size());
+    kj::ArrayPtr<kj::ArrayPtr<const word>> result(
+        &segmentState->forOutput[0], segmentState->forOutput.size());
+    uint i = 0;
+    result[i++] = segment0.currentlyAllocated();
+    for (auto& builder: segmentState->builders) {
+      result[i++] = builder->currentlyAllocated();
+    }
+    return result;
+  } else {
     if (segment0.getArena() == nullptr) {
       // We haven't actually allocated any segments yet.
       return nullptr;
@@ -169,19 +194,6 @@ kj::ArrayPtr<const kj::ArrayPtr<const word>> BuilderArena::getSegmentsForOutput(
       segment0ForOutput = segment0.currentlyAllocated();
       return kj::arrayPtr(&segment0ForOutput, 1);
     }
-  } else {
-    KJ_DASSERT(moreSegments->forOutput.size() == moreSegments->builders.size() + 1,
-        "moreSegments->forOutput wasn't resized correctly when the last builder was added.",
-        moreSegments->forOutput.size(), moreSegments->builders.size());
-    kj::ArrayPtr<kj::ArrayPtr<const word>> result(
-        &moreSegments->forOutput[0], moreSegments->forOutput.size());
-    uint i = 0;
-    result[i++] = segment0.currentlyAllocated();
-    for (auto& builder: moreSegments->builders) {
-      result[i++] = builder->currentlyAllocated();
-    }
-    return result;
   }
 }
```
```diff
@@ -194,11 +206,12 @@ SegmentReader* BuilderArena::tryGetSegment(SegmentId id) {
       return &segment0;
     }
   } else {
-    if (moreSegments == nullptr || id.value > moreSegments->builders.size()) {
-      return nullptr;
-    } else {
-      return moreSegments->builders[id.value - 1].get();
+    KJ_IF_MAYBE(segmentState, moreSegments) {
+      if (id.value <= segmentState->builders.size()) {
+        return segmentState->builders[id.value - 1].get();
+      }
     }
+    return nullptr;
   }
 }
```
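Note the behavioral change in getSegment() above: the old code indexed `moreSegments->builders` with no bounds check, while the new code validates the id with KJ_REQUIRE / KJ_FAIL_REQUIRE (both from kj/debug.h, already used elsewhere in this diff). A minimal sketch of the reporting these macros give, using a hypothetical function; the exact failure behavior depends on how the process handles kj exceptions:

```cpp
#include <kj/debug.h>

int pick(int index, int size) {
  // On failure, KJ_REQUIRE reports "invalid index" along with the stringified
  // parameter values; by default this surfaces as a kj::Exception.
  KJ_REQUIRE(index < size, "invalid index", index, size);
  return index;
}
```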
c++/src/capnproto/arena.h

```diff
@@ -29,7 +29,6 @@
 #endif
 
 #include <vector>
-#include <memory>
 #include <unordered_map>
 #include <kj/common.h>
 #include "common.h"
```
```diff
@@ -165,8 +164,8 @@ private:
   // Optimize for single-segment messages so that small messages are handled quickly.
   SegmentReader segment0;
 
-  typedef std::unordered_map<uint, std::unique_ptr<SegmentReader>> SegmentMap;
-  std::unique_ptr<SegmentMap> moreSegments;
+  typedef std::unordered_map<uint, kj::Own<SegmentReader>> SegmentMap;
+  kj::Maybe<kj::Own<SegmentMap>> moreSegments;
 };
 
 class BuilderArena final: public Arena {
```
```diff
@@ -201,10 +200,10 @@ private:
   kj::ArrayPtr<const word> segment0ForOutput;
 
   struct MultiSegmentState {
-    std::vector<std::unique_ptr<SegmentBuilder>> builders;
+    std::vector<kj::Own<SegmentBuilder>> builders;
     std::vector<kj::ArrayPtr<const word>> forOutput;
   };
-  std::unique_ptr<MultiSegmentState> moreSegments;
+  kj::Maybe<kj::Own<MultiSegmentState>> moreSegments;
 };
 
 // =======================================================================================
```
c++/src/capnproto/message.c++
```diff
@@ -167,8 +167,8 @@ MallocMessageBuilder::~MallocMessageBuilder() {
     }
   }
 
-  if (moreSegments != nullptr) {
-    for (void* ptr: moreSegments->segments) {
+  KJ_IF_MAYBE(s, moreSegments) {
+    for (void* ptr: s->segments) {
       free(ptr);
     }
   }
```
```diff
@@ -202,10 +202,15 @@ kj::ArrayPtr<word> MallocMessageBuilder::allocateSegment(uint minimumSize) {
     // After the first segment, we want nextSize to equal the total size allocated so far.
     if (allocationStrategy == AllocationStrategy::GROW_HEURISTICALLY) nextSize = size;
   } else {
-    if (moreSegments == nullptr) {
-      moreSegments = std::unique_ptr<MoreSegments>(new MoreSegments);
+    MoreSegments* segments;
+    KJ_IF_MAYBE(s, moreSegments) {
+      segments = s;
+    } else {
+      auto newSegments = kj::heap<MoreSegments>();
+      segments = newSegments;
+      moreSegments = mv(newSegments);
     }
-    moreSegments->segments.push_back(result);
+    segments->segments.push_back(result);
     if (allocationStrategy == AllocationStrategy::GROW_HEURISTICALLY) nextSize += size;
   }
```
c++/src/capnproto/message.h
```diff
@@ -21,9 +21,8 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 #include <cstddef>
-#include <memory>
 #include <kj/common.h>
+#include <kj/memory.h>
 #include "common.h"
 #include "layout.h"
```
```diff
@@ -297,7 +296,7 @@ private:
   void* firstSegment;
 
   struct MoreSegments;
-  std::unique_ptr<MoreSegments> moreSegments;
+  kj::Maybe<kj::Own<MoreSegments>> moreSegments;
 };
 
 class FlatMessageBuilder: public MessageBuilder {
```
c++/src/capnproto/serialize.c++
```diff
@@ -25,6 +25,7 @@
 #include "serialize.h"
 #include "layout.h"
 #include <kj/debug.h>
+#include <exception>
 
 namespace capnproto {
```
```diff
@@ -164,7 +165,7 @@ InputStreamMessageReader::InputStreamMessageReader(
         "Message is too large.  To increase the limit on the receiving end, see "
         "capnproto::ReaderOptions.") {
       segmentCount = 1;
-      segment0Size = std::min<size_t>(segment0Size, options.traversalLimitInWords);
+      segment0Size = kj::min(segment0Size, options.traversalLimitInWords);
       totalWords = segment0Size;
       break;
     }
```
c++/src/kj/common-test.c++
```diff
@@ -208,5 +208,25 @@ TEST(Common, Downcast) {
 #endif
 }
 
+TEST(Common, MinMax) {
+  EXPECT_EQ(5, kj::min(5, 9));
+  EXPECT_EQ(5, kj::min(9, 5));
+  EXPECT_EQ(5, kj::min(5, 5));
+  EXPECT_EQ(9, kj::max(5, 9));
+  EXPECT_EQ(9, kj::max(9, 5));
+  EXPECT_EQ(5, kj::max(5, 5));
+
+  // Hey look, we can handle the types mismatching.  Eat your heart out, std.
+  EXPECT_EQ(5, kj::min(5, 'a'));
+  EXPECT_EQ(5, kj::min('a', 5));
+  EXPECT_EQ('a', kj::max(5, 'a'));
+  EXPECT_EQ('a', kj::max('a', 5));
+  EXPECT_EQ('a', kj::min(1234567890123456789ll, 'a'));
+  EXPECT_EQ('a', kj::min('a', 1234567890123456789ll));
+  EXPECT_EQ(1234567890123456789ll, kj::max(1234567890123456789ll, 'a'));
+  EXPECT_EQ(1234567890123456789ll, kj::max('a', 1234567890123456789ll));
+}
+
 }  // namespace
 }  // namespace kj
```
c++/src/kj/common.h
```diff
@@ -275,6 +275,11 @@ template<typename T> constexpr T&& fwd(RemoveReference<T>&& t) noexcept {
   return static_cast<T&&>(t);
 }
 
+template <typename T, typename U>
+auto min(T&& a, U&& b) -> decltype(a < b ? a : b) { return a < b ? a : b; }
+template <typename T, typename U>
+auto max(T&& a, U&& b) -> decltype(a > b ? a : b) { return a > b ? a : b; }
+
 // =======================================================================================
 // Manually invoking constructors and destructors
 //
```
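Unlike std::min, which forces both arguments into a single template parameter, these overloads let the conditional operator's usual arithmetic conversions pick the result type, which is what makes the mixed-type assertions in common-test.c++ compile. A small illustrative check of what that deduction yields; this follows from the standard conversion rules and is not part of this commit:

```cpp
#include <type_traits>
#include <kj/common.h>

// int vs char promotes to int under the conditional operator's rules.
static_assert(std::is_same<decltype(kj::min(5, 'a')), int>::value, "");

// char vs long long promotes to long long.
static_assert(std::is_same<decltype(kj::min('a', 1234567890123456789ll)),
                           long long>::value, "");
```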
```diff
@@ -459,6 +464,10 @@ inline T* readMaybe(const Maybe<T&>& maybe) { return maybe.ptr; }
 template <typename T>
 class Maybe {
   // A T, or nullptr.
 
+  // IF YOU CHANGE THIS CLASS:  Note that there is a specialization of it in memory.h.
+
 public:
   Maybe(): ptr(nullptr) {}
   Maybe(T&& t) noexcept(noexcept(T(instance<T&&>()))): ptr(kj::mv(t)) {}
```
c++/src/kj/exception.c++
```diff
@@ -115,8 +115,7 @@ String KJ_STRINGIFY(const Exception& e) {
   for (;;) {
     KJ_IF_MAYBE(c, contextPtr) {
       ++contextDepth;
-      contextPtr = c->next.map([](const Own<Exception::Context>& c) -> const Exception::Context& {
-        return *c;
-      });
+      contextPtr = c->next;
     } else {
       break;
     }
```
```diff
@@ -130,8 +129,7 @@ String KJ_STRINGIFY(const Exception& e) {
   KJ_IF_MAYBE(c, contextPtr) {
     contextText[contextDepth++] = str(c->file, ":", c->line, ": context: ", c->description, "\n");
-    contextPtr = c->next.map([](const Own<Exception::Context>& c) -> const Exception::Context& {
-      return *c;
-    });
+    contextPtr = c->next;
   } else {
     break;
   }
```
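The removed lambdas were manually projecting a `Maybe<Own<Exception::Context>>` down to the `Maybe<const Exception::Context&>` the loop variable holds. With the `Maybe<Own<T>>` specialization added in memory.h (later in this diff), that projection is an implicit conversion, so `contextPtr = c->next;` suffices. A minimal sketch of the idea, with hypothetical types:

```cpp
#include <kj/common.h>
#include <kj/memory.h>

struct Node {                       // hypothetical singly-linked chain
  kj::Maybe<kj::Own<Node>> next;
};

int length(const Node& head) {
  int n = 0;
  kj::Maybe<const Node&> cursor = head;
  for (;;) {
    KJ_IF_MAYBE(node, cursor) {
      ++n;
      cursor = node->next;          // implicit Maybe<Own<Node>> -> Maybe<const Node&>
    } else {
      break;
    }
  }
  return n;
}
```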
```diff
@@ -157,7 +155,7 @@ Exception::Exception(const Exception& other) noexcept
   memcpy(trace, other.trace, sizeof(trace[0]) * traceCount);
 
   KJ_IF_MAYBE(c, other.context) {
-    context = heap(**c);
+    context = heap(*c);
   }
 }
```
```diff
@@ -166,7 +164,7 @@ Exception::~Exception() noexcept {}
 Exception::Context::Context(const Context& other) noexcept
     : file(other.file), line(other.line), description(str(other.description)) {
   KJ_IF_MAYBE(n, other.next) {
-    next = heap(**n);
+    next = heap(*n);
   }
 }
```
c++/src/kj/exception.h
```diff
@@ -95,7 +95,7 @@ public:
   inline Maybe<const Context&> getContext() const {
     KJ_IF_MAYBE(c, context) {
-      return **c;
+      return *c;
     } else {
       return nullptr;
     }
```
c++/src/kj/memory.h
```diff
@@ -123,6 +123,11 @@ private:
   const Disposer* disposer;  // Only valid if ptr != nullptr.
   T* ptr;
 
+  inline explicit Own(decltype(nullptr)): disposer(nullptr), ptr(nullptr) {}
+
+  inline bool operator==(decltype(nullptr)) { return ptr == nullptr; }
+  inline bool operator!=(decltype(nullptr)) { return ptr != nullptr; }
+  // Only called by Maybe<Own<T>>.
 
   inline void dispose() {
     // Make sure that if an exception is thrown, we are left with a null ptr, so we won't possibly
     // dispose again.
```
```diff
@@ -135,6 +140,72 @@ private:
   template <typename U>
   friend class Own;
+  friend class Maybe<Own<T>>;
 };
 
+namespace internal {
+
+template <typename T>
+Own<T>&& readMaybe(Maybe<Own<T>>&& maybe) { return kj::mv(maybe.ptr); }
+template <typename T>
+T* readMaybe(Maybe<Own<T>>& maybe) { return maybe.ptr; }
+template <typename T>
+const T* readMaybe(const Maybe<Own<T>>& maybe) { return maybe.ptr; }
+
+}  // namespace internal
+
+template <typename T>
+class Maybe<Own<T>> {
+public:
+  inline Maybe(): ptr(nullptr) {}
+  inline Maybe(Own<T>&& t) noexcept: ptr(kj::mv(t)) {}
+  inline Maybe(Maybe&& other) noexcept: ptr(kj::mv(other.ptr)) {}
+
+  template <typename U>
+  inline Maybe(Maybe<Own<U>>&& other): ptr(mv(other.ptr)) {}
+
+  inline Maybe(decltype(nullptr)) noexcept: ptr(nullptr) {}
+
+  inline operator Maybe<T&>() { return ptr.get(); }
+  inline operator Maybe<const T&>() const { return ptr.get(); }
+
+  inline Maybe& operator=(Maybe&& other) { ptr = kj::mv(other.ptr); return *this; }
+
+  inline bool operator==(decltype(nullptr)) const { return ptr == nullptr; }
+  inline bool operator!=(decltype(nullptr)) const { return ptr != nullptr; }
+
+  template <typename Func>
+  auto map(Func&& f) -> Maybe<decltype(f(instance<T&>()))> {
+    if (ptr == nullptr) {
+      return nullptr;
+    } else {
+      return f(*ptr);
+    }
+  }
+
+  template <typename Func>
+  auto map(Func&& f) const -> Maybe<decltype(f(instance<const T&>()))> {
+    if (ptr == nullptr) {
+      return nullptr;
+    } else {
+      return f(*ptr);
+    }
+  }
+
+  // TODO(someday):  Once it's safe to require GCC 4.8, use ref qualifiers to provide a version of
+  //   map() that uses move semantics if *this is an rvalue.
+
+private:
+  Own<T> ptr;
+
+  template <typename U>
+  friend class Maybe;
+  template <typename U>
+  friend Own<U>&& internal::readMaybe(Maybe<Own<U>>&& maybe);
+  template <typename U>
+  friend U* internal::readMaybe(Maybe<Own<U>>& maybe);
+  template <typename U>
+  friend const U* internal::readMaybe(const Maybe<Own<U>>& maybe);
+};
+
 namespace internal {
```
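The point of the specialization: the generic `Maybe<T>` must store a T plus a separate "present" flag, but an `Own<T>` already has a natural empty state, so `Maybe<Own<T>>` is represented by the `Own` itself, with a null `ptr` meaning "empty". Given the single `Own<T> ptr` member shown above, the wrapper should add no storage; a small illustrative check, not part of this commit:

```cpp
#include <kj/memory.h>

struct Thing { int x; };

// Maybe<Own<Thing>>'s only data member is an Own<Thing>, so the optional
// wrapper piggybacks on Own's null state instead of adding a flag.
static_assert(sizeof(kj::Maybe<kj::Own<Thing>>) == sizeof(kj::Own<Thing>), "");
```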