Unverified Commit bf32b36a authored by Paul Yang's avatar Paul Yang Committed by GitHub

Merge pull request #5765 from BSBandme/integration

down integration from internal
......@@ -92,6 +92,7 @@ cc_library(
name = "protobuf_lite",
srcs = [
# AUTOGEN(protobuf_lite_srcs)
"src/google/protobuf/any_lite.cc",
"src/google/protobuf/arena.cc",
"src/google/protobuf/extension_set.cc",
"src/google/protobuf/generated_message_table_driven_lite.cc",
......
......@@ -791,6 +791,7 @@ python_EXTRA_DIST= \
python/google/protobuf/internal/descriptor_test.py \
python/google/protobuf/internal/encoder.py \
python/google/protobuf/internal/enum_type_wrapper.py \
python/google/protobuf/internal/extension_dict.py \
python/google/protobuf/internal/factory_test1.proto \
python/google/protobuf/internal/factory_test2.proto \
python/google/protobuf/internal/file_options_test.proto \
......
......@@ -25,6 +25,7 @@ make_tmp_dir:
mkdir -p 'tmp/java/src/main/java'
touch make_tmp_dir
# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
# relative to srcdir, which may not be the same as the current directory when
# building out-of-tree.
......
set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
......
......@@ -249,13 +249,30 @@ class ConformanceJava {
break;
}
case TEXT_PAYLOAD: {
try {
TestMessagesProto3.TestAllTypesProto3.Builder builder =
TestMessagesProto3.TestAllTypesProto3.newBuilder();
TextFormat.merge(request.getTextPayload(), builder);
testMessage = builder.build();
} catch (TextFormat.ParseException e) {
return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build();
if (isProto3) {
try {
TestMessagesProto3.TestAllTypesProto3.Builder builder =
TestMessagesProto3.TestAllTypesProto3.newBuilder();
TextFormat.merge(request.getTextPayload(), builder);
testMessage = builder.build();
} catch (TextFormat.ParseException e) {
return Conformance.ConformanceResponse.newBuilder()
.setParseError(e.getMessage())
.build();
}
} else if (isProto2) {
try {
TestMessagesProto2.TestAllTypesProto2.Builder builder =
TestMessagesProto2.TestAllTypesProto2.newBuilder();
TextFormat.merge(request.getTextPayload(), builder);
testMessage = builder.build();
} catch (TextFormat.ParseException e) {
return Conformance.ConformanceResponse.newBuilder()
.setParseError(e.getMessage())
.build();
}
} else {
throw new RuntimeException("Protobuf request doesn't have specific payload type.");
}
break;
}
......
......@@ -207,9 +207,11 @@ EXTRA_DIST = \
conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la
conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \
binary_json_conformance_main.cc \
conformance_test_main.cc \
binary_json_conformance_suite.h \
binary_json_conformance_suite.cc \
text_format_conformance_suite.h \
text_format_conformance_suite.cc \
conformance_test_runner.cc \
third_party/jsoncpp/json.h \
third_party/jsoncpp/jsoncpp.cpp
......
......@@ -681,6 +681,20 @@ void BinaryAndJsonConformanceSuite::TestUnknownMessage(
}
void BinaryAndJsonConformanceSuite::RunSuiteImpl() {
// Hack to get the list of test failures based on whether
// GOOGLE3_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER is enabled or not.
conformance::FailureSet failure_set;
ConformanceRequest req;
ConformanceResponse res;
req.set_message_type(failure_set.GetTypeName());
req.set_protobuf_payload("");
req.set_requested_output_format(conformance::WireFormat::PROTOBUF);
RunTest("FindFailures", req, &res);
GOOGLE_CHECK(failure_set.MergeFromString(res.protobuf_payload()));
for (const string& failure : failure_set.failure()) {
AddExpectedFailedTest(failure);
}
type_resolver_.reset(NewTypeResolverForDescriptorPool(
kTypeUrlPrefix, DescriptorPool::generated_pool()));
type_url_ = GetTypeUrl(TestAllTypesProto3::descriptor());
......
......@@ -76,6 +76,10 @@ function doTest(request) {
response.setSkipped("JSON not supported.");
return response;
case conformance.ConformanceRequest.PayloadCase.TEXT_PAYLOAD:
response.setSkipped("Text format not supported.");
return response;
case conformance.ConformanceRequest.PayloadCase.PAYLOAD_NOT_SET:
response.setRuntimeError("Request didn't have payload");
return response;
......
......@@ -57,7 +57,10 @@ function doTest($request)
$response->setParseError($e->getMessage());
return $response;
}
} else {
} elseif ($request->getPayload() == "text_payload") {
$response->setSkipped("PHP doesn't support text format yet");
return $response;
} else {
trigger_error("Request didn't have payload.", E_USER_ERROR);
}
......
......@@ -65,7 +65,12 @@ def do_test(request):
# TODO(gerbens): Remove, this is a hack to detect if the old vs new
# parser is used by the cpp code. Relying on a bug in the old parser.
hack_proto = test_messages_proto2_pb2.TestAllTypesProto2()
if hack_proto.ParseFromString(b"\322\002\001"):
old_parser = True
try:
hack_proto.ParseFromString(b"\322\002\001")
except message.DecodeError as e:
old_parser = False
if old_parser:
# the string above is one of the failing conformance test strings of the
# old parser. If we succeed the c++ implementation is using the old
# parser so we add the list of failing conformance tests.
......
......@@ -66,6 +66,12 @@ def do_test(request)
response.parse_error = err.message.encode('utf-8')
return response
end
when :text_payload
begin
response.skipped = "Ruby doesn't support proto2"
return response
end
when nil
fail "Request didn't have payload"
......
......@@ -361,6 +361,10 @@ string ConformanceTestSuite::WireFormatToString(
return "";
}
// Registers a test name that is expected to fail in this run. RunSuite()
// populates this set from the parsed failure list before RunSuiteImpl()
// executes the individual tests.
void ConformanceTestSuite::AddExpectedFailedTest(const std::string& test_name) {
  expected_to_fail_.insert(test_name);
}
bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
std::string* output, const string& filename,
conformance::FailureSet* failure_list) {
......@@ -374,17 +378,10 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
output_ = "\nCONFORMANCE TEST BEGIN ====================================\n\n";
ConformanceRequest req;
ConformanceResponse res;
req.set_message_type(failure_list->GetTypeName());
req.set_protobuf_payload("");
req.set_requested_output_format(conformance::WireFormat::PROTOBUF);
RunTest("FindFailures", req, &res);
GOOGLE_CHECK(failure_list->MergeFromString(res.protobuf_payload()));
failure_list_filename_ = filename;
expected_to_fail_.clear();
for (const string& failure : failure_list->failure()) {
expected_to_fail_.insert(failure);
AddExpectedFailedTest(failure);
}
RunSuiteImpl();
......
......@@ -84,8 +84,9 @@ class ConformanceTestRunner {
// over a pipe.
class ForkPipeRunner : public ConformanceTestRunner {
public:
// Note: Run() doesn't take ownership of the pointers inside suites.
static int Run(int argc, char *argv[],
ConformanceTestSuite* suite);
const std::vector<ConformanceTestSuite*>& suites);
ForkPipeRunner(const std::string &executable)
: child_pid_(-1), executable_(executable) {}
......@@ -139,7 +140,10 @@ class ForkPipeRunner : public ConformanceTestRunner {
//
class ConformanceTestSuite {
public:
ConformanceTestSuite() : verbose_(false), enforce_recommended_(false) {}
ConformanceTestSuite()
: verbose_(false),
enforce_recommended_(false),
failure_list_flag_name_("--failure_list") {}
virtual ~ConformanceTestSuite() {}
void SetVerbose(bool verbose) { verbose_ = verbose; }
......@@ -156,6 +160,16 @@ class ConformanceTestSuite {
enforce_recommended_ = value;
}
// Gets the flag name to the failure list file.
// By default, this would return --failure_list
string GetFailureListFlagName() {
  return failure_list_flag_name_;
}

// Sets the command-line flag name used to pass this suite's failure-list
// file (e.g. "--text_format_failure_list"), so that several suites run by
// the same ForkPipeRunner can each read a distinct failure list from argv.
void SetFailureListFlagName(const std::string& failure_list_flag_name) {
  failure_list_flag_name_ = failure_list_flag_name;
}
// Run all the conformance tests against the given test runner.
// Test output will be stored in "output".
//
......@@ -259,6 +273,8 @@ class ConformanceTestSuite {
const conformance::ConformanceRequest& request,
conformance::ConformanceResponse* response);
void AddExpectedFailedTest(const std::string& test_name);
virtual void RunSuiteImpl() = 0;
ConformanceTestRunner* runner_;
......@@ -267,6 +283,7 @@ class ConformanceTestSuite {
bool verbose_;
bool enforce_recommended_;
std::string output_;
std::string failure_list_flag_name_;
std::string failure_list_filename_;
// The set of test names that are expected to fail in this run, but haven't
......
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "binary_json_conformance_suite.h"
#include "conformance_test.h"
#include "text_format_conformance_suite.h"
// Entry point for the conformance test runner binary. Builds one instance of
// each conformance suite (binary/JSON wire format and text format) and hands
// them to ForkPipeRunner::Run, which forks the testee program and drives the
// suites over a pipe. Note: Run() does not take ownership of the suite
// pointers, so stack-allocated suites are fine here.
int main(int argc, char *argv[]) {
  google::protobuf::BinaryAndJsonConformanceSuite binary_and_json_suite;
  google::protobuf::TextFormatConformanceTestSuite text_format_suite;
  return google::protobuf::ForkPipeRunner::Run(
      argc, argv, {&binary_and_json_suite, &text_format_suite});
}
......@@ -119,6 +119,19 @@ void UsageError() {
" should contain one test name per\n");
fprintf(stderr,
" line. Use '#' for comments.\n");
fprintf(stderr,
" --text_format_failure_list <filename> Use to specify list \n");
fprintf(stderr,
" of tests that are expected to \n");
fprintf(stderr,
" fail in the \n");
fprintf(stderr,
" text_format_conformance_suite. \n");
fprintf(stderr,
" File should contain one test name \n");
fprintf(stderr,
" per line. Use '#' for comments.\n");
fprintf(stderr,
" --enforce_recommended Enforce that recommended test\n");
fprintf(stderr,
......@@ -175,41 +188,56 @@ void ForkPipeRunner::RunTest(
}
int ForkPipeRunner::Run(
int argc, char *argv[], ConformanceTestSuite* suite) {
char *program;
string failure_list_filename;
conformance::FailureSet failure_list;
for (int arg = 1; arg < argc; ++arg) {
if (strcmp(argv[arg], "--failure_list") == 0) {
if (++arg == argc) UsageError();
failure_list_filename = argv[arg];
ParseFailureList(argv[arg], &failure_list);
} else if (strcmp(argv[arg], "--verbose") == 0) {
suite->SetVerbose(true);
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
suite->SetEnforceRecommended(true);
} else if (argv[arg][0] == '-') {
fprintf(stderr, "Unknown option: %s\n", argv[arg]);
UsageError();
} else {
if (arg != argc - 1) {
fprintf(stderr, "Too many arguments.\n");
UsageError();
int argc, char *argv[], const std::vector<ConformanceTestSuite*>& suites) {
if (suites.empty()) {
fprintf(stderr, "No test suites found.\n");
return EXIT_FAILURE;
}
bool all_ok = true;
for (ConformanceTestSuite* suite : suites) {
char *program;
string failure_list_filename;
conformance::FailureSet failure_list;
for (int arg = 1; arg < argc; ++arg) {
if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
if (++arg == argc) UsageError();
failure_list_filename = argv[arg];
ParseFailureList(argv[arg], &failure_list);
} else if (strcmp(argv[arg], "--verbose") == 0) {
suite->SetVerbose(true);
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
suite->SetEnforceRecommended(true);
} else if (argv[arg][0] == '-') {
bool recognized_flag = false;
for (ConformanceTestSuite* suite : suites) {
if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
if (++arg == argc) UsageError();
recognized_flag = true;
}
}
if (!recognized_flag) {
fprintf(stderr, "Unknown option: %s\n", argv[arg]);
UsageError();
}
} else {
if (arg != argc - 1) {
fprintf(stderr, "Too many arguments.\n");
UsageError();
}
program = argv[arg];
}
program = argv[arg];
}
}
ForkPipeRunner runner(program);
ForkPipeRunner runner(program);
std::string output;
bool ok =
suite->RunSuite(&runner, &output, failure_list_filename, &failure_list);
std::string output;
all_ok = all_ok &&
suite->RunSuite(&runner, &output, failure_list_filename, &failure_list);
fwrite(output.c_str(), 1, output.size(), stderr);
return ok ? EXIT_SUCCESS : EXIT_FAILURE;
fwrite(output.c_str(), 1, output.size(), stderr);
}
return all_ok ? EXIT_SUCCESS : EXIT_FAILURE;
}
// TODO(haberman): make this work on Windows, instead of using these
......
......@@ -19,4 +19,3 @@ Required.Proto3.ProtobufInput.IllegalZeroFieldNum_Case_0
Required.Proto3.ProtobufInput.IllegalZeroFieldNum_Case_1
Required.Proto3.ProtobufInput.IllegalZeroFieldNum_Case_2
Required.Proto3.ProtobufInput.IllegalZeroFieldNum_Case_3
Required.Proto3.JsonInput.EmptyFieldMask.ProtobufOutput
......@@ -20,4 +20,3 @@ Required.Proto3.JsonInput.FloatFieldTooLarge
Required.Proto3.JsonInput.FloatFieldTooSmall
Required.Proto3.JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool
Required.Proto3.JsonInput.TimestampJsonInputLowercaseT
Required.Proto3.JsonInput.EmptyFieldMask.ProtobufOutput
This diff is collapsed.
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef TEXT_FORMAT_CONFORMANCE_SUITE_H_
#define TEXT_FORMAT_CONFORMANCE_SUITE_H_
#include "conformance_test.h"
namespace google {
namespace protobuf {
class TextFormatConformanceTestSuite : public ConformanceTestSuite {
public:
TextFormatConformanceTestSuite();
private:
void RunSuiteImpl();
void RunValidTextFormatTest(const string& test_name, ConformanceLevel level,
const string& input);
void RunValidTextFormatTestProto2(const string& test_name,
ConformanceLevel level,
const string& input);
void RunValidTextFormatTestWithMessage(const string& test_name,
ConformanceLevel level,
const string& input_text,
const Message& prototype);
void ExpectParseFailure(const string& test_name, ConformanceLevel level,
const string& input);
bool ParseTextFormatResponse(const conformance::ConformanceResponse& response,
Message* test_message);
bool ParseResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message) override;
};
} // namespace protobuf
} // namespace google
#endif // TEXT_FORMAT_CONFORMANCE_SUITE_H_
......@@ -109,6 +109,10 @@ namespace Google.Protobuf.Conformance
}
break;
}
case ConformanceRequest.PayloadOneofCase.TextPayload:
{
return new ConformanceResponse { Skipped = "CSharp doesn't support text format" };
}
default:
throw new Exception("Unsupported request payload: " + request.PayloadCase);
}
......
No preview for this file type
......@@ -4996,7 +4996,7 @@ namespace Google.Protobuf.Reflection {
///
/// Implementations may choose not to generate the map_entry=true message, but
/// use a native map in the target language to hold the keys and values.
/// The reflection APIs in such implementions still need to work as
/// The reflection APIs in such implementations still need to work as
/// if the field is a repeated message field.
///
/// NOTE: Do not set the option in .proto files. Always use the maps syntax
......@@ -7238,7 +7238,7 @@ namespace Google.Protobuf.Reflection {
/// beginning of the "extend" block and is shared by all extensions within
/// the block.
/// - Just because a location's span is a subset of some other location's span
/// does not mean that it is a descendent. For example, a "group" defines
/// does not mean that it is a descendant. For example, a "group" defines
/// both a type and a field in a single declaration. Thus, the locations
/// corresponding to the type and field and their components will overlap.
/// - Code which tries to interpret locations should probably be designed to
......
......@@ -3006,25 +3006,11 @@ public abstract class CodedInputStream {
throw InvalidProtocolBufferException.truncatedMessage();
}
if (refillCallback != null) {
// Skipping more bytes than are in the buffer. First skip what we have.
int tempPos = bufferSize - pos;
pos = bufferSize;
// Keep refilling the buffer until we get to the point we wanted to skip to.
// This has the side effect of ensuring the limits are updated correctly.
refillBuffer(1);
while (size - tempPos > bufferSize) {
tempPos += bufferSize;
pos = bufferSize;
refillBuffer(1);
}
pos = size - tempPos;
} else {
int totalSkipped = 0;
if (refillCallback == null) {
// Skipping more bytes than are in the buffer. First skip what we have.
totalBytesRetired += pos;
int totalSkipped = bufferSize - pos;
totalSkipped = bufferSize - pos;
bufferSize = 0;
pos = 0;
......@@ -3038,6 +3024,12 @@ public abstract class CodedInputStream {
+ "#skip returned invalid result: "
+ skipped
+ "\nThe InputStream implementation is buggy.");
} else if (skipped == 0) {
// The API contract of skip() permits an inputstream to skip zero bytes for any reason
// it wants. In particular, ByteArrayInputStream will just return zero over and over
// when it's at the end of its input. In order to actually confirm that we've hit the
// end of input, we need to issue a read call via the other path.
break;
}
totalSkipped += (int) skipped;
}
......@@ -3046,6 +3038,22 @@ public abstract class CodedInputStream {
recomputeBufferSizeAfterLimit();
}
}
if (totalSkipped < size) {
// Skipping more bytes than are in the buffer. First skip what we have.
int tempPos = bufferSize - pos;
pos = bufferSize;
// Keep refilling the buffer until we get to the point we wanted to skip to.
// This has the side effect of ensuring the limits are updated correctly.
refillBuffer(1);
while (size - tempPos > bufferSize) {
tempPos += bufferSize;
pos = bufferSize;
refillBuffer(1);
}
pos = size - tempPos;
}
}
}
......
......@@ -185,8 +185,9 @@ public final class Descriptors {
if (name.indexOf('.') != -1) {
return null;
}
if (getPackage().length() > 0) {
name = getPackage() + '.' + name;
final String packageName = getPackage();
if (!packageName.isEmpty()) {
name = packageName + '.' + name;
}
final GenericDescriptor result = pool.findSymbol(name);
if (result != null && result instanceof Descriptor && result.getFile() == this) {
......@@ -208,8 +209,9 @@ public final class Descriptors {
if (name.indexOf('.') != -1) {
return null;
}
if (getPackage().length() > 0) {
name = getPackage() + '.' + name;
final String packageName = getPackage();
if (!packageName.isEmpty()) {
name = packageName + '.' + name;
}
final GenericDescriptor result = pool.findSymbol(name);
if (result != null && result instanceof EnumDescriptor && result.getFile() == this) {
......@@ -231,8 +233,9 @@ public final class Descriptors {
if (name.indexOf('.') != -1) {
return null;
}
if (getPackage().length() > 0) {
name = getPackage() + '.' + name;
final String packageName = getPackage();
if (!packageName.isEmpty()) {
name = packageName + '.' + name;
}
final GenericDescriptor result = pool.findSymbol(name);
if (result != null && result instanceof ServiceDescriptor && result.getFile() == this) {
......@@ -252,8 +255,9 @@ public final class Descriptors {
if (name.indexOf('.') != -1) {
return null;
}
if (getPackage().length() > 0) {
name = getPackage() + '.' + name;
final String packageName = getPackage();
if (!packageName.isEmpty()) {
name = packageName + '.' + name;
}
final GenericDescriptor result = pool.findSymbol(name);
if (result != null && result instanceof FieldDescriptor && result.getFile() == this) {
......@@ -1223,14 +1227,20 @@ public final class Descriptors {
// This method should match exactly with the ToJsonName() function in C++
// descriptor.cc.
private static String fieldNameToJsonName(String name) {
StringBuilder result = new StringBuilder(name.length());
final int length = name.length();
StringBuilder result = new StringBuilder(length);
boolean isNextUpperCase = false;
for (int i = 0; i < name.length(); i++) {
for (int i = 0; i < length; i++) {
char ch = name.charAt(i);
if (ch == '_') {
isNextUpperCase = true;
} else if (isNextUpperCase) {
result.append(Character.toUpperCase(ch));
// This closely matches the logic for ASCII characters in:
// http://google3/google/protobuf/descriptor.cc?l=249-251&rcl=228891689
if ('a' <= ch && ch <= 'z') {
ch = (char) (ch - 'a' + 'A');
}
result.append(ch);
isNextUpperCase = false;
} else {
result.append(ch);
......@@ -1787,7 +1797,6 @@ public final class Descriptors {
file.pool.addEnumValueByNumber(this);
}
private Integer number;
// Create an unknown enum value.
private EnumValueDescriptor(
final FileDescriptor file, final EnumDescriptor parent, final Integer number) {
......@@ -1799,7 +1808,6 @@ public final class Descriptors {
this.file = file;
this.type = parent;
this.fullName = parent.getFullName() + '.' + proto.getName();
this.number = number;
// Don't add this descriptor into pool.
}
......@@ -2029,11 +2037,14 @@ public final class Descriptors {
final FileDescriptor file, final Descriptor parent, final String name) {
if (parent != null) {
return parent.getFullName() + '.' + name;
} else if (file.getPackage().length() > 0) {
return file.getPackage() + '.' + name;
} else {
return name;
}
final String packageName = file.getPackage();
if (!packageName.isEmpty()) {
return packageName + '.' + name;
}
return name;
}
// =================================================================
......@@ -2322,13 +2333,13 @@ public final class Descriptors {
validateSymbolName(descriptor);
final String fullName = descriptor.getFullName();
final int dotpos = fullName.lastIndexOf('.');
final GenericDescriptor old = descriptorsByName.put(fullName, descriptor);
if (old != null) {
descriptorsByName.put(fullName, old);
if (descriptor.getFile() == old.getFile()) {
final int dotpos = fullName.lastIndexOf('.');
if (dotpos == -1) {
throw new DescriptorValidationException(
descriptor, '\"' + fullName + "\" is already defined.");
......@@ -2494,27 +2505,22 @@ public final class Descriptors {
final String name = descriptor.getName();
if (name.length() == 0) {
throw new DescriptorValidationException(descriptor, "Missing name.");
} else {
boolean valid = true;
for (int i = 0; i < name.length(); i++) {
final char c = name.charAt(i);
// Non-ASCII characters are not valid in protobuf identifiers, even
// if they are letters or digits.
if (c >= 128) {
valid = false;
}
// First character must be letter or _. Subsequent characters may
// be letters, numbers, or digits.
if (Character.isLetter(c) || c == '_' || (Character.isDigit(c) && i > 0)) {
// Valid
} else {
valid = false;
}
}
if (!valid) {
throw new DescriptorValidationException(
descriptor, '\"' + name + "\" is not a valid identifier.");
}
// Non-ASCII characters are not valid in protobuf identifiers, even
// if they are letters or digits.
// The first character must be a letter or '_'.
// Subsequent characters may be letters, numbers, or digits.
for (int i = 0; i < name.length(); i++) {
final char c = name.charAt(i);
if (('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
|| (c == '_')
|| ('0' <= c && c <= '9' && i > 0)) {
// Valid
continue;
}
throw new DescriptorValidationException(
descriptor, '\"' + name + "\" is not a valid identifier.");
}
}
}
......
......@@ -1186,4 +1186,13 @@ public class CodedInputStreamTest extends TestCase {
}
}
}
// Skipping more bytes than the stream contains must fail with
// InvalidProtocolBufferException rather than hang: ByteArrayInputStream's
// skip() is permitted to return 0 at end of input, so the skip loop has to
// detect that and fall through to a real read to confirm EOF.
public void testSkipPastEndOfByteArrayInput() throws Exception {
  try {
    CodedInputStream.newInstance(new ByteArrayInputStream(new byte[100])).skipRawBytes(101);
    fail();
  } catch (InvalidProtocolBufferException e) {
    // Expected
  }
}
}
......@@ -216,6 +216,23 @@ public class ParseExceptionsTest {
});
}
@Test
// Builds a delimited message whose declared lengths all exceed the bytes
// actually available (outer message, a nested length-delimited field, and an
// unknown length-delimited field), then verifies parseDelimitedFrom rejects
// it with InvalidProtocolBufferException instead of reading past the stream.
public void messageBuilder_mergeDelimitedFrom_InputStream_malformed() throws Exception {
  byte[] body = new byte[80];
  CodedOutputStream cos = CodedOutputStream.newInstance(body);
  cos.writeRawVarint32(90); // Greater than bytes in stream
  cos.writeTag(DescriptorProto.ENUM_TYPE_FIELD_NUMBER, WireFormat.WIRETYPE_LENGTH_DELIMITED);
  cos.writeRawVarint32(98); // Nested message with size larger than parent
  cos.writeTag(1000, WireFormat.WIRETYPE_LENGTH_DELIMITED);
  cos.writeRawVarint32(100); // Unknown field with size larger than parent
  ByteArrayInputStream bais = new ByteArrayInputStream(body);
  try {
    DescriptorProto.parseDelimitedFrom(bais);
    fail();
  } catch (InvalidProtocolBufferException expected) {
  }
}
@Test
public void messageBuilder_mergeDelimitedFrom_InputStreamAndExtensionRegistry() {
setupDelimited();
......
......@@ -799,6 +799,38 @@ jspb.BinaryWriter.prototype.writeMessage = function(
};
/**
 * Writes a message set extension to the buffer.
 * @param {number} field The field number for the extension.
 * @param {?MessageType} value The extension message object to write. Note that
 *     message set can only have extensions with type of optional message.
 * @param {function(!MessageTypeNonNull, !jspb.BinaryWriter)} writerCallback
 *     Will be invoked with the value to write and the writer to write it with.
 * @template MessageType
 * Use go/closure-ttl to declare a non-nullable version of MessageType. Replace
 * the null in blah|null with none. This is necessary because the compiler will
 * infer MessageType to be nullable if the value parameter is nullable.
 * @template MessageTypeNonNull :=
 *     cond(isUnknown(MessageType), unknown(),
 *       mapunion(MessageType, (X) =>
 *         cond(eq(X, 'null'), none(), X)))
 * =:
 */
jspb.BinaryWriter.prototype.writeMessageSet = function(
    field, value, writerCallback) {
  // Absent extensions write nothing at all.
  if (value == null) {
    return;
  }
  // Message-set wire format, as defined by
  // google3/net/proto/message_set.proto: an item group (field 1) holding the
  // type id (field 2, varint) and the serialized message (field 3,
  // length-delimited).
  this.writeFieldHeader_(1, jspb.BinaryConstants.WireType.START_GROUP);
  this.writeFieldHeader_(2, jspb.BinaryConstants.WireType.VARINT);
  this.encoder_.writeSignedVarint32(field);
  var payloadBookmark = this.beginDelimited_(3);
  writerCallback(value, this);
  this.endDelimited_(payloadBookmark);
  this.writeFieldHeader_(1, jspb.BinaryConstants.WireType.END_GROUP);
};
/**
* Writes a group message to the buffer.
*
......
......@@ -183,9 +183,6 @@ goog.define('jspb.Message.GENERATE_TO_OBJECT', true);
* calling fromObject. Enabling this might disable the JSCompiler's ability
* to dead code eliminate fields used in protocol buffers that are never
* used in an application.
* NOTE: By default no protos actually have a fromObject method. You need to
* add the jspb.generate_from_object options to the proto definition to
* activate the feature.
* By default this is enabled for test code only.
*/
goog.define('jspb.Message.GENERATE_FROM_OBJECT', !goog.DISALLOW_TEST_ONLY_CODE);
......@@ -703,20 +700,7 @@ jspb.Message.getField = function(msg, fieldNumber) {
* @protected
*/
jspb.Message.getRepeatedField = function(msg, fieldNumber) {
if (fieldNumber < msg.pivot_) {
var index = jspb.Message.getIndex_(msg, fieldNumber);
var val = msg.array[index];
if (val === jspb.Message.EMPTY_LIST_SENTINEL_) {
return msg.array[index] = [];
}
return val;
}
var val = msg.extensionObject_[fieldNumber];
if (val === jspb.Message.EMPTY_LIST_SENTINEL_) {
return msg.extensionObject_[fieldNumber] = [];
}
return val;
return /** @type {!Array} */ (jspb.Message.getField(msg, fieldNumber));
};
......
......@@ -106,6 +106,13 @@ message OuterMessage {
}
}
message MineField {
  // Regression guard: document.cookie is a banned property in a couple of
  // conformance check configs at Google. Verify that having a field called
  // "cookie" doesn't confuse the compiler and break the build.
  optional string cookie = 1;
}
message IsExtension {
extend HasExtensions {
optional IsExtension ext_field = 100;
......@@ -261,7 +268,6 @@ message Int64Types {
}
message TestMapFieldsNoBinary {
map<string, string> map_string_string = 1;
map<string, int32> map_string_int32 = 2;
map<string, int64> map_string_int64 = 3;
......@@ -285,7 +291,6 @@ enum MapValueEnumNoBinary {
}
// Presumably used as a map value type in the *NoBinary map tests (the name
// suggests so) -- confirm against the map field declarations in this file.
message MapValueMessageNoBinary {
  optional int32 foo = 1;
}
......
......@@ -70,7 +70,7 @@ class MessageOptions extends \Google\Protobuf\Internal\Message
* repeated MapFieldEntry map_field = 1;
* Implementations may choose not to generate the map_entry=true message, but
* use a native map in the target language to hold the keys and values.
* The reflection APIs in such implementations still need to work as
* if the field is a repeated message field.
* NOTE: Do not set the option in .proto files. Always use the maps syntax
* instead. The option should only be implicitly set by the proto compiler
......@@ -133,7 +133,7 @@ class MessageOptions extends \Google\Protobuf\Internal\Message
* repeated MapFieldEntry map_field = 1;
* Implementations may choose not to generate the map_entry=true message, but
* use a native map in the target language to hold the keys and values.
* The reflection APIs in such implementations still need to work as
* if the field is a repeated message field.
* NOTE: Do not set the option in .proto files. Always use the maps syntax
* instead. The option should only be implicitly set by the proto compiler
......@@ -295,7 +295,7 @@ class MessageOptions extends \Google\Protobuf\Internal\Message
* repeated MapFieldEntry map_field = 1;
* Implementations may choose not to generate the map_entry=true message, but
* use a native map in the target language to hold the keys and values.
* The reflection APIs in such implementations still need to work as
* if the field is a repeated message field.
* NOTE: Do not set the option in .proto files. Always use the maps syntax
* instead. The option should only be implicitly set by the proto compiler
......@@ -323,7 +323,7 @@ class MessageOptions extends \Google\Protobuf\Internal\Message
* repeated MapFieldEntry map_field = 1;
* Implementations may choose not to generate the map_entry=true message, but
* use a native map in the target language to hold the keys and values.
* The reflection APIs in such implementations still need to work as
* if the field is a repeated message field.
* NOTE: Do not set the option in .proto files. Always use the maps syntax
* instead. The option should only be implicitly set by the proto compiler
......
......@@ -55,7 +55,7 @@ class SourceCodeInfo extends \Google\Protobuf\Internal\Message
* beginning of the "extend" block and is shared by all extensions within
* the block.
* - Just because a location's span is a subset of some other location's span
* does not mean that it is a descendant. For example, a "group" defines
* both a type and a field in a single declaration. Thus, the locations
* corresponding to the type and field and their components will overlap.
* - Code which tries to interpret locations should probably be designed to
......@@ -109,7 +109,7 @@ class SourceCodeInfo extends \Google\Protobuf\Internal\Message
* beginning of the "extend" block and is shared by all extensions within
* the block.
* - Just because a location's span is a subset of some other location's span
* does not mean that it is a descendant. For example, a "group" defines
* both a type and a field in a single declaration. Thus, the locations
* corresponding to the type and field and their components will overlap.
* - Code which tries to interpret locations should probably be designed to
......@@ -158,7 +158,7 @@ class SourceCodeInfo extends \Google\Protobuf\Internal\Message
* beginning of the "extend" block and is shared by all extensions within
* the block.
* - Just because a location's span is a subset of some other location's span
* does not mean that it is a descendant. For example, a "group" defines
* both a type and a field in a single declaration. Thus, the locations
* corresponding to the type and field and their components will overlap.
* - Code which tries to interpret locations should probably be designed to
......@@ -209,7 +209,7 @@ class SourceCodeInfo extends \Google\Protobuf\Internal\Message
* beginning of the "extend" block and is shared by all extensions within
* the block.
* - Just because a location's span is a subset of some other location's span
* does not mean that it is a descendant. For example, a "group" defines
* both a type and a field in a single declaration. Thus, the locations
* corresponding to the type and field and their components will overlap.
* - Code which tries to interpret locations should probably be designed to
......
......@@ -230,6 +230,8 @@ class BaseContainer(object):
kwargs['cmp'] = kwargs.pop('sort_function')
self._values.sort(*args, **kwargs)
collections_abc.MutableSequence.register(BaseContainer)
class RepeatedScalarFieldContainer(BaseContainer):
......@@ -341,8 +343,6 @@ class RepeatedScalarFieldContainer(BaseContainer):
# We are presumably comparing against some other sequence type.
return other == self._values
collections_abc.MutableSequence.register(BaseContainer)
class RepeatedCompositeFieldContainer(BaseContainer):
......@@ -380,6 +380,24 @@ class RepeatedCompositeFieldContainer(BaseContainer):
self._message_listener.Modified()
return new_element
def append(self, value):
  """Appends a single message to the container.

  The message is deep-copied rather than stored by reference, so later
  mutations of ``value`` do not affect the stored element.
  """
  copied = self._message_descriptor._concrete_class()
  copied._SetListener(self._message_listener)
  copied.CopyFrom(value)
  self._values.append(copied)
  if not self._message_listener.dirty:
    self._message_listener.Modified()
def insert(self, key, value):
  """Inserts a copy of ``value`` at position ``key`` (Python list semantics)."""
  copied = self._message_descriptor._concrete_class()
  copied._SetListener(self._message_listener)
  copied.CopyFrom(value)
  self._values.insert(key, copied)
  if not self._message_listener.dirty:
    self._message_listener.Modified()
def extend(self, elem_seq):
"""Extends by appending the given sequence of elements of the same type
as this one, copying each individual message.
......
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains _ExtensionDict class to represent extensions.
"""
from google.protobuf.internal import type_checkers
from google.protobuf.descriptor import FieldDescriptor
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid.

  Args:
    message: The Message instance the extension is meant to extend.
    extension_handle: The FieldDescriptor of the extension to validate.

  Raises:
    KeyError: If the handle is not a FieldDescriptor, is not actually an
      extension, lacks a containing_type, or extends a message type other
      than ``message``'s.
  """
  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):

  """Dict-like container for Extension fields on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    self._extended_message = extended_message

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
      return result

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      # Repeated fields get a lazily-created container; the constructor was
      # attached to the descriptor when the message class was built.
      result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      assert getattr(extension_handle.message_type, '_concrete_class', None), (
          'Uninitialized concrete class found for field %r (message type %r)'
          % (extension_handle.full_name,
             extension_handle.message_type.full_name))
      result = extension_handle.message_type._concrete_class()
      try:
        # Wire the child up so mutations propagate "has" bits to the parent;
        # the listener may already be dead, in which case we don't care.
        result._SetListener(self._extended_message._listener_for_children)
      except ReferenceError:
        pass
    else:
      # Singular scalar -- just return the default without inserting into the
      # dict.
      return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created.  If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic.  This is true
    # in CPython but we haven't investigated others.  This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False

    my_fields = self._extended_message.ListFields()
    other_fields = other._extended_message.ListFields()

    # Get rid of non-extension fields.
    # NOTE(review): ListFields() appears to yield (descriptor, value) pairs
    # (see __len__ below, which uses field[0].is_extension); `field.is_extension`
    # here looks inconsistent -- confirm against ListFields()'s contract.
    my_fields = [field for field in my_fields if field.is_extension]
    other_fields = [field for field in other_fields if field.is_extension]

    return my_fields == other_fields

  def __ne__(self, other):
    return not self == other

  def __len__(self):
    fields = self._extended_message.ListFields()
    # Get rid of non-extension fields.
    extension_fields = [field for field in fields if field[0].is_extension]
    return len(extension_fields)

  def __hash__(self):
    raise TypeError('unhashable object')

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields.  Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.

    Raises:
      KeyError: If the handle is invalid for this message (see
        _VerifyExtensionHandle).
      TypeError: If the extension is repeated or composite, or if the value
        fails type checking.
    """
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
        extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor, or None if not found.
    """
    return self._extended_message._extensions_by_name.get(name, None)

  def _FindExtensionByNumber(self, number):
    """Tries to find a known extension with the field number.

    Args:
      number: Extension field number.

    Returns:
      Extension field descriptor, or None if not found.
    """
    return self._extended_message._extensions_by_number.get(number, None)
......@@ -52,7 +52,6 @@ from google.protobuf import wrappers_pb2
from google.protobuf import any_test_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor_pool
from google.protobuf import json_format
from google.protobuf.util import json_format_proto3_pb2
......@@ -481,6 +480,14 @@ class JsonFormatTest(JsonFormatBase):
parsed_message = json_format_proto3_pb2.TestFieldMask()
self.CheckParseBack(message, parsed_message)
message.value.Clear()
self.assertEqual(
json_format.MessageToJson(message, True),
'{\n'
' "value": ""\n'
'}')
self.CheckParseBack(message, parsed_message)
def testWrapperMessage(self):
message = json_format_proto3_pb2.TestWrapper()
message.bool_value.value = False
......@@ -922,17 +929,18 @@ class JsonFormatTest(JsonFormatBase):
text = '{"value": "10000-01-01T00:00:00.00Z"}'
self.assertRaisesRegexp(
json_format.ParseError,
'Failed to parse value field: '
'time data \'10000-01-01T00:00:00\' does not match'
' format \'%Y-%m-%dT%H:%M:%S\'.',
json_format.Parse, text, message)
text = '{"value": "1970-01-01T00:00:00.0123456789012Z"}'
self.assertRaisesRegexp(
well_known_types.ParseError,
json_format.ParseError,
'nanos 0123456789012 more than 9 fractional digits.',
json_format.Parse, text, message)
text = '{"value": "1972-01-01T01:00:00.01+08"}'
self.assertRaisesRegexp(
well_known_types.ParseError,
json_format.ParseError,
(r'Invalid timezone offset value: \+08.'),
json_format.Parse, text, message)
# Time smaller than minimum time.
......
......@@ -136,13 +136,14 @@ class MessageFactoryTest(unittest.TestCase):
'google.protobuf.python.internal.Factory2Message.one_more_field')
ext2 = msg1.Extensions._FindExtensionByName(
'google.protobuf.python.internal.another_field')
self.assertEqual(0, len(msg1.Extensions))
msg1.Extensions[ext1] = 'test1'
msg1.Extensions[ext2] = 'test2'
self.assertEqual('test1', msg1.Extensions[ext1])
self.assertEqual('test2', msg1.Extensions[ext2])
self.assertEqual(None,
msg1.Extensions._FindExtensionByNumber(12321))
self.assertRaises(TypeError, len, msg1.Extensions)
self.assertEqual(2, len(msg1.Extensions))
if api_implementation.Type() == 'cpp':
self.assertRaises(TypeError,
msg1.Extensions._FindExtensionByName, 0)
......
......@@ -411,6 +411,58 @@ class MessageTest(BaseTestCase):
empty.ParseFromString(populated.SerializeToString())
self.assertEqual(str(empty), '')
def testAppendRepeatedCompositeField(self, message_module):
  """append() on a repeated composite field copies messages and rejects
  non-message values."""
  msg = message_module.TestAllTypes()
  msg.repeated_nested_message.append(
      message_module.TestAllTypes.NestedMessage(bb=1))
  nested = message_module.TestAllTypes.NestedMessage(bb=2)
  msg.repeated_nested_message.append(nested)
  # Appending a non-message must raise; the try/except-pass form only
  # implicitly relied on the later length check, assertRaises makes the
  # requirement explicit.
  with self.assertRaises(TypeError):
    msg.repeated_nested_message.append(1)
  self.assertEqual(2, len(msg.repeated_nested_message))
  self.assertEqual([1, 2],
                   [m.bb for m in msg.repeated_nested_message])
def testInsertRepeatedCompositeField(self, message_module):
  """insert() follows Python list semantics (negative and out-of-range
  indices clamp), copies its argument, and rejects non-message values."""
  msg = message_module.TestAllTypes()
  msg.repeated_nested_message.insert(
      -1, message_module.TestAllTypes.NestedMessage(bb=1))
  sub_msg = msg.repeated_nested_message[0]
  msg.repeated_nested_message.insert(
      0, message_module.TestAllTypes.NestedMessage(bb=2))
  msg.repeated_nested_message.insert(
      99, message_module.TestAllTypes.NestedMessage(bb=3))
  msg.repeated_nested_message.insert(
      -2, message_module.TestAllTypes.NestedMessage(bb=-1))
  msg.repeated_nested_message.insert(
      -1000, message_module.TestAllTypes.NestedMessage(bb=-1000))
  # Inserting a non-message must raise; assertRaises makes the requirement
  # explicit instead of relying solely on the later length check.
  with self.assertRaises(TypeError):
    msg.repeated_nested_message.insert(1, 999)
  self.assertEqual(5, len(msg.repeated_nested_message))
  self.assertEqual([-1000, 2, -1, 1, 3],
                   [m.bb for m in msg.repeated_nested_message])
  self.assertEqual(str(msg),
                   'repeated_nested_message {\n'
                   '  bb: -1000\n'
                   '}\n'
                   'repeated_nested_message {\n'
                   '  bb: 2\n'
                   '}\n'
                   'repeated_nested_message {\n'
                   '  bb: -1\n'
                   '}\n'
                   'repeated_nested_message {\n'
                   '  bb: 1\n'
                   '}\n'
                   'repeated_nested_message {\n'
                   '  bb: 3\n'
                   '}\n')
  # The element fetched before the later inserts must be unaffected:
  # insert() copies, it does not alias.
  self.assertEqual(sub_msg.bb, 1)
def testMergeFromRepeatedField(self, message_module):
msg = message_module.TestAllTypes()
msg.repeated_int32.append(1)
......@@ -442,6 +494,30 @@ class MessageTest(BaseTestCase):
pass
self.assertEqual(len(msg.repeated_nested_message), 0)
def testRepeatedContains(self, message_module):
  """Membership tests (`in`) work for repeated scalar and composite fields."""
  msg = message_module.TestAllTypes()
  msg.repeated_int32.extend([1, 2, 3])
  self.assertIn(2, msg.repeated_int32)
  self.assertNotIn(0, msg.repeated_int32)

  msg.repeated_nested_message.add(bb=1)
  sub_msg1 = msg.repeated_nested_message[0]
  sub_msg2 = message_module.TestAllTypes.NestedMessage(bb=2)
  sub_msg3 = message_module.TestAllTypes.NestedMessage(bb=3)
  # append()/insert() copy their argument, so containment must hold by
  # message equality rather than object identity.
  msg.repeated_nested_message.append(sub_msg2)
  msg.repeated_nested_message.insert(0, sub_msg3)
  self.assertIn(sub_msg1, msg.repeated_nested_message)
  self.assertIn(sub_msg2, msg.repeated_nested_message)
  self.assertIn(sub_msg3, msg.repeated_nested_message)
def testRepeatedScalarIterable(self, message_module):
  """Repeated scalar fields support plain Python for-loop iteration."""
  msg = message_module.TestAllTypes()
  msg.repeated_int32.extend([1, 2, 3])
  total = 0
  for value in msg.repeated_int32:
    total += value
  self.assertEqual(total, 6)
def testRepeatedNestedFieldIteration(self, message_module):
msg = message_module.TestAllTypes()
msg.repeated_nested_message.add(bb=1)
......@@ -1173,6 +1249,27 @@ class MessageTest(BaseTestCase):
with self.assertRaises(AttributeError):
m.repeated_int32 = []
def testReturningType(self, message_module):
  """Scalar accessors return canonical Python types (float/bool) even when
  the values were assigned as ints, including after a serialize/parse
  round trip."""
  m = message_module.TestAllTypes()
  self.assertEqual(float, type(m.optional_float))
  self.assertEqual(float, type(m.optional_double))
  self.assertEqual(bool, type(m.optional_bool))
  # Assign ints: the field type checkers coerce them to the field's type.
  m.optional_float = 1
  m.optional_double = 1
  m.optional_bool = 1
  m.repeated_float.append(1)
  m.repeated_double.append(1)
  m.repeated_bool.append(1)
  m.ParseFromString(m.SerializeToString())
  # type() equality (not isinstance) is deliberate: bool subclasses int, so
  # these checks would be weaker with assertIsInstance.
  self.assertEqual(float, type(m.optional_float))
  self.assertEqual(float, type(m.optional_double))
  self.assertEqual('1.0', str(m.optional_double))
  self.assertEqual(bool, type(m.optional_bool))
  self.assertEqual(float, type(m.repeated_float[0]))
  self.assertEqual(float, type(m.repeated_double[0]))
  self.assertEqual(bool, type(m.repeated_bool[0]))
  self.assertEqual(True, m.repeated_bool[0])
# Class to test proto2-only features (required, extensions, etc.)
class Proto2Test(BaseTestCase):
......
......@@ -50,3 +50,262 @@ extend OutOfOrderFields {
optional uint64 optional_uint64 = 4;
optional int64 optional_int64 = 2;
}
// Presumably a stress test for handling a large number (256, B0-B255) of
// nested message types in a single message -- confirm intent with the commit
// that introduced it.
message LotsNestedMessage {
  message B0 {}
  message B1 {}
  message B2 {}
  message B3 {}
  message B4 {}
  message B5 {}
  message B6 {}
  message B7 {}
  message B8 {}
  message B9 {}
  message B10 {}
  message B11 {}
  message B12 {}
  message B13 {}
  message B14 {}
  message B15 {}
  message B16 {}
  message B17 {}
  message B18 {}
  message B19 {}
  message B20 {}
  message B21 {}
  message B22 {}
  message B23 {}
  message B24 {}
  message B25 {}
  message B26 {}
  message B27 {}
  message B28 {}
  message B29 {}
  message B30 {}
  message B31 {}
  message B32 {}
  message B33 {}
  message B34 {}
  message B35 {}
  message B36 {}
  message B37 {}
  message B38 {}
  message B39 {}
  message B40 {}
  message B41 {}
  message B42 {}
  message B43 {}
  message B44 {}
  message B45 {}
  message B46 {}
  message B47 {}
  message B48 {}
  message B49 {}
  message B50 {}
  message B51 {}
  message B52 {}
  message B53 {}
  message B54 {}
  message B55 {}
  message B56 {}
  message B57 {}
  message B58 {}
  message B59 {}
  message B60 {}
  message B61 {}
  message B62 {}
  message B63 {}
  message B64 {}
  message B65 {}
  message B66 {}
  message B67 {}
  message B68 {}
  message B69 {}
  message B70 {}
  message B71 {}
  message B72 {}
  message B73 {}
  message B74 {}
  message B75 {}
  message B76 {}
  message B77 {}
  message B78 {}
  message B79 {}
  message B80 {}
  message B81 {}
  message B82 {}
  message B83 {}
  message B84 {}
  message B85 {}
  message B86 {}
  message B87 {}
  message B88 {}
  message B89 {}
  message B90 {}
  message B91 {}
  message B92 {}
  message B93 {}
  message B94 {}
  message B95 {}
  message B96 {}
  message B97 {}
  message B98 {}
  message B99 {}
  message B100 {}
  message B101 {}
  message B102 {}
  message B103 {}
  message B104 {}
  message B105 {}
  message B106 {}
  message B107 {}
  message B108 {}
  message B109 {}
  message B110 {}
  message B111 {}
  message B112 {}
  message B113 {}
  message B114 {}
  message B115 {}
  message B116 {}
  message B117 {}
  message B118 {}
  message B119 {}
  message B120 {}
  message B121 {}
  message B122 {}
  message B123 {}
  message B124 {}
  message B125 {}
  message B126 {}
  message B127 {}
  message B128 {}
  message B129 {}
  message B130 {}
  message B131 {}
  message B132 {}
  message B133 {}
  message B134 {}
  message B135 {}
  message B136 {}
  message B137 {}
  message B138 {}
  message B139 {}
  message B140 {}
  message B141 {}
  message B142 {}
  message B143 {}
  message B144 {}
  message B145 {}
  message B146 {}
  message B147 {}
  message B148 {}
  message B149 {}
  message B150 {}
  message B151 {}
  message B152 {}
  message B153 {}
  message B154 {}
  message B155 {}
  message B156 {}
  message B157 {}
  message B158 {}
  message B159 {}
  message B160 {}
  message B161 {}
  message B162 {}
  message B163 {}
  message B164 {}
  message B165 {}
  message B166 {}
  message B167 {}
  message B168 {}
  message B169 {}
  message B170 {}
  message B171 {}
  message B172 {}
  message B173 {}
  message B174 {}
  message B175 {}
  message B176 {}
  message B177 {}
  message B178 {}
  message B179 {}
  message B180 {}
  message B181 {}
  message B182 {}
  message B183 {}
  message B184 {}
  message B185 {}
  message B186 {}
  message B187 {}
  message B188 {}
  message B189 {}
  message B190 {}
  message B191 {}
  message B192 {}
  message B193 {}
  message B194 {}
  message B195 {}
  message B196 {}
  message B197 {}
  message B198 {}
  message B199 {}
  message B200 {}
  message B201 {}
  message B202 {}
  message B203 {}
  message B204 {}
  message B205 {}
  message B206 {}
  message B207 {}
  message B208 {}
  message B209 {}
  message B210 {}
  message B211 {}
  message B212 {}
  message B213 {}
  message B214 {}
  message B215 {}
  message B216 {}
  message B217 {}
  message B218 {}
  message B219 {}
  message B220 {}
  message B221 {}
  message B222 {}
  message B223 {}
  message B224 {}
  message B225 {}
  message B226 {}
  message B227 {}
  message B228 {}
  message B229 {}
  message B230 {}
  message B231 {}
  message B232 {}
  message B233 {}
  message B234 {}
  message B235 {}
  message B236 {}
  message B237 {}
  message B238 {}
  message B239 {}
  message B240 {}
  message B241 {}
  message B242 {}
  message B243 {}
  message B244 {}
  message B245 {}
  message B246 {}
  message B247 {}
  message B248 {}
  message B249 {}
  message B250 {}
  message B251 {}
  message B252 {}
  message B253 {}
  message B254 {}
  message B255 {}
}
......@@ -64,6 +64,7 @@ from google.protobuf.internal import containers
from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import enum_type_wrapper
from google.protobuf.internal import extension_dict
from google.protobuf.internal import message_listener as message_listener_mod
from google.protobuf.internal import type_checkers
from google.protobuf.internal import well_known_types
......@@ -74,7 +75,7 @@ from google.protobuf import text_format
_FieldDescriptor = descriptor_mod.FieldDescriptor
_AnyFullTypeName = 'google.protobuf.Any'
_ExtensionDict = extension_dict._ExtensionDict
class GeneratedProtocolMessageType(type):
......@@ -237,28 +238,6 @@ def _PropertyName(proto_field_name):
return proto_field_name
def _VerifyExtensionHandle(message, extension_handle):
"""Verify that the given extension handle is valid."""
if not isinstance(extension_handle, _FieldDescriptor):
raise KeyError('HasExtension() expects an extension handle, got: %s' %
extension_handle)
if not extension_handle.is_extension:
raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
if not extension_handle.containing_type:
raise KeyError('"%s" is missing a containing_type.'
% extension_handle.full_name)
if extension_handle.containing_type is not message.DESCRIPTOR:
raise KeyError('Extension "%s" extends message type "%s", but this '
'message is of type "%s".' %
(extension_handle.full_name,
extension_handle.containing_type.full_name,
message.DESCRIPTOR.full_name))
def _AddSlots(message_descriptor, dictionary):
"""Adds a __slots__ entry to dictionary, containing the names of all valid
attributes for this message type.
......@@ -379,8 +358,8 @@ def _AttachFieldHelpers(cls, field_descriptor):
def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
extension_dict = descriptor.extensions_by_name
for extension_name, extension_field in extension_dict.items():
extensions = descriptor.extensions_by_name
for extension_name, extension_field in extensions.items():
assert extension_name not in dictionary
dictionary[extension_name] = extension_field
......@@ -784,8 +763,8 @@ def _AddPropertiesForNonRepeatedCompositeField(field, cls):
def _AddPropertiesForExtensions(descriptor, cls):
"""Adds properties for all fields in this protocol message type."""
extension_dict = descriptor.extensions_by_name
for extension_name, extension_field in extension_dict.items():
extensions = descriptor.extensions_by_name
for extension_name, extension_field in extensions.items():
constant_name = extension_name.upper() + '_FIELD_NUMBER'
setattr(cls, constant_name, extension_field.number)
......@@ -922,7 +901,7 @@ def _AddClearFieldMethod(message_descriptor, cls):
def _AddClearExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
def ClearExtension(self, extension_handle):
_VerifyExtensionHandle(self, extension_handle)
extension_dict._VerifyExtensionHandle(self, extension_handle)
# Similar to ClearField(), above.
if extension_handle in self._fields:
......@@ -934,7 +913,7 @@ def _AddClearExtensionMethod(cls):
def _AddHasExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
def HasExtension(self, extension_handle):
_VerifyExtensionHandle(self, extension_handle)
extension_dict._VerifyExtensionHandle(self, extension_handle)
if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
raise KeyError('"%s" is repeated.' % extension_handle.full_name)
......@@ -1550,126 +1529,3 @@ class _OneofListener(_Listener):
super(_OneofListener, self).Modified()
except ReferenceError:
pass
# TODO(robinson): Move elsewhere? This file is getting pretty ridiculous...
# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):
"""Dict-like container for supporting an indexable "Extensions"
field on proto instances.
Note that in all cases we expect extension handles to be
FieldDescriptors.
"""
def __init__(self, extended_message):
"""extended_message: Message instance for which we are the Extensions dict.
"""
self._extended_message = extended_message
def __getitem__(self, extension_handle):
"""Returns the current value of the given extension handle."""
_VerifyExtensionHandle(self._extended_message, extension_handle)
result = self._extended_message._fields.get(extension_handle)
if result is not None:
return result
if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
result = extension_handle._default_constructor(self._extended_message)
elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
assert getattr(extension_handle.message_type, '_concrete_class', None), (
'Uninitialized concrete class found for field %r (message type %r)'
% (extension_handle.full_name,
extension_handle.message_type.full_name))
result = extension_handle.message_type._concrete_class()
try:
result._SetListener(self._extended_message._listener_for_children)
except ReferenceError:
pass
else:
# Singular scalar -- just return the default without inserting into the
# dict.
return extension_handle.default_value
# Atomically check if another thread has preempted us and, if not, swap
# in the new object we just created. If someone has preempted us, we
# take that object and discard ours.
# WARNING: We are relying on setdefault() being atomic. This is true
# in CPython but we haven't investigated others. This warning appears
# in several other locations in this file.
result = self._extended_message._fields.setdefault(
extension_handle, result)
return result
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
my_fields = self._extended_message.ListFields()
other_fields = other._extended_message.ListFields()
# Get rid of non-extension fields.
my_fields = [field for field in my_fields if field.is_extension]
other_fields = [field for field in other_fields if field.is_extension]
return my_fields == other_fields
def __ne__(self, other):
return not self == other
def __hash__(self):
raise TypeError('unhashable object')
# Note that this is only meaningful for non-repeated, scalar extension
# fields. Note also that we may have to call _Modified() when we do
# successfully set a field this way, to set any necessary "has" bits in the
# ancestors of the extended message.
def __setitem__(self, extension_handle, value):
"""If extension_handle specifies a non-repeated, scalar extension
field, sets the value of that field.
"""
_VerifyExtensionHandle(self._extended_message, extension_handle)
if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
raise TypeError(
'Cannot assign to extension "%s" because it is a repeated or '
'composite type.' % extension_handle.full_name)
# It's slightly wasteful to lookup the type checker each time,
# but we expect this to be a vanishingly uncommon case anyway.
type_checker = type_checkers.GetTypeChecker(extension_handle)
# pylint: disable=protected-access
self._extended_message._fields[extension_handle] = (
type_checker.CheckValue(value))
self._extended_message._Modified()
def _FindExtensionByName(self, name):
"""Tries to find a known extension with the specified name.
Args:
name: Extension full name.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_name.get(name, None)
def _FindExtensionByNumber(self, number):
"""Tries to find a known extension with the field number.
Args:
number: Extension field number.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_number.get(number, None)
......@@ -107,13 +107,18 @@ class TypeChecker(object):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), self._acceptable_types))
raise TypeError(message)
# Some field types(float, double and bool) accept other types, must
# convert to the correct type in such cases.
if self._acceptable_types:
if self._acceptable_types[0] in (bool, float):
return self._acceptable_types[0](proposed_value)
return proposed_value
class TypeCheckerWithDefault(TypeChecker):
def __init__(self, default_value, *acceptable_types):
TypeChecker.__init__(self, acceptable_types)
TypeChecker.__init__(self, *acceptable_types)
self._default_value = default_value
def DefaultValue(self):
......@@ -232,9 +237,9 @@ _VALUE_CHECKERS = {
_FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
_FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
_FieldDescriptor.CPPTYPE_DOUBLE: TypeCheckerWithDefault(
0.0, numbers.Real),
0.0, float, numbers.Real),
_FieldDescriptor.CPPTYPE_FLOAT: TypeCheckerWithDefault(
0.0, numbers.Real),
0.0, float, numbers.Real),
_FieldDescriptor.CPPTYPE_BOOL: TypeCheckerWithDefault(
False, bool, numbers.Integral),
_FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
......
......@@ -58,14 +58,6 @@ _SECONDS_PER_DAY = 24 * 3600
_DURATION_SECONDS_MAX = 315576000000
class Error(Exception):
  """Top-level module error; base class for exceptions raised by this module."""
class ParseError(Error):
  """Raised when parsing fails."""
class Any(object):
"""Class for Any Message type."""
......@@ -136,7 +128,7 @@ class Timestamp(object):
Example of accepted format: '1972-01-01T10:00:20.021-05:00'
Raises:
ParseError: On parsing problems.
ValueError: On parsing problems.
"""
timezone_offset = value.find('Z')
if timezone_offset == -1:
......@@ -144,7 +136,7 @@ class Timestamp(object):
if timezone_offset == -1:
timezone_offset = value.rfind('-')
if timezone_offset == -1:
raise ParseError(
raise ValueError(
'Failed to parse timestamp: missing valid timezone offset.')
time_value = value[0:timezone_offset]
# Parse datetime and nanos.
......@@ -159,7 +151,7 @@ class Timestamp(object):
td = date_object - datetime(1970, 1, 1)
seconds = td.seconds + td.days * _SECONDS_PER_DAY
if len(nano_value) > 9:
raise ParseError(
raise ValueError(
'Failed to parse Timestamp: nanos {0} more than '
'9 fractional digits.'.format(nano_value))
if nano_value:
......@@ -169,13 +161,13 @@ class Timestamp(object):
# Parse timezone offsets.
if value[timezone_offset] == 'Z':
if len(value) != timezone_offset + 1:
raise ParseError('Failed to parse timestamp: invalid trailing'
raise ValueError('Failed to parse timestamp: invalid trailing'
' data {0}.'.format(value))
else:
timezone = value[timezone_offset:]
pos = timezone.find(':')
if pos == -1:
raise ParseError(
raise ValueError(
'Invalid timezone offset value: {0}.'.format(timezone))
if timezone[0] == '+':
seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
......@@ -289,10 +281,10 @@ class Duration(object):
precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s
Raises:
ParseError: On parsing problems.
ValueError: On parsing problems.
"""
if len(value) < 1 or value[-1] != 's':
raise ParseError(
raise ValueError(
'Duration must end with letter "s": {0}.'.format(value))
try:
pos = value.find('.')
......@@ -308,9 +300,9 @@ class Duration(object):
_CheckDurationValid(seconds, nanos)
self.seconds = seconds
self.nanos = nanos
except ValueError:
raise ParseError(
'Couldn\'t parse duration: {0}.'.format(value))
except ValueError as e:
raise ValueError(
'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
def ToNanoseconds(self):
"""Converts a Duration to nanoseconds."""
......@@ -375,15 +367,15 @@ class Duration(object):
def _CheckDurationValid(seconds, nanos):
if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
raise Error(
raise ValueError(
'Duration is not valid: Seconds {0} must be in range '
'[-315576000000, 315576000000].'.format(seconds))
if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
raise Error(
raise ValueError(
'Duration is not valid: Nanos {0} must be in range '
'[-999999999, 999999999].'.format(nanos))
if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
raise Error(
raise ValueError(
'Duration is not valid: Sign mismatch.')
......@@ -415,8 +407,9 @@ class FieldMask(object):
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
self.Clear()
for path in value.split(','):
self.paths.append(_CamelCaseToSnakeCase(path))
if value:
for path in value.split(','):
self.paths.append(_CamelCaseToSnakeCase(path))
def IsValidForDescriptor(self, message_descriptor):
"""Checks whether the FieldMask is valid for Message Descriptor."""
......@@ -509,24 +502,26 @@ def _SnakeCaseToCamelCase(path_name):
after_underscore = False
for c in path_name:
if c.isupper():
raise Error('Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise Error('Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise Error('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
......@@ -535,7 +530,7 @@ def _CamelCaseToSnakeCase(path_name):
result = []
for c in path_name:
if c == '_':
raise ParseError('Fail to parse FieldMask: Path name '
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
......@@ -682,7 +677,7 @@ def _MergeMessage(
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node:
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
......
......@@ -294,12 +294,12 @@ class TimeUtilTest(TimeUtilTestBase):
def testInvalidTimestamp(self):
message = timestamp_pb2.Timestamp()
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'Failed to parse timestamp: missing valid timezone offset.',
message.FromJsonString,
'')
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'Failed to parse timestamp: invalid trailing data '
'1970-01-01T00:00:01Ztrail.',
message.FromJsonString,
......@@ -310,12 +310,12 @@ class TimeUtilTest(TimeUtilTestBase):
' format \'%Y-%m-%dT%H:%M:%S\'',
message.FromJsonString, '10000-01-01T00:00:00.00Z')
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'nanos 0123456789012 more than 9 fractional digits.',
message.FromJsonString,
'1970-01-01T00:00:00.0123456789012Z')
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
(r'Invalid timezone offset value: \+08.'),
message.FromJsonString,
'1972-01-01T01:00:00.01+08',)
......@@ -333,43 +333,43 @@ class TimeUtilTest(TimeUtilTestBase):
def testInvalidDuration(self):
message = duration_pb2.Duration()
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'Duration must end with letter "s": 1.',
message.FromJsonString, '1')
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'Couldn\'t parse duration: 1...2s.',
message.FromJsonString, '1...2s')
text = '-315576000001.000000000s'
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
r'Duration is not valid\: Seconds -315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.FromJsonString, text)
text = '315576000001.000000000s'
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
r'Duration is not valid\: Seconds 315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.FromJsonString, text)
message.seconds = -315576000001
message.nanos = 0
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
r'Duration is not valid\: Seconds -315576000001 must be in range'
r' \[-315576000000\, 315576000000\].',
message.ToJsonString)
message.seconds = 0
message.nanos = 999999999 + 1
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
r'Duration is not valid\: Nanos 1000000000 must be in range'
r' \[-999999999\, 999999999\].',
message.ToJsonString)
message.seconds = -1
message.nanos = 1
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
r'Duration is not valid\: Sign mismatch.',
message.ToJsonString)
......@@ -400,6 +400,7 @@ class FieldMaskTest(unittest.TestCase):
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
self.assertEqual([], mask.paths)
mask.FromJsonString('fooBar')
self.assertEqual(['foo_bar'], mask.paths)
mask.FromJsonString('fooBar,barQuz')
......@@ -512,6 +513,8 @@ class FieldMaskTest(unittest.TestCase):
mask2.FromJsonString('bar,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('', out_mask.ToJsonString())
self.assertEqual(len(out_mask.paths), 0)
self.assertEqual(out_mask.paths, [])
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
......@@ -526,6 +529,15 @@ class FieldMaskTest(unittest.TestCase):
mask2.FromJsonString('foo.bar.baz,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
# Intersect '' with ''
mask1.Clear()
mask2.Clear()
mask1.paths.append('')
mask2.paths.append('')
self.assertEqual(mask1.paths, [''])
self.assertEqual('', mask1.ToJsonString())
out_mask.Intersect(mask1, mask2)
self.assertEqual(out_mask.paths, [])
def testMergeMessageWithoutMapFields(self):
# Test merge one field.
......@@ -682,7 +694,7 @@ class FieldMaskTest(unittest.TestCase):
# No uppercase letter is allowed.
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
well_known_types._SnakeCaseToCamelCase,
......@@ -692,19 +704,19 @@ class FieldMaskTest(unittest.TestCase):
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
well_known_types._SnakeCaseToCamelCase,
'foo__bar')
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
well_known_types._SnakeCaseToCamelCase,
'foo_3bar')
self.assertRaisesRegexp(
well_known_types.Error,
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.',
well_known_types._SnakeCaseToCamelCase,
......@@ -718,7 +730,7 @@ class FieldMaskTest(unittest.TestCase):
self.assertEqual('foo3_bar',
well_known_types._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegexp(
well_known_types.ParseError,
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
well_known_types._CamelCaseToSnakeCase,
'foo_bar')
......
......@@ -570,7 +570,7 @@ class _Parser(object):
setattr(message, field.name, _ConvertScalarFieldValue(value, field))
except ParseError as e:
if field and field.containing_oneof is None:
raise ParseError('Failed to parse {0} field: {1}'.format(name, e))
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
else:
raise ParseError(str(e))
except ValueError as e:
......@@ -607,7 +607,10 @@ class _Parser(object):
"""Convert a JSON representation into message with FromJsonString."""
# Duration, Timestamp, FieldMask have a FromJsonString method to do the
# conversion. Users can also call the method directly.
message.FromJsonString(value)
try:
message.FromJsonString(value)
except ValueError as e:
raise ParseError(e)
def _ConvertValueMessage(self, value, message):
"""Convert a JSON representation into Value message."""
......
......@@ -47,6 +47,7 @@
#include <Python.h>
#include <google/protobuf/descriptor_database.h>
#include <google/protobuf/message.h>
namespace google {
......@@ -76,6 +77,11 @@ struct PyProto_API {
// pointing to the message, like submessages or repeated containers.
// With the current implementation, only empty messages are in this case.
virtual Message* GetMutableMessagePointer(PyObject* msg) const = 0;
// Expose the underlying DescriptorPool and MessageFactory to enable C++ code
// to create Python-compatible message.
virtual const DescriptorPool* GetDefaultDescriptorPool() const = 0;
virtual MessageFactory* GetDefaultMessageFactory() const = 0;
};
inline const char* PyProtoAPICapsuleName() {
......
......@@ -65,6 +65,31 @@ namespace python {
namespace extension_dict {
// Returns the number of set extensions that are visible from Python, i.e. the
// value reported by len(msg.Extensions).
static Py_ssize_t len(ExtensionDict* self) {
  std::vector<const FieldDescriptor*> fields;
  const auto* message = self->parent->message;
  message->GetReflection()->ListFields(*message, &fields);

  Py_ssize_t count = 0;
  for (const FieldDescriptor* field : fields) {
    if (!field->is_extension()) {
      continue;
    }
    // With C++ descriptors, the field can always be retrieved, but for
    // unknown extensions which have not been imported in Python code, there
    // is no message class and we cannot retrieve the value.
    // ListFields() has the same behavior.
    if (field->message_type() != nullptr &&
        message_factory::GetMessageClass(
            cmessage::GetFactoryForMessage(self->parent),
            field->message_type()) == nullptr) {
      PyErr_Clear();
      continue;
    }
    ++count;
  }
  return count;
}
PyObject* subscript(ExtensionDict* self, PyObject* key) {
const FieldDescriptor* descriptor = cmessage::GetExtensionDescriptor(key);
if (descriptor == NULL) {
......@@ -246,7 +271,7 @@ static PyObject* RichCompare(ExtensionDict* self, PyObject* other, int opid) {
}
static PyMappingMethods MpMethods = {
(lenfunc)NULL, /* mp_length */
(lenfunc)len, /* mp_length */
(binaryfunc)subscript, /* mp_subscript */
(objobjargproc)ass_subscript,/* mp_ass_subscript */
};
......
......@@ -30,7 +30,9 @@
#include <Python.h>
#include <google/protobuf/pyext/descriptor_pool.h>
#include <google/protobuf/pyext/message.h>
#include <google/protobuf/pyext/message_factory.h>
#include <google/protobuf/proto_api.h>
#include <google/protobuf/message_lite.h>
......@@ -45,37 +47,42 @@ struct ApiImplementation : google::protobuf::python::PyProto_API {
google::protobuf::Message* GetMutableMessagePointer(PyObject* msg) const override {
return google::protobuf::python::PyMessage_GetMutableMessagePointer(msg);
}
const google::protobuf::DescriptorPool* GetDefaultDescriptorPool() const override {
return google::protobuf::python::GetDefaultDescriptorPool()->pool;
}
google::protobuf::MessageFactory* GetDefaultMessageFactory() const override {
return google::protobuf::python::GetDefaultDescriptorPool()
->py_message_factory->message_factory;
}
};
} // namespace
static const char module_docstring[] =
"python-proto2 is a module that can be used to enhance proto2 Python API\n"
"performance.\n"
"\n"
"It provides access to the protocol buffers C++ reflection API that\n"
"implements the basic protocol buffer functions.";
"python-proto2 is a module that can be used to enhance proto2 Python API\n"
"performance.\n"
"\n"
"It provides access to the protocol buffers C++ reflection API that\n"
"implements the basic protocol buffer functions.";
static PyMethodDef ModuleMethods[] = {
{"SetAllowOversizeProtos",
(PyCFunction)google::protobuf::python::cmessage::SetAllowOversizeProtos,
METH_O, "Enable/disable oversize proto parsing."},
// DO NOT USE: For migration and testing only.
{ NULL, NULL}
};
{"SetAllowOversizeProtos",
(PyCFunction)google::protobuf::python::cmessage::SetAllowOversizeProtos, METH_O,
"Enable/disable oversize proto parsing."},
// DO NOT USE: For migration and testing only.
{NULL, NULL}};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef _module = {
PyModuleDef_HEAD_INIT,
"_message",
module_docstring,
-1,
ModuleMethods, /* m_methods */
NULL,
NULL,
NULL,
NULL
};
static struct PyModuleDef _module = {PyModuleDef_HEAD_INIT,
"_message",
module_docstring,
-1,
ModuleMethods, /* m_methods */
NULL,
NULL,
NULL,
NULL};
#define INITFUNC PyInit__message
#define INITFUNC_ERRORVAL NULL
#else // Python 2
......
......@@ -159,10 +159,6 @@ static PyObject* AddToAttached(RepeatedCompositeContainer* self,
}
PyObject* py_cmsg = reinterpret_cast<PyObject*>(cmsg);
if (PyList_Append(self->child_messages, py_cmsg) < 0) {
Py_DECREF(py_cmsg);
return NULL;
}
return py_cmsg;
}
......@@ -174,6 +170,18 @@ static PyObject* AddToReleased(RepeatedCompositeContainer* self,
// Create a new Message detached from the rest.
PyObject* py_cmsg = PyEval_CallObjectWithKeywords(
self->child_message_class->AsPyObject(), args, kwargs);
return py_cmsg;
}
PyObject* Add(RepeatedCompositeContainer* self,
PyObject* args,
PyObject* kwargs) {
PyObject* py_cmsg;
if (self->message == nullptr)
py_cmsg = AddToReleased(self, args, kwargs);
else
py_cmsg = AddToAttached(self, args, kwargs);
if (py_cmsg == NULL)
return NULL;
......@@ -184,19 +192,97 @@ static PyObject* AddToReleased(RepeatedCompositeContainer* self,
return py_cmsg;
}
PyObject* Add(RepeatedCompositeContainer* self,
PyObject* args,
PyObject* kwargs) {
if (self->message == NULL)
return AddToReleased(self, args, kwargs);
else
return AddToAttached(self, args, kwargs);
}
static PyObject* AddMethod(PyObject* self, PyObject* args, PyObject* kwargs) {
return Add(reinterpret_cast<RepeatedCompositeContainer*>(self), args, kwargs);
}
// ---------------------------------------------------------------------
// append()
// Appends one new element to the repeated field and merges |value| into it.
// Returns the new element's CMessage wrapper, or nullptr with a Python error
// set.  Handles both the attached case (self->message points at the parent's
// repeated field) and the released case (container detached from its parent).
static PyObject* AddMessage(RepeatedCompositeContainer* self, PyObject* value) {
  cmessage::AssureWritable(self->parent);
  if (UpdateChildMessages(self) < 0) {
    return nullptr;
  }
  PyObject* py_cmsg;
  if (self->message == nullptr) {
    // Released container: build a standalone message of the child class.
    py_cmsg = AddToReleased(self, nullptr, nullptr);
    if (py_cmsg == nullptr) return nullptr;
    CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
    // ScopedPyObjectPtr takes ownership of MergeFrom's result so its
    // reference is released; a nullptr result means the merge failed.
    if (ScopedPyObjectPtr(cmessage::MergeFrom(cmsg, value)) == nullptr) {
      Py_DECREF(cmsg);
      return nullptr;
    }
  } else {
    Message* message = self->message;
    const Reflection* reflection = message->GetReflection();
    // Attached container: AddToAttached has already grown the underlying
    // repeated field by one element.
    py_cmsg = AddToAttached(self, nullptr, nullptr);
    if (py_cmsg == nullptr) return nullptr;
    CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
    if (ScopedPyObjectPtr(cmessage::MergeFrom(cmsg, value)) == nullptr) {
      // Roll back the element added above so a failed merge leaves the
      // repeated field unchanged.
      reflection->RemoveLast(
          message, self->parent_field_descriptor);
      Py_DECREF(cmsg);
      return nullptr;
    }
  }
  return py_cmsg;
}
// Implements append(value): adds a new element at the end of the repeated
// field, merges |value| into it, and records the wrapper in the Python-side
// child_messages cache.  Returns None on success, nullptr on error.
static PyObject* AppendMethod(PyObject* pself, PyObject* value) {
  RepeatedCompositeContainer* self =
      reinterpret_cast<RepeatedCompositeContainer*>(pself);
  PyObject* py_cmsg = AddMessage(self, value);
  if (py_cmsg == nullptr) {
    return nullptr;
  }
  // Keep the cached child list in sync with the underlying repeated field.
  // PyList_Append adds its own reference to py_cmsg.
  if (PyList_Append(self->child_messages, py_cmsg) < 0) {
    Py_DECREF(py_cmsg);
    return nullptr;
  }
  // NOTE(review): the success path does not DECREF py_cmsg; if AddMessage
  // returns a new reference (as the failure path's DECREF suggests), one
  // reference appears to leak here — confirm AddMessage's ownership contract.
  Py_RETURN_NONE;
}
// ---------------------------------------------------------------------
// insert()
// Implements insert(index, value): creates the new element at the end of the
// repeated field (via AddMessage), then bubbles it backwards into position
// with SwapElements, and mirrors the insertion into the child_messages cache.
// Returns None on success, nullptr on error.
static PyObject* Insert(PyObject* pself, PyObject* args) {
  RepeatedCompositeContainer* self =
      reinterpret_cast<RepeatedCompositeContainer*>(pself);
  Py_ssize_t index;
  PyObject* value;
  if (!PyArg_ParseTuple(args, "nO", &index, &value)) {
    return nullptr;
  }
  PyObject* py_cmsg = AddMessage(self, value);
  if (py_cmsg == nullptr) {
    return nullptr;
  }
  if (self->message != nullptr) {
    // Swap the element to right position.
    Message* message = self->message;
    const Reflection* reflection = message->GetReflection();
    const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
    // |length| is the index of the just-appended element (size - 1).
    Py_ssize_t length = reflection->FieldSize(*message, field_descriptor) - 1;
    // Normalize a negative index (Python semantics) and clamp it at 0.
    Py_ssize_t end_index = index;
    if (end_index < 0) end_index += length;
    if (end_index < 0) end_index = 0;
    // Walk the new last element down to end_index one swap at a time.
    for (Py_ssize_t i = length; i > end_index; i --) {
      reflection->SwapElements(message, field_descriptor, i, i - 1);
    }
  }
  // PyList_Insert applies the same negative-index normalization to the cache.
  // NOTE(review): unlike AppendMethod, this failure path does not DECREF
  // py_cmsg — confirm whether a reference is leaked here.
  if (PyList_Insert(self->child_messages, index, py_cmsg) < 0) {
    return nullptr;
  }
  Py_RETURN_NONE;
}
// ---------------------------------------------------------------------
// extend()
......@@ -638,6 +724,10 @@ static PyMethodDef Methods[] = {
"Makes a deep copy of the class." },
{ "add", (PyCFunction)AddMethod, METH_VARARGS | METH_KEYWORDS,
"Adds an object to the repeated container." },
{ "append", AppendMethod, METH_O,
"Appends a message to the end of the repeated container."},
{ "insert", Insert, METH_VARARGS,
"Inserts a message before the specified index." },
{ "extend", ExtendMethod, METH_O,
"Adds objects to the repeated container." },
{ "pop", Pop, METH_VARARGS,
......
......@@ -200,6 +200,7 @@ libprotobuf_lite_la_SOURCES = \
google/protobuf/stubs/strutil.cc \
google/protobuf/stubs/time.cc \
google/protobuf/stubs/time.h \
google/protobuf/any_lite.cc \
google/protobuf/arena.cc \
google/protobuf/extension_set.cc \
google/protobuf/generated_message_util.cc \
......
......@@ -30,79 +30,35 @@
#include <google/protobuf/any.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/message.h>
namespace google {
namespace protobuf {
namespace internal {
namespace {
// Builds an Any type URL as "<prefix>/<message full name>", inserting the "/"
// separator only when the prefix does not already end with one.
string GetTypeUrl(const Descriptor* message,
                  const string& type_url_prefix) {
  const bool prefix_has_slash =
      !type_url_prefix.empty() &&
      type_url_prefix[type_url_prefix.size() - 1] == '/';
  if (prefix_has_slash) {
    return type_url_prefix + message->full_name();
  }
  return type_url_prefix + "/" + message->full_name();
}
} // namespace
const char kAnyFullTypeName[] = "google.protobuf.Any";
const char kTypeGoogleApisComPrefix[] = "type.googleapis.com/";
const char kTypeGoogleProdComPrefix[] = "type.googleprod.com/";
AnyMetadata::AnyMetadata(UrlType* type_url, ValueType* value)
: type_url_(type_url), value_(value) {
}
void AnyMetadata::PackFrom(const Message& message) {
PackFrom(message, kTypeGoogleApisComPrefix);
}
void AnyMetadata::PackFrom(const Message& message,
const string& type_url_prefix) {
type_url_->SetNoArena(&::google::protobuf::internal::GetEmptyString(),
GetTypeUrl(message.GetDescriptor(), type_url_prefix));
type_url_->SetNoArena(
&::google::protobuf::internal::GetEmptyString(),
GetTypeUrl(message.GetDescriptor()->full_name(), type_url_prefix));
message.SerializeToString(value_->MutableNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited()));
}
bool AnyMetadata::UnpackTo(Message* message) const {
if (!InternalIs(message->GetDescriptor())) {
if (!InternalIs(message->GetDescriptor()->full_name())) {
return false;
}
return message->ParseFromString(value_->GetNoArena());
}
bool AnyMetadata::InternalIs(const Descriptor* descriptor) const {
const string type_url = type_url_->GetNoArena();
string full_name;
if (!ParseAnyTypeUrl(type_url, &full_name)) {
return false;
}
return full_name == descriptor->full_name();
}
// Splits an Any type URL of the form "<prefix>/<full.type.Name>" at its last
// '/'.  On success, writes the prefix (including the trailing '/') into
// *url_prefix when it is non-null, writes the type name into *full_type_name,
// and returns true.  Returns false when there is no '/' or when nothing
// follows the last '/'.
bool ParseAnyTypeUrl(const string& type_url, string* url_prefix,
                     string* full_type_name) {
  const size_t slash = type_url.rfind('/');
  if (slash == string::npos || slash + 1 == type_url.size()) {
    return false;
  }
  if (url_prefix) {
    *url_prefix = type_url.substr(0, slash + 1);
  }
  *full_type_name = type_url.substr(slash + 1);
  return true;
}
// Convenience overload that discards the URL prefix and extracts only the
// fully-qualified type name.
bool ParseAnyTypeUrl(const string& type_url, string* full_type_name) {
  return ParseAnyTypeUrl(type_url, /*url_prefix=*/NULL, full_type_name);
}
bool GetAnyFieldDescriptors(const Message& message,
const FieldDescriptor** type_url_field,
const FieldDescriptor** value_field) {
......
......@@ -34,16 +34,26 @@
#include <string>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/message.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/port_def.inc>
namespace google {
namespace protobuf {
class FieldDescriptor;
class Message;
namespace internal {
extern const char kAnyFullTypeName[]; // "google.protobuf.Any".
extern const char kTypeGoogleApisComPrefix[]; // "type.googleapis.com/".
extern const char kTypeGoogleProdComPrefix[]; // "type.googleprod.com/".
std::string GetTypeUrl(StringPiece message_name,
StringPiece type_url_prefix);
// Helper class used to implement google::protobuf::Any.
class PROTOBUF_EXPORT AnyMetadata {
typedef ArenaStringPtr UrlType;
......@@ -54,31 +64,52 @@ class PROTOBUF_EXPORT AnyMetadata {
// Packs a message using the default type URL prefix: "type.googleapis.com".
// The resulted type URL will be "type.googleapis.com/<message_full_name>".
template <typename T>
void PackFrom(const T& message) {
InternalPackFrom(message, kTypeGoogleApisComPrefix, T::FullMessageName());
}
void PackFrom(const Message& message);
// Packs a message using the given type URL prefix. The type URL will be
// constructed by concatenating the message type's full name to the prefix
// with an optional "/" separator if the prefix doesn't already end up "/".
// For example, both PackFrom(message, "type.googleapis.com") and
// PackFrom(message, "type.googleapis.com/") yield the same result type
// URL: "type.googleapis.com/<message_full_name>".
template <typename T>
void PackFrom(const T& message, StringPiece type_url_prefix) {
InternalPackFrom(message, type_url_prefix, T::FullMessageName());
}
void PackFrom(const Message& message, const std::string& type_url_prefix);
// Unpacks the payload into the given message. Returns false if the message's
// type doesn't match the type specified in the type URL (i.e., the full
// name after the last "/" of the type URL doesn't match the message's actual
// full name) or parsing the payload has failed.
template <typename T>
bool UnpackTo(T* message) const {
return InternalUnpackTo(T::FullMessageName(), message);
}
bool UnpackTo(Message* message) const;
// Checks whether the type specified in the type URL matches the given type.
// A type is considered matching if its full name matches the full name after
// the last "/" in the type URL.
template<typename T>
template <typename T>
bool Is() const {
return InternalIs(T::default_instance().GetDescriptor());
return InternalIs(T::FullMessageName());
}
private:
bool InternalIs(const Descriptor* message) const;
void InternalPackFrom(const MessageLite& message,
StringPiece type_url_prefix,
StringPiece type_name);
bool InternalUnpackTo(StringPiece type_name,
MessageLite* message) const;
bool InternalIs(StringPiece type_name) const;
UrlType* type_url_;
ValueType* value_;
......@@ -86,10 +117,6 @@ class PROTOBUF_EXPORT AnyMetadata {
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(AnyMetadata);
};
extern const char kAnyFullTypeName[]; // "google.protobuf.Any".
extern const char kTypeGoogleApisComPrefix[]; // "type.googleapis.com/".
extern const char kTypeGoogleProdComPrefix[]; // "type.googleprod.com/".
// Get the proto type name from Any::type_url value. For example, passing
// "type.googleapis.com/rpc.QueryOrigin" will return "rpc.QueryOrigin" in
// *full_type_name. Returns false if the type_url does not have a "/"
......
......@@ -42,9 +42,9 @@ void InitDefaults_google_2fprotobuf_2fany_2eproto() {
::google::protobuf::internal::InitSCC(&scc_info_Any_google_2fprotobuf_2fany_2eproto.base);
}
::google::protobuf::Metadata file_level_metadata_google_2fprotobuf_2fany_2eproto[1];
constexpr ::google::protobuf::EnumDescriptor const** file_level_enum_descriptors_google_2fprotobuf_2fany_2eproto = nullptr;
constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_google_2fprotobuf_2fany_2eproto = nullptr;
static ::google::protobuf::Metadata file_level_metadata_google_2fprotobuf_2fany_2eproto[1];
static constexpr ::google::protobuf::EnumDescriptor const** file_level_enum_descriptors_google_2fprotobuf_2fany_2eproto = nullptr;
static constexpr ::google::protobuf::ServiceDescriptor const** file_level_service_descriptors_google_2fprotobuf_2fany_2eproto = nullptr;
const ::google::protobuf::uint32 TableStruct_google_2fprotobuf_2fany_2eproto::offsets[] PROTOBUF_SECTION_VARIABLE(protodesc_cold) = {
~0u, // no _has_bits_
......@@ -63,7 +63,7 @@ static ::google::protobuf::Message const * const file_default_instances[] = {
reinterpret_cast<const ::google::protobuf::Message*>(&::google::protobuf::_Any_default_instance_),
};
::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_google_2fprotobuf_2fany_2eproto = {
static ::google::protobuf::internal::AssignDescriptorsTable assign_descriptors_table_google_2fprotobuf_2fany_2eproto = {
{}, AddDescriptors_google_2fprotobuf_2fany_2eproto, "google/protobuf/any.proto", schemas,
file_default_instances, TableStruct_google_2fprotobuf_2fany_2eproto::offsets,
file_level_metadata_google_2fprotobuf_2fany_2eproto, 1, file_level_enum_descriptors_google_2fprotobuf_2fany_2eproto, file_level_service_descriptors_google_2fprotobuf_2fany_2eproto,
......@@ -77,7 +77,7 @@ const char descriptor_table_protodef_google_2fprotobuf_2fany_2eproto[] =
"\003GPB\252\002\036Google.Protobuf.WellKnownTypesb\006p"
"roto3"
;
::google::protobuf::internal::DescriptorTable descriptor_table_google_2fprotobuf_2fany_2eproto = {
static ::google::protobuf::internal::DescriptorTable descriptor_table_google_2fprotobuf_2fany_2eproto = {
false, InitDefaults_google_2fprotobuf_2fany_2eproto,
descriptor_table_protodef_google_2fprotobuf_2fany_2eproto,
"google/protobuf/any.proto", &assign_descriptors_table_google_2fprotobuf_2fany_2eproto, 205,
......@@ -111,11 +111,6 @@ void Any::PackFrom(const ::google::protobuf::Message& message,
bool Any::UnpackTo(::google::protobuf::Message* message) const {
return _any_metadata_.UnpackTo(message);
}
bool Any::ParseAnyTypeUrl(const string& type_url,
string* full_type_name) {
return ::google::protobuf::internal::ParseAnyTypeUrl(type_url,
full_type_name);
}
bool Any::GetAnyFieldDescriptors(
const ::google::protobuf::Message& message,
const ::google::protobuf::FieldDescriptor** type_url_field,
......@@ -123,6 +118,11 @@ bool Any::GetAnyFieldDescriptors(
return ::google::protobuf::internal::GetAnyFieldDescriptors(
message, type_url_field, value_field);
}
bool Any::ParseAnyTypeUrl(const string& type_url,
string* full_type_name) {
return ::google::protobuf::internal::ParseAnyTypeUrl(type_url,
full_type_name);
}
class Any::HasBitSetters {
public:
......@@ -192,71 +192,40 @@ void Any::Clear() {
}
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
const char* Any::_InternalParse(const char* begin, const char* end, void* object,
::google::protobuf::internal::ParseContext* ctx) {
auto msg = static_cast<Any*>(object);
::google::protobuf::int32 size; (void)size;
int depth; (void)depth;
::google::protobuf::uint32 tag;
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
const char* Any::_InternalParse(const char* ptr, ::google::protobuf::internal::ParseContext* ctx) {
while (!ctx->Done(&ptr)) {
::google::protobuf::uint32 tag;
ptr = ::google::protobuf::internal::ReadTag(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string type_url = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = ::google::protobuf::io::ReadSize(ptr, &size);
ptr = ::google::protobuf::internal::InlineGreedyStringParserUTF8(mutable_type_url(), ptr, ctx, "google.protobuf.Any.type_url");
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Any.type_url");
object = msg->mutable_type_url();
if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) {
parser_till_end = ::google::protobuf::internal::GreedyStringParserUTF8;
goto string_till_end;
}
GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheckUTF8(ptr, size, ctx));
::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx);
ptr += size;
break;
}
// bytes value = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = ::google::protobuf::io::ReadSize(ptr, &size);
ptr = ::google::protobuf::internal::InlineGreedyStringParser(mutable_value(), ptr, ctx);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
object = msg->mutable_value();
if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) {
parser_till_end = ::google::protobuf::internal::GreedyStringParser;
goto string_till_end;
}
GOOGLE_PROTOBUF_PARSER_ASSERT(::google::protobuf::internal::StringCheck(ptr, size, ctx));
::google::protobuf::internal::InlineGreedyStringParser(object, ptr, size, ctx);
ptr += size;
break;
}
default: {
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
ctx->SetLastTag(tag);
return ptr;
}
auto res = UnknownFieldParse(tag, {_InternalParse, msg},
ptr, end, msg->_internal_metadata_.mutable_unknown_fields(), ctx);
ptr = res.first;
ptr = UnknownFieldParse(tag,
_internal_metadata_.mutable_unknown_fields(), ptr, ctx);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr != nullptr);
if (res.second) return ptr;
break;
}
} // switch
} // while
return ptr;
string_till_end:
static_cast<::std::string*>(object)->clear();
static_cast<::std::string*>(object)->reserve(size);
goto len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Any::MergePartialFromCodedStream(
......
......@@ -108,15 +108,15 @@ class PROTOBUF_EXPORT Any final :
void PackFrom(const ::google::protobuf::Message& message,
const ::std::string& type_url_prefix);
bool UnpackTo(::google::protobuf::Message* message) const;
static bool GetAnyFieldDescriptors(
const ::google::protobuf::Message& message,
const ::google::protobuf::FieldDescriptor** type_url_field,
const ::google::protobuf::FieldDescriptor** value_field);
template<typename T> bool Is() const {
return _any_metadata_.Is<T>();
}
static bool ParseAnyTypeUrl(const string& type_url,
string* full_type_name);
static bool GetAnyFieldDescriptors(
const ::google::protobuf::Message& message,
const ::google::protobuf::FieldDescriptor** type_url_field,
const ::google::protobuf::FieldDescriptor** value_field);
void Swap(Any* other);
friend void swap(Any& a, Any& b) {
a.Swap(&b);
......@@ -140,8 +140,7 @@ class PROTOBUF_EXPORT Any final :
size_t ByteSizeLong() const final;
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);
::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; }
const char* _InternalParse(const char* ptr, ::google::protobuf::internal::ParseContext* ctx) final;
#else
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) final;
......@@ -153,10 +152,14 @@ class PROTOBUF_EXPORT Any final :
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
void SharedCtor();
void SharedDtor();
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Any* other);
friend class ::google::protobuf::internal::AnyMetadata;
static ::google::protobuf::StringPiece FullMessageName() {
return "google.protobuf.Any";
}
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return nullptr;
......
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <google/protobuf/any.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/stubs/strutil.h>
namespace google {
namespace protobuf {
namespace internal {
// Builds the Any type URL for |message_name| under |type_url_prefix|.
// A single '/' separator is placed between prefix and name; if the prefix
// already ends with '/', no extra separator is added.
string GetTypeUrl(StringPiece message_name,
                  StringPiece type_url_prefix) {
  const size_t prefix_size = type_url_prefix.size();
  if (prefix_size > 0 && type_url_prefix[prefix_size - 1] == '/') {
    // Prefix already carries the trailing slash.
    return StrCat(type_url_prefix, message_name);
  }
  return StrCat(type_url_prefix, "/", message_name);
}
// Full name of the Any message itself, and the two standard type-URL
// prefixes used for well-known Google type URLs.
const char kAnyFullTypeName[] = "google.protobuf.Any";
const char kTypeGoogleApisComPrefix[] = "type.googleapis.com/";
const char kTypeGoogleProdComPrefix[] = "type.googleprod.com/";

// AnyMetadata stores raw pointers to the generated Any message's type_url
// and value fields; nothing here takes ownership of them (presumably they
// outlive this object as members of the enclosing message — confirm at
// the declaration site).
AnyMetadata::AnyMetadata(UrlType* type_url, ValueType* value)
    : type_url_(type_url), value_(value) {}
// Packs |message| into this Any: the type_url field is set to
// |type_url_prefix| joined with |type_name| (via GetTypeUrl), and the value
// field receives the message's serialized bytes.
void AnyMetadata::InternalPackFrom(const MessageLite& message,
                                   StringPiece type_url_prefix,
                                   StringPiece type_name) {
  // NOTE(review): the *NoArena accessors are used here — this path appears
  // to assume the underlying strings are not arena-allocated; confirm for
  // arena-enabled messages.
  type_url_->SetNoArena(&::google::protobuf::internal::GetEmptyString(),
                        GetTypeUrl(type_name, type_url_prefix));
  // Serialize directly into the value field's mutable string.
  message.SerializeToString(value_->MutableNoArena(
      &::google::protobuf::internal::GetEmptyStringAlreadyInited()));
}
// Unpacks this Any's payload into |message|. Returns false if the stored
// type URL does not name |type_name|, or if parsing the payload fails.
bool AnyMetadata::InternalUnpackTo(StringPiece type_name,
                                   MessageLite* message) const {
  // Short-circuit: the payload bytes are only parsed when the type matches.
  return InternalIs(type_name) &&
         message->ParseFromString(value_->GetNoArena());
}
namespace {
// The type URL could be stored in either an ArenaStringPtr or a
// StringPieceField, so we provide these helpers to get a string_view from
// either type. We use a template function as a way to avoid depending on
// StringPieceField.

// Generic case: any type exposing Get() (e.g. StringPieceField).
template <typename T>
StringPiece Get(const T* ptr) {
  return ptr->Get();
}

// Full specialization for ArenaStringPtr, which exposes GetNoArena()
// instead of Get().
template <>
// NOLINTNEXTLINE: clang-diagnostic-unused-function
StringPiece Get(const ArenaStringPtr* ptr) {
  return ptr->GetNoArena();
}
}  // namespace
// Returns true iff the stored type URL ends with "/" + |type_name|, i.e.
// the character immediately before the type-name suffix must be a slash.
bool AnyMetadata::InternalIs(StringPiece type_name) const {
  StringPiece url = Get(type_url_);
  const size_t name_size = type_name.size();
  // The URL must have room for the type name plus its '/' separator.
  if (url.size() < name_size + 1) {
    return false;
  }
  if (url[url.size() - name_size - 1] != '/') {
    return false;
  }
  return HasSuffixString(url, type_name);
}
bool ParseAnyTypeUrl(const string& type_url, string* url_prefix,
string* full_type_name) {
size_t pos = type_url.find_last_of("/");
if (pos == string::npos || pos + 1 == type_url.size()) {
return false;
}
if (url_prefix) {
*url_prefix = type_url.substr(0, pos + 1);
}
*full_type_name = type_url.substr(pos + 1);
return true;
}
// Convenience overload: extracts only the full type name, discarding the
// URL prefix. Delegates to the three-argument overload above.
bool ParseAnyTypeUrl(const string& type_url, string* full_type_name) {
  return ParseAnyTypeUrl(type_url, nullptr, full_type_name);
}
} // namespace internal
} // namespace protobuf
} // namespace google
......@@ -29,9 +29,11 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <google/protobuf/any_test.pb.h>
#include <google/protobuf/unittest.pb.h>
#include <gtest/gtest.h>
namespace google {
namespace protobuf {
namespace {
......@@ -46,10 +48,22 @@ TEST(AnyTest, TestPackAndUnpack) {
ASSERT_TRUE(message.ParseFromString(data));
EXPECT_TRUE(message.has_any_value());
submessage.Clear();
ASSERT_TRUE(message.any_value().UnpackTo(&submessage));
EXPECT_EQ(12345, submessage.int32_value());
}
TEST(AnyTest, TestUnpackWithTypeMismatch) {
protobuf_unittest::TestAny payload;
payload.set_int32_value(13);
google::protobuf::Any any;
any.PackFrom(payload);
// Attempt to unpack into the wrong type.
protobuf_unittest::TestAllTypes dest;
EXPECT_FALSE(any.UnpackTo(&dest));
}
TEST(AnyTest, TestPackAndUnpackAny) {
// We can pack a Any message inside another Any message.
protobuf_unittest::TestAny submessage;
......@@ -63,26 +77,26 @@ TEST(AnyTest, TestPackAndUnpackAny) {
ASSERT_TRUE(message.ParseFromString(data));
EXPECT_TRUE(message.has_any_value());
any.Clear();
submessage.Clear();
ASSERT_TRUE(message.any_value().UnpackTo(&any));
ASSERT_TRUE(any.UnpackTo(&submessage));
EXPECT_EQ(12345, submessage.int32_value());
}
TEST(AnyType, TestPackWithCustomTypeUrl) {
TEST(AnyTest, TestPackWithCustomTypeUrl) {
protobuf_unittest::TestAny submessage;
submessage.set_int32_value(12345);
google::protobuf::Any any;
// Pack with a custom type URL prefix.
any.PackFrom(submessage, "type.myservice.com");
EXPECT_EQ("type.myservice.com/" + submessage.GetDescriptor()->full_name(),
any.type_url());
EXPECT_EQ("type.myservice.com/protobuf_unittest.TestAny", any.type_url());
// Pack with a custom type URL prefix ending with '/'.
any.PackFrom(submessage, "type.myservice.com/");
EXPECT_EQ("type.myservice.com/" + submessage.GetDescriptor()->full_name(),
any.type_url());
EXPECT_EQ("type.myservice.com/protobuf_unittest.TestAny", any.type_url());
// Pack with an empty type URL prefix.
any.PackFrom(submessage, "");
EXPECT_EQ("/" + submessage.GetDescriptor()->full_name(), any.type_url());
EXPECT_EQ("/protobuf_unittest.TestAny", any.type_url());
// Test unpacking the type.
submessage.Clear();
......@@ -104,6 +118,15 @@ TEST(AnyTest, TestIs) {
ASSERT_TRUE(message.ParseFromString(message.SerializeAsString()));
EXPECT_FALSE(message.any_value().Is<protobuf_unittest::TestAny>());
EXPECT_TRUE(message.any_value().Is<google::protobuf::Any>());
any.set_type_url("/protobuf_unittest.TestAny");
EXPECT_TRUE(any.Is<protobuf_unittest::TestAny>());
// The type URL must contain at least one "/".
any.set_type_url("protobuf_unittest.TestAny");
EXPECT_FALSE(any.Is<protobuf_unittest::TestAny>());
// The type name after the slash must be fully qualified.
any.set_type_url("/TestAny");
EXPECT_FALSE(any.Is<protobuf_unittest::TestAny>());
}
TEST(AnyTest, MoveConstructor) {
......@@ -117,6 +140,7 @@ TEST(AnyTest, MoveConstructor) {
google::protobuf::Any dst(std::move(src));
EXPECT_EQ(type_url, dst.type_url().data());
payload.Clear();
ASSERT_TRUE(dst.UnpackTo(&payload));
EXPECT_EQ(12345, payload.int32_value());
}
......@@ -133,6 +157,7 @@ TEST(AnyTest, MoveAssignment) {
google::protobuf::Any dst;
dst = std::move(src);
EXPECT_EQ(type_url, dst.type_url().data());
payload.Clear();
ASSERT_TRUE(dst.UnpackTo(&payload));
EXPECT_EQ(12345, payload.int32_value());
}
......
This diff is collapsed.
......@@ -31,6 +31,13 @@
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
#include <google/protobuf/extension_set.h> // IWYU pragma: export
#include <google/protobuf/unknown_field_set.h>
namespace google {
namespace protobuf {
namespace internal {
class AnyMetadata;
} // namespace internal
} // namespace protobuf
} // namespace google
#include <google/protobuf/source_context.pb.h>
#include <google/protobuf/type.pb.h>
// @@protoc_insertion_point(includes)
......@@ -134,8 +141,7 @@ class PROTOBUF_EXPORT Api final :
size_t ByteSizeLong() const final;
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);
::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; }
const char* _InternalParse(const char* ptr, ::google::protobuf::internal::ParseContext* ctx) final;
#else
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) final;
......@@ -147,10 +153,14 @@ class PROTOBUF_EXPORT Api final :
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
void SharedCtor();
void SharedDtor();
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Api* other);
friend class ::google::protobuf::internal::AnyMetadata;
static ::google::protobuf::StringPiece FullMessageName() {
return "google.protobuf.Api";
}
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return nullptr;
......@@ -325,8 +335,7 @@ class PROTOBUF_EXPORT Method final :
size_t ByteSizeLong() const final;
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);
::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; }
const char* _InternalParse(const char* ptr, ::google::protobuf::internal::ParseContext* ctx) final;
#else
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) final;
......@@ -338,10 +347,14 @@ class PROTOBUF_EXPORT Method final :
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
void SharedCtor();
void SharedDtor();
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Method* other);
friend class ::google::protobuf::internal::AnyMetadata;
static ::google::protobuf::StringPiece FullMessageName() {
return "google.protobuf.Method";
}
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return nullptr;
......@@ -509,8 +522,7 @@ class PROTOBUF_EXPORT Mixin final :
size_t ByteSizeLong() const final;
#if GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
static const char* _InternalParse(const char* begin, const char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);
::google::protobuf::internal::ParseFunc _ParseFunc() const final { return _InternalParse; }
const char* _InternalParse(const char* ptr, ::google::protobuf::internal::ParseContext* ctx) final;
#else
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) final;
......@@ -522,10 +534,14 @@ class PROTOBUF_EXPORT Mixin final :
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
void SharedCtor();
void SharedDtor();
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Mixin* other);
friend class ::google::protobuf::internal::AnyMetadata;
static ::google::protobuf::StringPiece FullMessageName() {
return "google.protobuf.Mixin";
}
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return nullptr;
......
......@@ -34,6 +34,7 @@
#define GOOGLE_PROTOBUF_ARENA_H__
#include <limits>
#include <type_traits>
#ifdef max
#undef max // Visual Studio defines this macro
#endif
......@@ -156,13 +157,14 @@ struct ArenaOptions {
private:
// Hooks for adding external functionality such as user-specific metrics
// collection, specific debugging abilities, etc.
// Init hook may return a pointer to a cookie to be stored in the arena.
// reset and destruction hooks will then be called with the same cookie
// pointer. This allows us to save an external object per arena instance and
// use it on the other hooks (Note: It is just as legal for init to return
// NULL and not use the cookie feature).
// on_arena_reset and on_arena_destruction also receive the space used in
// the arena just before the reset.
// Init hook (if set) will always be called at Arena init time. Init hook may
// return a pointer to a cookie to be stored in the arena. Reset and
// destruction hooks will then be called with the same cookie pointer. This
// allows us to save an external object per arena instance and use it on the
// other hooks (Note: If init hook returns NULL, the other hooks will NOT be
// called on this arena instance).
// on_arena_reset and on_arena_destruction also receive the space used in the
// arena just before the reset.
void* (*on_arena_init)(Arena* arena);
void (*on_arena_reset)(Arena* arena, void* cookie, uint64 space_used);
void (*on_arena_destruction)(Arena* arena, void* cookie, uint64 space_used);
......@@ -408,12 +410,12 @@ class PROTOBUF_EXPORT Arena {
}
// Retrieves the arena associated with |value| if |value| is an arena-capable
// message, or NULL otherwise. This differs from value->GetArena() in that the
// latter is a virtual call, while this method is a templated call that
// resolves at compile-time.
// message, or NULL otherwise. If possible, the call resolves at compile time.
// Note that we can often devirtualize calls to `value->GetArena()` so usually
// calling this method is unnecessary.
template <typename T>
PROTOBUF_ALWAYS_INLINE static Arena* GetArena(const T* value) {
return GetArenaInternal(value, is_arena_constructable<T>());
return GetArenaInternal(value);
}
template <typename T>
......@@ -440,6 +442,15 @@ class PROTOBUF_EXPORT Arena {
sizeof(char)>
is_arena_constructable;
template <typename U>
static char HasGetArena(decltype(&U::GetArena));
template <typename U>
static double HasGetArena(...);
typedef std::integral_constant<bool, sizeof(HasGetArena<T>(nullptr)) ==
sizeof(char)>
has_get_arena;
template <typename... Args>
static T* Construct(void* ptr, Args&&... args) {
return new (ptr) T(std::forward<Args>(args)...);
......@@ -655,16 +666,24 @@ class PROTOBUF_EXPORT Arena {
// Implementation for GetArena(). Only message objects with
// InternalArenaConstructable_ tags can be associated with an arena, and such
// objects must implement a GetArenaNoVirtual() method.
template <typename T>
PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(const T* value,
std::true_type) {
template <typename T, typename std::enable_if<
is_arena_constructable<T>::value, int>::type = 0>
PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(const T* value) {
return InternalHelper<T>::GetArena(value);
}
template <typename T>
PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(const T* /* value */,
std::false_type) {
return NULL;
template <typename T,
typename std::enable_if<!is_arena_constructable<T>::value &&
InternalHelper<T>::has_get_arena::value,
int>::type = 0>
PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(const T* value) {
return value->GetArena();
}
template <typename T, typename std::enable_if<
!is_arena_constructable<T>::value &&
!InternalHelper<T>::has_get_arena::value,
int>::type = 0>
PROTOBUF_ALWAYS_INLINE static Arena* GetArenaInternal(const T* value) {
return nullptr;
}
// For friends of arena.
......
......@@ -1324,6 +1324,12 @@ TEST(ArenaTest, GetArenaShouldReturnTheArenaForArenaAllocatedMessages) {
const ArenaMessage* const_pointer_to_message = message;
EXPECT_EQ(&arena, Arena::GetArena(message));
EXPECT_EQ(&arena, Arena::GetArena(const_pointer_to_message));
// Test that the Message* / MessageLite* specialization SFINAE works.
const Message* const_pointer_to_message_type = message;
EXPECT_EQ(&arena, Arena::GetArena(const_pointer_to_message_type));
const MessageLite* const_pointer_to_message_lite_type = message;
EXPECT_EQ(&arena, Arena::GetArena(const_pointer_to_message_lite_type));
}
TEST(ArenaTest, GetArenaShouldReturnNullForNonArenaAllocatedMessages) {
......
......@@ -2463,9 +2463,8 @@ TEST_P(EncodeDecodeTest, ProtoParseError) {
"net/proto2/internal/no_such_file.proto: No such file or directory\n");
}
INSTANTIATE_TEST_CASE_P(FileDescriptorSetSource,
EncodeDecodeTest,
testing::Values(PROTO_PATH, DESCRIPTOR_SET_IN));
INSTANTIATE_TEST_SUITE_P(FileDescriptorSetSource, EncodeDecodeTest,
testing::Values(PROTO_PATH, DESCRIPTOR_SET_IN));
} // anonymous namespace
#endif // !GOOGLE_PROTOBUF_HEAP_CHECK_DRACONIAN
......
......@@ -72,7 +72,6 @@ EnumGenerator::EnumGenerator(const EnumDescriptor* descriptor,
variables_["short_name"] = descriptor_->name();
variables_["enumbase"] = options_.proto_h ? " : int" : "";
variables_["nested_name"] = descriptor_->name();
variables_["constexpr"] = options_.proto_h ? "constexpr" : "";
variables_["prefix"] =
(descriptor_->containing_type() == NULL) ? "" : classname_ + "_";
}
......@@ -127,15 +126,15 @@ void EnumGenerator::GenerateDefinition(io::Printer* printer) {
format(
"$dllexport_decl $bool $classname$_IsValid(int value);\n"
"const $classname$ ${1$$prefix$$short_name$_MIN$}$ = "
"constexpr $classname$ ${1$$prefix$$short_name$_MIN$}$ = "
"$prefix$$2$;\n"
"const $classname$ ${1$$prefix$$short_name$_MAX$}$ = "
"constexpr $classname$ ${1$$prefix$$short_name$_MAX$}$ = "
"$prefix$$3$;\n",
descriptor_, EnumValueName(min_value), EnumValueName(max_value));
if (generate_array_size_) {
format(
"const int ${1$$prefix$$short_name$_ARRAYSIZE$}$ = "
"constexpr int ${1$$prefix$$short_name$_ARRAYSIZE$}$ = "
"$prefix$$short_name$_MAX + 1;\n\n",
descriptor_);
}
......@@ -199,7 +198,7 @@ void EnumGenerator::GenerateSymbolImports(io::Printer* printer) const {
string deprecated_attr = DeprecatedAttribute(
options_, descriptor_->value(j)->options().deprecated());
format(
"$1$static $constexpr $const $nested_name$ ${2$$3$$}$ =\n"
"$1$static constexpr $nested_name$ ${2$$3$$}$ =\n"
" $classname$_$3$;\n",
deprecated_attr, descriptor_->value(j),
EnumValueName(descriptor_->value(j)));
......@@ -209,14 +208,14 @@ void EnumGenerator::GenerateSymbolImports(io::Printer* printer) const {
"static inline bool $nested_name$_IsValid(int value) {\n"
" return $classname$_IsValid(value);\n"
"}\n"
"static const $nested_name$ ${1$$nested_name$_MIN$}$ =\n"
"static constexpr $nested_name$ ${1$$nested_name$_MIN$}$ =\n"
" $classname$_$nested_name$_MIN;\n"
"static const $nested_name$ ${1$$nested_name$_MAX$}$ =\n"
"static constexpr $nested_name$ ${1$$nested_name$_MAX$}$ =\n"
" $classname$_$nested_name$_MAX;\n",
descriptor_);
if (generate_array_size_) {
format(
"static const int ${1$$nested_name$_ARRAYSIZE$}$ =\n"
"static constexpr int ${1$$nested_name$_ARRAYSIZE$}$ =\n"
" $classname$_$nested_name$_ARRAYSIZE;\n",
descriptor_);
}
......@@ -297,25 +296,26 @@ void EnumGenerator::GenerateMethods(int idx, io::Printer* printer) {
if (descriptor_->containing_type() != NULL) {
string parent = ClassName(descriptor_->containing_type(), false);
// We need to "define" the static constants which were declared in the
// header, to give the linker a place to put them. Or at least the C++
// standard says we have to. MSVC actually insists that we do _not_ define
// them again in the .cc file, prior to VC++ 2015.
format("#if !defined(_MSC_VER) || _MSC_VER >= 1900\n");
// Before C++17, we must define the static constants which were
// declared in the header, to give the linker a place to put them.
// But pre-2015 MSVC++ insists that we not.
format("#if (__cplusplus < 201703) && "
"(!defined(_MSC_VER) || _MSC_VER >= 1900)\n");
for (int i = 0; i < descriptor_->value_count(); i++) {
format("$constexpr $const $classname$ $1$::$2$;\n", parent,
format("constexpr $classname$ $1$::$2$;\n", parent,
EnumValueName(descriptor_->value(i)));
}
format(
"const $classname$ $1$::$nested_name$_MIN;\n"
"const $classname$ $1$::$nested_name$_MAX;\n",
"constexpr $classname$ $1$::$nested_name$_MIN;\n"
"constexpr $classname$ $1$::$nested_name$_MAX;\n",
parent);
if (generate_array_size_) {
format("const int $1$::$nested_name$_ARRAYSIZE;\n", parent);
format("constexpr int $1$::$nested_name$_ARRAYSIZE;\n", parent);
}
format("#endif // !defined(_MSC_VER) || _MSC_VER >= 1900\n");
format("#endif // (__cplusplus < 201703) && "
"(!defined(_MSC_VER) || _MSC_VER >= 1900)\n");
}
}
......
......@@ -706,29 +706,34 @@ void FileGenerator::GenerateReflectionInitializationCode(io::Printer* printer) {
// in the file.
if (!message_generators_.empty()) {
format("::$proto_ns$::Metadata $file_level_metadata$[$1$];\n",
format("static ::$proto_ns$::Metadata $file_level_metadata$[$1$];\n",
message_generators_.size());
} else {
format(
"static "
"constexpr ::$proto_ns$::Metadata* $file_level_metadata$ = nullptr;\n");
}
if (!enum_generators_.empty()) {
format(
"static "
"const ::$proto_ns$::EnumDescriptor* "
"$file_level_enum_descriptors$[$1$];\n",
enum_generators_.size());
} else {
format(
"static "
"constexpr ::$proto_ns$::EnumDescriptor const** "
"$file_level_enum_descriptors$ = nullptr;\n");
}
if (HasGenericServices(file_, options_) && file_->service_count() > 0) {
format(
"static "
"const ::$proto_ns$::ServiceDescriptor* "
"$file_level_service_descriptors$[$1$];\n",
file_->service_count());
} else {
format(
"static "
"constexpr ::$proto_ns$::ServiceDescriptor const** "
"$file_level_service_descriptors$ = nullptr;\n");
}
......@@ -795,6 +800,7 @@ void FileGenerator::GenerateReflectionInitializationCode(io::Printer* printer) {
// AssignDescriptors(). All later times, waits for the first call to
// complete and then returns.
format(
"static "
"::$proto_ns$::internal::AssignDescriptorsTable $assign_desc_table$ = "
"{\n"
" {}, $add_descriptors$, \"$filename$\", schemas,\n"
......@@ -846,6 +852,7 @@ void FileGenerator::GenerateReflectionInitializationCode(io::Printer* printer) {
// Now generate the AddDescriptors() function.
format(
"static "
"::$proto_ns$::internal::DescriptorTable $1$ = {\n"
" false, $init_defaults$, \n"
" $2$,\n",
......@@ -1295,6 +1302,26 @@ void FileGenerator::GenerateLibraryIncludes(io::Printer* printer) {
if (IsAnyMessage(file_, options_)) {
IncludeFile("net/proto2/internal/any.h", printer);
} else {
// For Any support with lite protos, we need to friend AnyMetadata, so we
// forward-declare it here.
if (options_.opensource_runtime) {
format(
"namespace google {\n"
"namespace protobuf {\n"
"namespace internal {\n"
"class AnyMetadata;\n"
"} // namespace internal\n"
"} // namespace protobuf\n"
"} // namespace google\n");
} else {
format(
"namespace google {\nnamespace protobuf {\n"
"namespace internal {\n"
"class AnyMetadata;\n"
"} // namespace internal\n"
"} // namespace protobuf\n} // namespace google\n");
}
}
}
......
......@@ -65,15 +65,15 @@ void SetMessageVariables(const FieldDescriptor* descriptor,
switch (val->cpp_type()) {
case FieldDescriptor::CPPTYPE_MESSAGE:
(*variables)["val_cpp"] = FieldMessageTypeName(val);
(*variables)["wrapper"] = "EntryWrapper";
(*variables)["wrapper"] = "MapEntryWrapper";
break;
case FieldDescriptor::CPPTYPE_ENUM:
(*variables)["val_cpp"] = ClassName(val->enum_type(), true);
(*variables)["wrapper"] = "EnumEntryWrapper";
(*variables)["wrapper"] = "MapEnumEntryWrapper";
break;
default:
(*variables)["val_cpp"] = PrimitiveTypeName(options, val->cpp_type());
(*variables)["wrapper"] = "EntryWrapper";
(*variables)["wrapper"] = "MapEntryWrapper";
}
(*variables)["key_wire_type"] =
"TYPE_" + ToUpper(DeclaredTypeMethodName(key->type()));
......@@ -238,11 +238,9 @@ GenerateMergeFromCodedStream(io::Printer* printer) const {
}
}
static void GenerateSerializationLoop(const Formatter& format,
bool supports_arenas, bool string_key,
static void GenerateSerializationLoop(const Formatter& format, bool string_key,
bool string_value, bool to_array,
bool is_deterministic) {
format("::std::unique_ptr<$map_classname$> entry;\n");
string ptr;
if (is_deterministic) {
format("for (size_type i = 0; i < n; i++) {\n");
......@@ -257,24 +255,17 @@ static void GenerateSerializationLoop(const Formatter& format,
}
format.Indent();
format("entry.reset($name$_.New$wrapper$($1$->first, $1$->second));\n", ptr);
format(
"$map_classname$::$wrapper$ entry(nullptr, $1$->first, $1$->second);\n",
ptr);
if (to_array) {
format(
"target = ::$proto_ns$::internal::WireFormatLite::InternalWrite"
"$declared_type$NoVirtualToArray($number$, *entry, target);\n");
"$declared_type$NoVirtualToArray($number$, entry, target);\n");
} else {
format(
"::$proto_ns$::internal::WireFormatLite::Write$stream_writer$($number$,"
" "
"*entry, output);\n");
}
// If entry is allocated by arena, its desctructor should be avoided.
if (supports_arenas) {
format(
"if (entry->GetArena() != nullptr) {\n"
" entry.release();\n"
"}\n");
" entry, output);\n");
}
if (string_key || string_value) {
......@@ -365,13 +356,11 @@ void MapFieldGenerator::GenerateSerializeWithCachedSizes(io::Printer* printer,
" ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less());\n",
to_array ? "false" : "output->IsSerializationDeterministic()");
format.Indent();
GenerateSerializationLoop(format, SupportsArenas(descriptor_), string_key,
string_value, to_array, true);
GenerateSerializationLoop(format, string_key, string_value, to_array, true);
format.Outdent();
format("} else {\n");
format.Indent();
GenerateSerializationLoop(format, SupportsArenas(descriptor_), string_key,
string_value, to_array, false);
GenerateSerializationLoop(format, string_key, string_value, to_array, false);
format.Outdent();
format("}\n");
format.Outdent();
......@@ -384,35 +373,13 @@ GenerateByteSize(io::Printer* printer) const {
format(
"total_size += $tag_size$ *\n"
" ::$proto_ns$::internal::FromIntSize(this->$name$_size());\n"
"{\n"
" ::std::unique_ptr<$map_classname$> entry;\n"
" for (::$proto_ns$::Map< $key_cpp$, $val_cpp$ >::const_iterator\n"
" it = this->$name$().begin();\n"
" it != this->$name$().end(); ++it) {\n");
// If entry is allocated by arena, its desctructor should be avoided.
if (SupportsArenas(descriptor_)) {
format(
" if (entry.get() != nullptr && entry->GetArena() != nullptr) {\n"
" entry.release();\n"
" }\n");
}
format(
" entry.reset($name$_.New$wrapper$(it->first, it->second));\n"
" total_size += ::$proto_ns$::internal::WireFormatLite::\n"
" $declared_type$SizeNoVirtual(*entry);\n"
" }\n");
// If entry is allocated by arena, its desctructor should be avoided.
if (SupportsArenas(descriptor_)) {
format(
" if (entry.get() != nullptr && entry->GetArena() != nullptr) {\n"
" entry.release();\n"
" }\n");
}
format("}\n");
"for (::$proto_ns$::Map< $key_cpp$, $val_cpp$ >::const_iterator\n"
" it = this->$name$().begin();\n"
" it != this->$name$().end(); ++it) {\n"
" $map_classname$::$wrapper$ entry(nullptr, it->first, it->second);\n"
" total_size += ::$proto_ns$::internal::WireFormatLite::\n"
" $declared_type$SizeNoVirtual(entry);\n"
"}\n");
}
} // namespace cpp
......
......@@ -152,11 +152,11 @@ ImmutableEnumFieldGenerator(const FieldDescriptor* descriptor,
ImmutableEnumFieldGenerator::~ImmutableEnumFieldGenerator() {}
int ImmutableEnumFieldGenerator::GetNumBitsForMessage() const {
return 1;
return SupportFieldPresence(descriptor_->file()) ? 1 : 0;
}
int ImmutableEnumFieldGenerator::GetNumBitsForBuilder() const {
return 1;
return GetNumBitsForMessage();
}
void ImmutableEnumFieldGenerator::
......
......@@ -132,7 +132,7 @@ ImmutableEnumFieldLiteGenerator(const FieldDescriptor* descriptor,
ImmutableEnumFieldLiteGenerator::~ImmutableEnumFieldLiteGenerator() {}
int ImmutableEnumFieldLiteGenerator::GetNumBitsForMessage() const {
return 1;
return SupportFieldPresence(descriptor_->file()) ? 1 : 0;
}
void ImmutableEnumFieldLiteGenerator::
......
......@@ -66,11 +66,6 @@ using internal::WireFormat;
using internal::WireFormatLite;
namespace {
bool GenerateHasBits(const Descriptor* descriptor) {
return SupportFieldPresence(descriptor->file()) ||
HasRepeatedFields(descriptor);
}
string MapValueImmutableClassdName(const Descriptor* descriptor,
ClassNameResolver* name_resolver) {
const FieldDescriptor* value_field = descriptor->FindFieldByName("value");
......@@ -397,18 +392,16 @@ void ImmutableMessageGenerator::Generate(io::Printer* printer) {
messageGenerator.Generate(printer);
}
if (GenerateHasBits(descriptor_)) {
// Integers for bit fields.
int totalBits = 0;
for (int i = 0; i < descriptor_->field_count(); i++) {
totalBits += field_generators_.get(descriptor_->field(i))
.GetNumBitsForMessage();
}
int totalInts = (totalBits + 31) / 32;
for (int i = 0; i < totalInts; i++) {
printer->Print("private int $bit_field_name$;\n",
"bit_field_name", GetBitFieldName(i));
}
// Integers for bit fields.
int totalBits = 0;
for (int i = 0; i < descriptor_->field_count(); i++) {
totalBits +=
field_generators_.get(descriptor_->field(i)).GetNumBitsForMessage();
}
int totalInts = (totalBits + 31) / 32;
for (int i = 0; i < totalInts; i++) {
printer->Print("private int $bit_field_name$;\n", "bit_field_name",
GetBitFieldName(i));
}
// oneof
......
......@@ -59,13 +59,6 @@ namespace protobuf {
namespace compiler {
namespace java {
namespace {
bool GenerateHasBits(const Descriptor* descriptor) {
return SupportFieldPresence(descriptor->file()) ||
HasRepeatedFields(descriptor);
}
} // namespace
MessageBuilderLiteGenerator::MessageBuilderLiteGenerator(
const Descriptor* descriptor, Context* context)
: descriptor_(descriptor), context_(context),
......
......@@ -137,11 +137,11 @@ ImmutableMessageFieldGenerator(const FieldDescriptor* descriptor,
ImmutableMessageFieldGenerator::~ImmutableMessageFieldGenerator() {}
int ImmutableMessageFieldGenerator::GetNumBitsForMessage() const {
return 1;
return SupportFieldPresence(descriptor_->file()) ? 1 : 0;
}
int ImmutableMessageFieldGenerator::GetNumBitsForBuilder() const {
return 1;
return GetNumBitsForMessage();
}
void ImmutableMessageFieldGenerator::
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment