Unverified Commit 1354e469 authored by Paul Yang's avatar Paul Yang Committed by GitHub

Merge pull request #5502 from TeBoring/github-sync

Down-integrate internal changes to github.
parents 043b99c7 f8fc10df
No preview for this file type
......@@ -32,7 +32,22 @@ package com.google.protobuf;
import static com.google.protobuf.Internal.checkNotNull;
import com.google.protobuf.DescriptorProtos.*;
import com.google.protobuf.DescriptorProtos.DescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumDescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumOptions;
import com.google.protobuf.DescriptorProtos.EnumValueDescriptorProto;
import com.google.protobuf.DescriptorProtos.EnumValueOptions;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;
import com.google.protobuf.DescriptorProtos.FieldOptions;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.DescriptorProtos.FileOptions;
import com.google.protobuf.DescriptorProtos.MessageOptions;
import com.google.protobuf.DescriptorProtos.MethodDescriptorProto;
import com.google.protobuf.DescriptorProtos.MethodOptions;
import com.google.protobuf.DescriptorProtos.OneofDescriptorProto;
import com.google.protobuf.DescriptorProtos.OneofOptions;
import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto;
import com.google.protobuf.DescriptorProtos.ServiceOptions;
import com.google.protobuf.Descriptors.FileDescriptor.Syntax;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
......@@ -1211,7 +1226,7 @@ public final class Descriptors {
StringBuilder result = new StringBuilder(name.length());
boolean isNextUpperCase = false;
for (int i = 0; i < name.length(); i++) {
Character ch = name.charAt(i);
char ch = name.charAt(i);
if (ch == '_') {
isNextUpperCase = true;
} else if (isNextUpperCase) {
......
......@@ -1681,6 +1681,15 @@ public abstract class GeneratedMessageV3 extends AbstractMessage
}
}
@Override
public Message.Builder newBuilderForField(final FieldDescriptor field) {
  // Extension fields have no generated builder available on this message, so
  // they are served by a DynamicMessage builder for the extension's message
  // type; ordinary fields defer to the generated implementation.
  return field.isExtension()
      ? DynamicMessage.newBuilder(field.getMessageType())
      : super.newBuilderForField(field);
}
protected final void mergeExtensionFields(final ExtendableMessage other) {
ensureExtensionsIsMutable();
extensions.mergeFrom(other.extensions);
......
......@@ -1127,7 +1127,6 @@ public final class TextFormat {
PARSER.merge(input, builder);
}
/**
* Parse a text-format message from {@code input}.
*
......@@ -1167,7 +1166,6 @@ public final class TextFormat {
PARSER.merge(input, extensionRegistry, builder);
}
/**
* Parse a text-format message from {@code input}. Extensions will be recognized if they are
* registered in {@code extensionRegistry}.
......@@ -1187,7 +1185,6 @@ public final class TextFormat {
}
/**
* Parser for text-format proto2 instances. This class is thread-safe. The implementation largely
* follows google/protobuf/text_format.cc.
......@@ -1217,36 +1214,6 @@ public final class TextFormat {
FORBID_SINGULAR_OVERWRITES
}
/**
* Determines how to deal with repeated values for singular Message fields. For example,
* given a field "foo" containing subfields "baz" and "qux":
*
* <ul>
* <li>"foo { baz: 1 } foo { baz: 2 }", or
* <li>"foo { baz: 1 } foo { qux: 2 }"
* </ul>
*/
public enum MergingStyle {
/**
* Merge the values in standard protobuf fashion:
*
* <ul>
* <li>"foo { baz: 2 }" and
* <li>"foo { baz: 1, qux: 2 }", respectively.
* </ul>
*/
RECURSIVE,
/**
* Later values overwrite ("clobber") previous values:
*
* <ul>
* <li>"foo { baz: 2 }" and
* <li>"foo { qux: 2 }", respectively.
* </ul>
*/
NON_RECURSIVE
}
private final boolean allowUnknownFields;
private final boolean allowUnknownEnumValues;
private final boolean allowUnknownExtensions;
......@@ -1349,7 +1316,6 @@ public final class TextFormat {
}
private static final int BUFFER_SIZE = 4096;
// TODO(chrisn): See if working around java.io.Reader#read(CharBuffer)
......@@ -1435,20 +1401,18 @@ public final class TextFormat {
List<UnknownField> unknownFields = new ArrayList<UnknownField>();
while (!tokenizer.atEnd()) {
mergeField(tokenizer, extensionRegistry, target, MergingStyle.RECURSIVE, unknownFields);
mergeField(tokenizer, extensionRegistry, target, unknownFields);
}
checkUnknownFields(unknownFields);
}
/** Parse a single field from {@code tokenizer} and merge it into {@code builder}. */
private void mergeField(
final Tokenizer tokenizer,
final ExtensionRegistry extensionRegistry,
final MessageReflection.MergeTarget target,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
mergeField(
......@@ -1456,7 +1420,6 @@ public final class TextFormat {
extensionRegistry,
target,
parseInfoTreeBuilder,
mergingStyle,
unknownFields);
}
......@@ -1466,7 +1429,6 @@ public final class TextFormat {
final ExtensionRegistry extensionRegistry,
final MessageReflection.MergeTarget target,
TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
FieldDescriptor field = null;
......@@ -1573,7 +1535,6 @@ public final class TextFormat {
field,
extension,
childParseTreeBuilder,
mergingStyle,
unknownFields);
} else {
consumeFieldValues(
......@@ -1583,7 +1544,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
} else {
......@@ -1595,7 +1555,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
......@@ -1620,7 +1579,6 @@ public final class TextFormat {
final FieldDescriptor field,
final ExtensionRegistry.ExtensionInfo extension,
final TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
// Support specifying repeated field values as a comma-separated list.
......@@ -1635,7 +1593,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
if (tokenizer.tryConsume("]")) {
// End of list.
......@@ -1652,7 +1609,6 @@ public final class TextFormat {
field,
extension,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
}
......@@ -1665,7 +1621,6 @@ public final class TextFormat {
final FieldDescriptor field,
final ExtensionRegistry.ExtensionInfo extension,
final TextFormatParseInfoTree.Builder parseTreeBuilder,
final MergingStyle mergingStyle,
List<UnknownField> unknownFields)
throws ParseException {
if (singularOverwritePolicy == SingularOverwritePolicy.FORBID_SINGULAR_OVERWRITES
......@@ -1698,18 +1653,9 @@ public final class TextFormat {
endToken = "}";
}
final MessageReflection.MergeTarget subField;
Message defaultInstance = (extension == null) ? null : extension.defaultInstance;
switch (mergingStyle) {
case RECURSIVE:
subField = target.newMergeTargetForField(field, defaultInstance);
break;
case NON_RECURSIVE:
subField = target.newEmptyTargetForField(field, defaultInstance);
break;
default:
throw new AssertionError();
}
MessageReflection.MergeTarget subField =
target.newMergeTargetForField(field, defaultInstance);
while (!tokenizer.tryConsume(endToken)) {
if (tokenizer.atEnd()) {
......@@ -1720,7 +1666,6 @@ public final class TextFormat {
extensionRegistry,
subField,
parseTreeBuilder,
mergingStyle,
unknownFields);
}
......
......@@ -391,14 +391,12 @@ final class UnsafeUtil {
}
/**
* Gets the field with the given name within the class, or {@code null} if not found. If found,
* the field is made accessible.
* Gets the field with the given name within the class, or {@code null} if not found.
*/
private static Field field(Class<?> clazz, String fieldName) {
Field field;
try {
field = clazz.getDeclaredField(fieldName);
field.setAccessible(true);
} catch (Throwable t) {
// Failed to access the fields.
field = null;
......
......@@ -1104,7 +1104,8 @@ final class Utf8 {
private static int partialIsValidUtf8NonAscii(byte[] bytes, int index, int limit) {
for (; ; ) {
int byte1, byte2;
int byte1;
int byte2;
// Optimize for interior runs of ASCII bytes.
do {
......
......@@ -33,8 +33,10 @@ package com.google.protobuf;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.OneofDescriptor;
import protobuf_unittest.UnittestProto;
import protobuf_unittest.UnittestProto.TestAllExtensions;
import protobuf_unittest.UnittestProto.TestAllTypes;
import protobuf_unittest.UnittestProto.TestAllTypes.NestedMessage;
import protobuf_unittest.UnittestProto.TestEmptyMessage;
import protobuf_unittest.UnittestProto.TestPackedTypes;
import java.util.Arrays;
......@@ -223,6 +225,17 @@ public class DynamicMessageTest extends TestCase {
packedReflectionTester.assertPackedFieldsSetViaReflection(message3);
}
public void testGetBuilderForExtensionField() {
  // newBuilderForField on a message extension must return a usable builder
  // for the extension's message type (NestedMessage here).
  DynamicMessage.Builder messageBuilder =
      DynamicMessage.newBuilder(TestAllExtensions.getDescriptor());
  Message.Builder nestedBuilder =
      messageBuilder.newBuilderForField(
          UnittestProto.optionalNestedMessageExtension.getDescriptor());
  FieldDescriptor bbField =
      NestedMessage.getDescriptor().findFieldByNumber(NestedMessage.BB_FIELD_NUMBER);
  final int expectedValue = 7432;
  nestedBuilder.setField(bbField, expectedValue);
  assertEquals(expectedValue, nestedBuilder.build().getField(bbField));
}
public void testDynamicMessageCopy() throws Exception {
TestAllTypes.Builder builder = TestAllTypes.newBuilder();
TestUtil.setAllFields(builder);
......
......@@ -578,6 +578,29 @@ public class GeneratedMessageTest extends TestCase {
TestUtil.assertAllExtensionsSet(message);
}
public void testGetBuilderForExtensionField() {
  // A generated extendable builder must also hand back a builder for a
  // message-typed extension field via newBuilderForField.
  TestAllExtensions.Builder extendableBuilder = TestAllExtensions.newBuilder();
  Message.Builder nestedBuilder =
      extendableBuilder.newBuilderForField(
          UnittestProto.optionalNestedMessageExtension.getDescriptor());
  FieldDescriptor bbField =
      NestedMessage.getDescriptor().findFieldByNumber(NestedMessage.BB_FIELD_NUMBER);
  final int expectedValue = 7432;
  nestedBuilder.setField(bbField, expectedValue);
  assertEquals(expectedValue, nestedBuilder.build().getField(bbField));
}
public void testGetBuilderForNonMessageExtensionField() {
  // An int32 extension has no message type, so requesting a sub-builder for
  // it must be rejected.
  TestAllExtensions.Builder extendableBuilder = TestAllExtensions.newBuilder();
  try {
    // This should throw an exception because the extension field is not a message.
    extendableBuilder.newBuilderForField(
        UnittestProto.optionalInt32Extension.getDescriptor());
    fail("Exception was not thrown");
  } catch (UnsupportedOperationException expected) {
    // This exception is expected.
  }
}
public void testExtensionRepeatedSetters() throws Exception {
TestAllExtensions.Builder builder = TestAllExtensions.newBuilder();
TestUtil.setAllExtensions(builder);
......
......@@ -42,6 +42,7 @@ import static com.google.protobuf.util.Timestamps.NANOS_PER_MICROSECOND;
import static com.google.protobuf.util.Timestamps.NANOS_PER_MILLISECOND;
import static com.google.protobuf.util.Timestamps.NANOS_PER_SECOND;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Duration;
import java.text.ParseException;
import java.util.Comparator;
......@@ -91,8 +92,8 @@ public final class Durations {
}
/**
* Compares two durations. The value returned is identical to what would be returned by:
* {@code Durations.comparator().compare(x, y)}.
* Compares two durations. The value returned is identical to what would be returned by: {@code
* Durations.comparator().compare(x, y)}.
*
* @return the value {@code 0} if {@code x == y}; a value less than {@code 0} if {@code x < y};
* and a value greater than {@code 0} if {@code x > y}
......@@ -151,6 +152,7 @@ public final class Durations {
* @throws IllegalArgumentException if {@code duration} is negative or invalid
* @throws NullPointerException if {@code duration} is {@code null}
*/
@CanIgnoreReturnValue
public static Duration checkNotNegative(Duration duration) {
checkValid(duration);
checkArgument(!isNegative(duration), "duration (%s) must not be negative", toString(duration));
......@@ -163,6 +165,7 @@ public final class Durations {
* @throws IllegalArgumentException if {@code duration} is negative, {@code ZERO}, or invalid
* @throws NullPointerException if {@code duration} is {@code null}
*/
@CanIgnoreReturnValue
public static Duration checkPositive(Duration duration) {
checkValid(duration);
checkArgument(
......@@ -173,19 +176,32 @@ public final class Durations {
}
/** Throws an {@link IllegalArgumentException} if the given {@link Duration} is not valid. */
@CanIgnoreReturnValue
public static Duration checkValid(Duration duration) {
long seconds = duration.getSeconds();
int nanos = duration.getNanos();
if (!isValid(seconds, nanos)) {
throw new IllegalArgumentException(String.format(
"Duration is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-315,576,000,000, +315,576,000,000]. "
+ "Nanos (%s) must be in range [-999,999,999, +999,999,999]. "
+ "Nanos must have the same sign as seconds", seconds, nanos));
throw new IllegalArgumentException(
String.format(
"Duration is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-315,576,000,000, +315,576,000,000]. "
+ "Nanos (%s) must be in range [-999,999,999, +999,999,999]. "
+ "Nanos must have the same sign as seconds",
seconds, nanos));
}
return duration;
}
/**
 * Builds the given builder and throws an {@link IllegalArgumentException} if it is not valid.
 * See {@link #checkValid(Duration)}.
 *
 * @param durationBuilder the builder to build and validate
 * @return a valid, built {@link Duration}
 * @throws IllegalArgumentException if the built duration is not valid
 */
@CanIgnoreReturnValue
public static Duration checkValid(Duration.Builder durationBuilder) {
  return checkValid(durationBuilder.build());
}
/**
* Convert Duration to string format. The string format will contains 3, 6, or 9 fractional digits
* depending on the precision required to represent the exact Duration value. For example: "1s",
......
......@@ -30,6 +30,7 @@
package com.google.protobuf.util;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
......@@ -88,15 +89,14 @@ final class FieldMaskTree {
}
/**
* Adds a field path to the tree. In a FieldMask, every field path matches the
* specified field as well as all its sub-fields. For example, a field path
* "foo.bar" matches field "foo.bar" and also "foo.bar.baz", etc. When adding
* a field path to the tree, redundant sub-paths will be removed. That is,
* after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
* exists, which will turn the tree node for "foo.bar" to a leaf node.
* Likewise, if the field path to add is a sub-path of an existing leaf node,
* nothing will be changed in the tree.
* Adds a field path to the tree. In a FieldMask, every field path matches the specified field as
* well as all its sub-fields. For example, a field path "foo.bar" matches field "foo.bar" and
* also "foo.bar.baz", etc. When adding a field path to the tree, redundant sub-paths will be
* removed. That is, after adding "foo.bar" to the tree, "foo.bar.baz" will be removed if it
* exists, which will turn the tree node for "foo.bar" to a leaf node. Likewise, if the field path
* to add is a sub-path of an existing leaf node, nothing will be changed in the tree.
*/
@CanIgnoreReturnValue
FieldMaskTree addFieldPath(String path) {
String[] parts = path.split(FIELD_PATH_SEPARATOR_REGEX);
if (parts.length == 0) {
......@@ -125,9 +125,8 @@ final class FieldMaskTree {
return this;
}
/**
* Merges all field paths in a FieldMask into this tree.
*/
/** Merges all field paths in a FieldMask into this tree. */
@CanIgnoreReturnValue
FieldMaskTree mergeFromFieldMask(FieldMask mask) {
for (String path : mask.getPathsList()) {
addFieldPath(path);
......
......@@ -36,12 +36,12 @@ import com.google.common.base.CaseFormat;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.primitives.Ints;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Internal;
import com.google.protobuf.Message;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
......@@ -277,9 +277,7 @@ public class FieldMaskUtil {
/**
* Whether to replace message fields (i.e., discard existing content in
* destination message fields) when merging.
* Default behavior is to merge the source message field into the
* destination message field.
* destination message fields).
*/
public boolean replaceMessageFields() {
return replaceMessageFields;
......@@ -287,9 +285,7 @@ public class FieldMaskUtil {
/**
* Whether to replace repeated fields (i.e., discard existing content in
* destination repeated fields) when merging.
* Default behavior is to append elements from source repeated field to the
* destination repeated field.
* destination repeated fields).
*/
public boolean replaceRepeatedFields() {
return replaceRepeatedFields;
......@@ -297,30 +293,51 @@ public class FieldMaskUtil {
/**
* Whether to replace primitive (non-repeated and non-message) fields in
* destination message fields with the source primitive fields (i.e., if the
* field is set in the source, the value is copied to the
* destination; if the field is unset in the source, the field is cleared
* from the destination) when merging.
*
* <p>Default behavior is to always set the value of the source primitive
* field to the destination primitive field, and if the source field is
* unset, the default value of the source field is copied to the
* destination.
* destination message fields with the source primitive fields (i.e., clear
* destination field if source field is not set).
*/
public boolean replacePrimitiveFields() {
return replacePrimitiveFields;
}
/**
 * Sets whether message fields in the destination are replaced rather than merged. Defaults to
 * false.
 *
 * <p>When true, existing content in destination message fields is discarded during a merge.
 *
 * <p>When false, the source message field is merged into the destination message field.
 */
@CanIgnoreReturnValue
public MergeOptions setReplaceMessageFields(boolean value) {
  this.replaceMessageFields = value;
  return this;
}
/**
 * Specify whether to replace repeated fields. Defaults to false.
 *
 * <p>If true, discard existing content in destination repeated fields when merging.
 *
 * <p>If false, append elements from source repeated field to the destination repeated field.
 */
@CanIgnoreReturnValue
public MergeOptions setReplaceRepeatedFields(boolean value) {
  replaceRepeatedFields = value;
  return this;
}
/**
* Specify whether to replace primitive (non-repeated and non-message) fields in destination
* message fields with the source primitive fields. Defaults to false.
*
* <p>If true, set the value of the destination primitive field to the source primitive field if
* the source field is set, but clear the destination field otherwise.
*
* <p>If false, always set the value of the destination primitive field to the source primitive
* field, and if the source field is unset, the default value of the source field is copied to
* the destination.
*/
@CanIgnoreReturnValue
public MergeOptions setReplacePrimitiveFields(boolean value) {
replacePrimitiveFields = value;
return this;
......
......@@ -32,6 +32,7 @@ package com.google.protobuf.util;
import com.google.common.base.Preconditions;
import com.google.common.io.BaseEncoding;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
......@@ -226,7 +227,7 @@ public class JsonFormat {
return new Printer(
registry,
false,
fieldsToAlwaysOutput,
Collections.unmodifiableSet(new HashSet<>(fieldsToAlwaysOutput)),
preservingProtoFieldNames,
omittingInsignificantWhitespace,
printingEnumsAsInts,
......@@ -467,9 +468,10 @@ public class JsonFormat {
private Builder() {}
/**
* Adds a message type and all types defined in the same .proto file as
* well as all transitively imported .proto files to this {@link Builder}.
* Adds a message type and all types defined in the same .proto file as well as all
* transitively imported .proto files to this {@link Builder}.
*/
@CanIgnoreReturnValue
public Builder add(Descriptor messageType) {
if (types == null) {
throw new IllegalStateException("A TypeRegistry.Builer can only be used once.");
......@@ -479,9 +481,10 @@ public class JsonFormat {
}
/**
* Adds message types and all types defined in the same .proto file as
* well as all transitively imported .proto files to this {@link Builder}.
* Adds message types and all types defined in the same .proto file as well as all
* transitively imported .proto files to this {@link Builder}.
*/
@CanIgnoreReturnValue
public Builder add(Iterable<Descriptor> messageTypes) {
if (types == null) {
throw new IllegalStateException("A TypeRegistry.Builder can only be used once.");
......
......@@ -36,6 +36,7 @@ import static com.google.common.math.LongMath.checkedAdd;
import static com.google.common.math.LongMath.checkedMultiply;
import static com.google.common.math.LongMath.checkedSubtract;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.protobuf.Duration;
import com.google.protobuf.Timestamp;
import java.text.ParseException;
......@@ -119,8 +120,8 @@ public final class Timestamps {
}
/**
* Compares two timestamps. The value returned is identical to what would be returned by:
* {@code Timestamps.comparator().compare(x, y)}.
* Compares two timestamps. The value returned is identical to what would be returned by: {@code
* Timestamps.comparator().compare(x, y)}.
*
* @return the value {@code 0} if {@code x == y}; a value less than {@code 0} if {@code x < y};
* and a value greater than {@code 0} if {@code x > y}
......@@ -162,18 +163,31 @@ public final class Timestamps {
}
/** Throws an {@link IllegalArgumentException} if the given {@link Timestamp} is not valid. */
@CanIgnoreReturnValue
public static Timestamp checkValid(Timestamp timestamp) {
long seconds = timestamp.getSeconds();
int nanos = timestamp.getNanos();
if (!isValid(seconds, nanos)) {
throw new IllegalArgumentException(String.format(
"Timestamp is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-62,135,596,800, +253,402,300,799]. "
+ "Nanos (%s) must be in range [0, +999,999,999].", seconds, nanos));
throw new IllegalArgumentException(
String.format(
"Timestamp is not valid. See proto definition for valid values. "
+ "Seconds (%s) must be in range [-62,135,596,800, +253,402,300,799]. "
+ "Nanos (%s) must be in range [0, +999,999,999].",
seconds, nanos));
}
return timestamp;
}
/**
 * Builds the given builder and throws an {@link IllegalArgumentException} if it is not valid.
 * See {@link #checkValid(Timestamp)}.
 *
 * @param timestampBuilder the builder to build and validate
 * @return a valid, built {@link Timestamp}
 * @throws IllegalArgumentException if the built timestamp is not valid
 */
@CanIgnoreReturnValue
public static Timestamp checkValid(Timestamp.Builder timestampBuilder) {
  return checkValid(timestampBuilder.build());
}
/**
* Convert Timestamp to RFC 3339 date string format. The output will always be Z-normalized and
* uses 3, 6 or 9 fractional digits as required to represent the exact value. Note that Timestamp
......
......@@ -49,19 +49,20 @@ import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat.TypeRegistry;
import com.google.protobuf.util.JsonTestProto.TestAllTypes;
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedEnum;
import com.google.protobuf.util.JsonTestProto.TestAllTypes.NestedMessage;
import com.google.protobuf.util.JsonTestProto.TestAny;
import com.google.protobuf.util.JsonTestProto.TestCustomJsonName;
import com.google.protobuf.util.JsonTestProto.TestDuration;
import com.google.protobuf.util.JsonTestProto.TestFieldMask;
import com.google.protobuf.util.JsonTestProto.TestMap;
import com.google.protobuf.util.JsonTestProto.TestOneof;
import com.google.protobuf.util.JsonTestProto.TestRecursive;
import com.google.protobuf.util.JsonTestProto.TestStruct;
import com.google.protobuf.util.JsonTestProto.TestTimestamp;
import com.google.protobuf.util.JsonTestProto.TestWrappers;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.AliasedEnum;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.NestedEnum;
import com.google.protobuf.util.proto.JsonTestProto.TestAllTypes.NestedMessage;
import com.google.protobuf.util.proto.JsonTestProto.TestAny;
import com.google.protobuf.util.proto.JsonTestProto.TestCustomJsonName;
import com.google.protobuf.util.proto.JsonTestProto.TestDuration;
import com.google.protobuf.util.proto.JsonTestProto.TestFieldMask;
import com.google.protobuf.util.proto.JsonTestProto.TestMap;
import com.google.protobuf.util.proto.JsonTestProto.TestOneof;
import com.google.protobuf.util.proto.JsonTestProto.TestRecursive;
import com.google.protobuf.util.proto.JsonTestProto.TestStruct;
import com.google.protobuf.util.proto.JsonTestProto.TestTimestamp;
import com.google.protobuf.util.proto.JsonTestProto.TestWrappers;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
......@@ -676,7 +677,7 @@ public class JsonFormatTest extends TestCase {
+ "}",
builder);
fail();
} catch (InvalidProtocolBufferException e) {
// Exception expected.
}
......@@ -1159,8 +1160,8 @@ public class JsonFormatTest extends TestCase {
}
public void testParserAcceptBase64Variants() throws Exception {
assertAccepts("optionalBytes", "AQI"); // No padding
assertAccepts("optionalBytes", "-_w"); // base64Url, no padding
assertAccepts("optionalBytes", "AQI"); // No padding
assertAccepts("optionalBytes", "-_w"); // base64Url, no padding
}
public void testParserRejectInvalidEnumValue() throws Exception {
......@@ -1197,6 +1198,23 @@ public class JsonFormatTest extends TestCase {
assertEquals(0, builder.getOptionalNestedEnumValue());
}
public void testParserSupportAliasEnums() throws Exception {
  // "QUX", "qux" and "bAz" are all aliases of numeric value 2, so each must
  // parse to the canonical ALIAS_BAZ constant.
  for (String alias : new String[] {"QUX", "qux", "bAz"}) {
    TestAllTypes.Builder builder = TestAllTypes.newBuilder();
    String json = "{\n" + " \"optionalAliasedEnum\": \"" + alias + "\"\n" + "}";
    JsonFormat.parser().merge(json, builder);
    assertEquals(AliasedEnum.ALIAS_BAZ, builder.getOptionalAliasedEnum());
  }
}
public void testUnknownEnumMap() throws Exception {
TestMap.Builder builder = TestMap.newBuilder();
JsonFormat.parser()
......@@ -1280,7 +1298,8 @@ public class JsonFormatTest extends TestCase {
+ " \"repeatedString\": [],\n"
+ " \"repeatedBytes\": [],\n"
+ " \"repeatedNestedMessage\": [],\n"
+ " \"repeatedNestedEnum\": []\n"
+ " \"repeatedNestedEnum\": [],\n"
+ " \"optionalAliasedEnum\": \"ALIAS_FOO\"\n"
+ "}",
JsonFormat.printer().includingDefaultValueFields().print(message));
......@@ -1644,11 +1663,11 @@ public class JsonFormatTest extends TestCase {
mapBuilder.putStringToInt32Map("\ud834\udd20", 3); // utf-8 F0 9D 84 A0
mapBuilder.putStringToInt32Map("foo", 99);
mapBuilder.putStringToInt32Map("xxx", 123);
mapBuilder.putStringToInt32Map("\u20ac", 1); // utf-8 E2 82 AC
mapBuilder.putStringToInt32Map("\u20ac", 1); // utf-8 E2 82 AC
mapBuilder.putStringToInt32Map("abc", 20);
mapBuilder.putStringToInt32Map("19", 19);
mapBuilder.putStringToInt32Map("8", 8);
mapBuilder.putStringToInt32Map("\ufb00", 2); // utf-8 EF AC 80
mapBuilder.putStringToInt32Map("\ufb00", 2); // utf-8 EF AC 80
mapBuilder.putInt32ToInt32Map(3, 3);
mapBuilder.putInt32ToInt32Map(10, 10);
mapBuilder.putInt32ToInt32Map(5, 5);
......
......@@ -32,7 +32,7 @@ syntax = "proto3";
package json_test;
option java_package = "com.google.protobuf.util";
option java_package = "com.google.protobuf.util.proto";
option java_outer_classname = "JsonTestProto";
import "google/protobuf/any.proto";
......@@ -48,6 +48,17 @@ message TestAllTypes {
BAR = 1;
BAZ = 2;
}
// Enum with aliases: several names deliberately map to the same numeric
// value so that alias handling in the JSON parser can be exercised.
enum AliasedEnum {
  option allow_alias = true;  // required for multiple names sharing a number

  ALIAS_FOO = 0;
  ALIAS_BAR = 1;
  ALIAS_BAZ = 2;
  QUX = 2;  // alias of ALIAS_BAZ
  qux = 2;  // alias of ALIAS_BAZ (lowercase variant)
  bAz = 2;  // alias of ALIAS_BAZ (mixed-case variant)
}
message NestedMessage {
int32 value = 1;
}
......@@ -69,6 +80,7 @@ message TestAllTypes {
bytes optional_bytes = 15;
NestedMessage optional_nested_message = 18;
NestedEnum optional_nested_enum = 21;
AliasedEnum optional_aliased_enum = 52;
// Repeated
repeated int32 repeated_int32 = 31;
......
......@@ -55,7 +55,7 @@ goog.forwardDeclare('jspb.BinaryMessage');
goog.forwardDeclare('jspb.BinaryReader');
goog.forwardDeclare('jspb.BinaryWriter');
goog.forwardDeclare('jspb.Message');
goog.forwardDeclare('jsproto.BinaryExtension');
goog.forwardDeclare('jsprotolib.BinaryExtension');
......@@ -122,7 +122,7 @@ jspb.RepeatedFieldType;
!Uint8Array|
!jspb.ConstBinaryMessage|
!jspb.BinaryMessage|
!jsproto.BinaryExtension}
!jsprotolib.BinaryExtension}
*/
jspb.AnyFieldType;
......
......@@ -63,7 +63,7 @@ goog.require('jspb.utils');
* @struct
*/
jspb.BinaryIterator = function(opt_decoder, opt_next, opt_elements) {
/** @private {jspb.BinaryDecoder} */
/** @private {?jspb.BinaryDecoder} */
this.decoder_ = null;
/**
......
......@@ -97,7 +97,7 @@ jspb.BinaryReader = function(opt_bytes, opt_start, opt_length) {
/**
* User-defined reader callbacks.
* @private {Object<string, function(!jspb.BinaryReader):*>}
* @private {?Object<string, function(!jspb.BinaryReader):*>}
*/
this.readCallbacks_ = null;
};
......
This diff is collapsed.
......@@ -49,9 +49,11 @@ from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import wrappers_pb2
from google.protobuf import any_test_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor_pool
from google.protobuf import json_format
from google.protobuf.util import json_format_proto3_pb2
......@@ -202,8 +204,8 @@ class JsonFormatTest(JsonFormatBase):
def testJsonParseDictToAnyDoesNotAlterInput(self):
orig_dict = {
"int32Value": 20,
"@type": "type.googleapis.com/proto3.TestMessage"
'int32Value': 20,
'@type': 'type.googleapis.com/proto3.TestMessage'
}
copied_dict = json.loads(json.dumps(orig_dict))
parsed_message = any_pb2.Any()
......@@ -628,6 +630,19 @@ class JsonFormatTest(JsonFormatBase):
' "value": {\n'
' "@type": "type.googleapis.com/proto3.TestMessage"')
def testAnyMessageDescriptorPoolMissingType(self):
  # Serializing an Any whose packed type is absent from the supplied
  # descriptor pool must raise TypeError naming the missing type_url.
  packed_message = unittest_pb2.OneString()
  packed_message.data = 'string'
  message = any_test_pb2.TestAny()
  message.any_value.Pack(packed_message)
  empty_pool = descriptor_pool.DescriptorPool()
  with self.assertRaises(TypeError) as cm:
    json_format.MessageToJson(message, True, descriptor_pool=empty_pool)
  self.assertEqual(
      'Can not find message descriptor by type_url:'
      ' type.googleapis.com/protobuf_unittest.OneString.',
      str(cm.exception))
def testWellKnownInAnyMessage(self):
message = any_pb2.Any()
int32_value = wrappers_pb2.Int32Value()
......@@ -1016,6 +1031,32 @@ class JsonFormatTest(JsonFormatBase):
json_format.ParseDict(js_dict, message)
self.assertEqual(expected, message.int32_value)
def testParseDictAnyDescriptorPoolMissingType(self):
  # Confirm that ParseDict does not raise ParseError with default pool
  # (the type is registered in the generated default pool).
  js_dict = {
      'any_value': {
          '@type': 'type.googleapis.com/proto3.MessageType',
          'value': 1234
      }
  }
  json_format.ParseDict(js_dict, any_test_pb2.TestAny())
  # Check ParseDict raises ParseError with empty pool, since the packed
  # type cannot be resolved there.
  js_dict = {
      'any_value': {
          '@type': 'type.googleapis.com/proto3.MessageType',
          'value': 1234
      }
  }
  with self.assertRaises(json_format.ParseError) as cm:
    empty_pool = descriptor_pool.DescriptorPool()
    json_format.ParseDict(js_dict,
                          any_test_pb2.TestAny(),
                          descriptor_pool=empty_pool)
  self.assertEqual(
      str(cm.exception),
      'Failed to parse any_value field: Can not find message descriptor by'
      ' type_url: type.googleapis.com/proto3.MessageType..')
def testMessageToDict(self):
message = json_format_proto3_pb2.TestMessage()
message.int32_value = 12345
......
......@@ -30,6 +30,7 @@
syntax = "proto2";
package google.protobuf.python.internal;
message TestEnumValues {
......
......@@ -1406,7 +1406,11 @@ def _DiscardUnknownFields(self):
self._unknown_field_set = None # pylint: disable=protected-access
for field, value in self.ListFields():
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
if field.label == _FieldDescriptor.LABEL_REPEATED:
if _IsMapField(field):
if _IsMessageMapField(field):
for key in value:
value[key].DiscardUnknownFields()
elif field.label == _FieldDescriptor.LABEL_REPEATED:
for sub_message in value:
sub_message.DiscardUnknownFields()
else:
......
......@@ -715,6 +715,24 @@ class TextFormatParserTests(TextFormatBase):
self.assertEqual(m.optional_string, self._GOLDEN_UNICODE)
self.assertEqual(m.repeated_bytes[0], self._GOLDEN_BYTES)
def testParseDuplicateMessages(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
six.assertRaisesRegex(self, text_format.ParseError, (
r'1:59 : Message type "\w+.TestAllTypes" '
r'should not have multiple "optional_nested_message" fields.'),
text_format.Parse, text,
message)
def testParseDuplicateScalars(self, message_module):
message = message_module.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
six.assertRaisesRegex(self, text_format.ParseError, (
r'1:36 : Message type "\w+.TestAllTypes" should not '
r'have multiple "optional_int32" fields.'), text_format.Parse, text,
message)
@_parameterized.parameters(unittest_pb2, unittest_proto3_arena_pb2)
class TextFormatMergeTests(TextFormatBase):
......@@ -1293,16 +1311,6 @@ class Proto2Tests(TextFormatBase):
'"protobuf_unittest.optional_int32_extension" extensions.'),
text_format.Parse, text, message)
def testParseDuplicateMessages(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
six.assertRaisesRegex(self, text_format.ParseError, (
'1:59 : Message type "protobuf_unittest.TestAllTypes" '
'should not have multiple "optional_nested_message" fields.'),
text_format.Parse, text,
message)
def testParseDuplicateExtensionMessages(self):
message = unittest_pb2.TestAllExtensions()
text = ('[protobuf_unittest.optional_nested_message_extension]: {} '
......@@ -1313,14 +1321,6 @@ class Proto2Tests(TextFormatBase):
'"protobuf_unittest.optional_nested_message_extension" extensions.'),
text_format.Parse, text, message)
def testParseDuplicateScalars(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
six.assertRaisesRegex(self, text_format.ParseError, (
'1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
'have multiple "optional_int32" fields.'), text_format.Parse, text,
message)
def testParseGroupNotClosed(self):
message = unittest_pb2.TestAllTypes()
text = 'RepeatedGroup: <'
......
......@@ -39,6 +39,7 @@ try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
......@@ -138,6 +139,18 @@ class UnknownFieldsTest(BaseTestCase):
self.assertEqual(
b'', message.repeated_nested_message[0].SerializeToString())
msg = map_unittest_pb2.TestMap()
msg.map_int32_all_types[1].optional_nested_message.ParseFromString(
other_message.SerializeToString())
msg.map_string_string['1'] = 'test'
self.assertNotEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
msg.DiscardUnknownFields()
self.assertEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
class UnknownFieldsAccessorsTest(BaseTestCase):
......
......@@ -96,12 +96,14 @@ class ParseError(Error):
"""Thrown in case of parsing error."""
def MessageToJson(message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
sort_keys=False,
use_integers_for_enums=False):
def MessageToJson(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
sort_keys=False,
use_integers_for_enums=False,
descriptor_pool=None):
"""Converts protobuf message to JSON format.
Args:
......@@ -117,20 +119,26 @@ def MessageToJson(message,
An indent level of 0 or negative will only insert newlines.
sort_keys: If True, then the output will be sorted by field names.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums)
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool)
return printer.ToJsonString(message, indent, sort_keys)
def MessageToDict(message,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False):
def MessageToDict(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None):
"""Converts protobuf message to a dictionary.
When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
......@@ -145,13 +153,17 @@ def MessageToDict(message,
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
A dict representation of the protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums)
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool)
# pylint: disable=protected-access
return printer._MessageToJsonObject(message)
......@@ -165,13 +177,16 @@ def _IsMapEntry(field):
class _Printer(object):
"""JSON format printer for protocol message."""
def __init__(self,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False):
def __init__(
self,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None):
self.including_default_value_fields = including_default_value_fields
self.preserving_proto_field_name = preserving_proto_field_name
self.use_integers_for_enums = use_integers_for_enums
self.descriptor_pool = descriptor_pool
def ToJsonString(self, message, indent, sort_keys):
js = self._MessageToJsonObject(message)
......@@ -300,7 +315,7 @@ class _Printer(object):
js = OrderedDict()
type_url = message.type_url
js['@type'] = type_url
sub_message = _CreateMessageFromTypeUrl(type_url)
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
sub_message.ParseFromString(message.value)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
......@@ -366,13 +381,13 @@ def _DuplicateChecker(js):
return result
def _CreateMessageFromTypeUrl(type_url):
# TODO(jieluo): Should add a way that users can register the type resolver
# instead of the default one.
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
"""Creates a message from a type URL."""
db = symbol_database.Default()
pool = db.pool if descriptor_pool is None else descriptor_pool
type_name = type_url.split('/')[-1]
try:
message_descriptor = db.pool.FindMessageTypeByName(type_name)
message_descriptor = pool.FindMessageTypeByName(type_name)
except KeyError:
raise TypeError(
'Can not find message descriptor by type_url: {0}.'.format(type_url))
......@@ -380,13 +395,15 @@ def _CreateMessageFromTypeUrl(type_url):
return message_class()
def Parse(text, message, ignore_unknown_fields=False):
def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
"""Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
......@@ -399,21 +416,26 @@ def Parse(text, message, ignore_unknown_fields=False):
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
return ParseDict(js, message, ignore_unknown_fields)
return ParseDict(js, message, ignore_unknown_fields, descriptor_pool)
def ParseDict(js_dict, message, ignore_unknown_fields=False):
def ParseDict(js_dict,
message,
ignore_unknown_fields=False,
descriptor_pool=None):
"""Parses a JSON dictionary representation into a message.
Args:
js_dict: Dict representation of a JSON message.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
"""
parser = _Parser(ignore_unknown_fields)
parser = _Parser(ignore_unknown_fields, descriptor_pool)
parser.ConvertMessage(js_dict, message)
return message
......@@ -424,9 +446,9 @@ _INT_OR_FLOAT = six.integer_types + (float,)
class _Parser(object):
"""JSON format parser for protocol message."""
def __init__(self,
ignore_unknown_fields):
def __init__(self, ignore_unknown_fields, descriptor_pool):
self.ignore_unknown_fields = ignore_unknown_fields
self.descriptor_pool = descriptor_pool
def ConvertMessage(self, value, message):
"""Convert a JSON object into a message.
......@@ -562,7 +584,7 @@ class _Parser(object):
except KeyError:
raise ParseError('@type is missing when parsing any message.')
sub_message = _CreateMessageFromTypeUrl(type_url)
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
......
......@@ -346,11 +346,11 @@ PyObject* MapReflectionFriend::MergeFrom(PyObject* _self, PyObject* arg) {
const Message* other_message = other_map->message;
const Reflection* reflection = message->GetReflection();
const Reflection* other_reflection = other_message->GetReflection();
internal::MapFieldBase* field = reflection->MapData(
internal::MapFieldBase* field = reflection->MutableMapData(
message, self->parent_field_descriptor);
internal::MapFieldBase* other_field =
other_reflection->MapData(const_cast<Message*>(other_message),
self->parent_field_descriptor);
const internal::MapFieldBase* other_field =
other_reflection->GetMapData(*other_message,
self->parent_field_descriptor);
field->MergeFrom(*other_field);
self->version++;
Py_RETURN_NONE;
......
......@@ -937,11 +937,9 @@ class _Parser(object):
else:
getattr(message, field.name).append(value)
else:
# Proto3 doesn't represent presence so we can't test if multiple scalars
# have occurred. We have to allow them.
can_check_presence = not self._IsProto3Syntax(message)
if field.is_extension:
if (not self._allow_multiple_scalars and can_check_presence and
if (not self._allow_multiple_scalars and
not self._IsProto3Syntax(message) and
message.HasExtension(field)):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" extensions.' %
......@@ -949,8 +947,16 @@ class _Parser(object):
else:
message.Extensions[field] = value
else:
if (not self._allow_multiple_scalars and can_check_presence and
message.HasField(field.name)):
duplicate_error = False
if not self._allow_multiple_scalars:
if self._IsProto3Syntax(message):
# Proto3 doesn't represent presence so we try best effort to check
# multiple scalars by compare to default values.
duplicate_error = bool(getattr(message, field.name))
else:
duplicate_error = message.HasField(field.name)
if duplicate_error:
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" fields.' %
(message.DESCRIPTOR.full_name, field.name))
......
......@@ -201,13 +201,13 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// string type_url = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 10) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
ctx->extra_parse_data().SetFieldName("google.protobuf.Any.type_url");
auto str = msg->mutable_type_url();
......@@ -226,7 +226,7 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
// bytes value = 2;
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 18) goto handle_unusual;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = ::google::protobuf::io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
auto str = msg->mutable_value();
if (size > end - ptr + ::google::protobuf::internal::ParseContext::kSlopBytes) {
......@@ -242,7 +242,7 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
......@@ -256,13 +256,9 @@ const char* Any::_InternalParse(const char* begin, const char* end, void* object
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
len_delim_till_end:
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
{parser_till_end, object}, size);
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Any::MergePartialFromCodedStream(
......@@ -352,8 +348,7 @@ void Any::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Any::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Any)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
......
......@@ -148,7 +148,7 @@ class PROTOBUF_EXPORT Any : public ::google::protobuf::Message /* @@protoc_inser
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
This diff is collapsed.
......@@ -142,7 +142,7 @@ class PROTOBUF_EXPORT Api : public ::google::protobuf::Message /* @@protoc_inser
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......@@ -332,7 +332,7 @@ class PROTOBUF_EXPORT Method : public ::google::protobuf::Message /* @@protoc_in
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......@@ -515,7 +515,7 @@ class PROTOBUF_EXPORT Mixin : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
......@@ -271,6 +271,8 @@ inline bool IsWeak(const FieldDescriptor* field, const Options& options) {
return false;
}
bool IsStringInlined(const FieldDescriptor* descriptor, const Options& options);
// For a string field, returns the effective ctype. If the actual ctype is
// not supported, returns the default of STRING.
FieldOptions::CType EffectiveStringCType(const FieldDescriptor* field,
......
......@@ -262,8 +262,7 @@ static void GenerateSerializationLoop(const Formatter& format,
if (to_array) {
format(
"target = ::$proto_ns$::internal::WireFormatLite::InternalWrite"
"$declared_type$NoVirtualToArray($number$, *entry, deterministic, "
"target);\n");
"$declared_type$NoVirtualToArray($number$, *entry, target);\n");
} else {
format(
"::$proto_ns$::internal::WireFormatLite::Write$stream_writer$($number$,"
......@@ -365,7 +364,7 @@ void MapFieldGenerator::GenerateSerializeWithCachedSizes(io::Printer* printer,
" items[static_cast<ptrdiff_t>(n)] = SortItem(&*it);\n"
" }\n"
" ::std::sort(&items[0], &items[static_cast<ptrdiff_t>(n)], Less());\n",
to_array ? "deterministic" : "output->IsSerializationDeterministic()");
to_array ? "false" : "output->IsSerializationDeterministic()");
format.Indent();
GenerateSerializationLoop(format, SupportsArenas(descriptor_), string_key,
string_value, to_array, true);
......
......@@ -800,7 +800,8 @@ void MessageGenerator::GenerateSingularFieldHasBits(
} else {
format(
"inline bool $classname$::has_$name$() const {\n"
" return this != internal_default_instance() && $name$_ != nullptr;\n"
" return this != internal_default_instance() "
"&& $name$_ != nullptr;\n"
"}\n");
}
}
......@@ -941,7 +942,7 @@ void MessageGenerator::GenerateClassDefinition(io::Printer* printer) {
"public:\n"
"#if $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
"static bool _ParseMap(const char* begin, const "
"char* end, void* object, ::google::protobuf::internal::ParseContext* ctx);\n"
"char* end, void* object, ::$proto_ns$::internal::ParseContext* ctx);\n"
"#endif // $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
" typedef ::$proto_ns$::internal::MapEntry$lite$<$classname$, \n"
" $key_cpp$, $val_cpp$,\n"
......@@ -1190,7 +1191,7 @@ void MessageGenerator::GenerateClassDefinition(io::Printer* printer) {
if (HasFastArraySerialization(descriptor_->file(), options_)) {
format(
"$uint8$* InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const final;\n");
" $uint8$* target) const final;\n");
}
}
......@@ -1654,8 +1655,10 @@ int MessageGenerator::GenerateFieldMetadata(io::Printer* printer) {
return 2;
}
format(
"{PROTOBUF_FIELD_OFFSET($classtype$, _cached_size_), 0, 0, 0, nullptr},\n");
"{PROTOBUF_FIELD_OFFSET($classtype$, _cached_size_),"
" 0, 0, 0, nullptr},\n");
std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
sorted_extensions.reserve(descriptor_->extension_range_count());
for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
sorted_extensions.push_back(descriptor_->extension_range(i));
}
......@@ -1864,8 +1867,8 @@ void MessageGenerator::GenerateClassMethods(io::Printer* printer) {
}
format(
"#if $GOOGLE_PROTOBUF$_ENABLE_EXPERIMENTAL_PARSER\n"
"bool $classname$::_ParseMap(const char* begin, const "
"char* end, void* object, ::google::protobuf::internal::ParseContext* ctx) {\n"
"bool $classname$::_ParseMap(const char* begin, const char* end, "
"void* object, ::$proto_ns$::internal::ParseContext* ctx) {\n"
" using MF = ::$proto_ns$::internal::MapField$1$<\n"
" $classname$, EntryKeyType, EntryValueType,\n"
" kEntryKeyFieldType, kEntryValueFieldType,\n"
......@@ -1885,7 +1888,8 @@ void MessageGenerator::GenerateClassMethods(io::Printer* printer) {
format(
" DO_(parser.ParseMapEnumValidation(\n"
" begin, end, ctx->extra_parse_data().field_number,\n"
" static_cast<::google::protobuf::internal::InternalMetadataWithArena$1$*>("
" static_cast<::$proto_ns$::internal::"
"InternalMetadataWithArena$1$*>("
"ctx->extra_parse_data().unknown_fields), $2$_IsValid));\n",
HasDescriptorMethods(descriptor_->file(), options_) ? "" : "Lite",
QualifiedClassName(val->enum_type()));
......@@ -3769,7 +3773,7 @@ void MessageGenerator::GenerateSerializeOneExtensionRange(
if (to_array) {
format(
"target = _extensions_.InternalSerializeWithCachedSizesToArray(\n"
" $start$, $end$, deterministic, target);\n\n");
" $start$, $end$, target);\n\n");
} else {
format(
"_extensions_.SerializeWithCachedSizes($start$, $end$, output);\n"
......@@ -3819,10 +3823,9 @@ void MessageGenerator::GenerateSerializeWithCachedSizesToArray(
// Special-case MessageSet.
format(
"$uint8$* $classname$::InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const {\n"
" $uint8$* target) const {\n"
" target = _extensions_."
"InternalSerializeMessageSetWithCachedSizesToArray(\n"
" deterministic, target);\n");
"InternalSerializeMessageSetWithCachedSizesToArray(target);\n");
GOOGLE_CHECK(UseUnknownFieldSet(descriptor_->file(), options_));
std::map<string, string> vars;
SetUnknkownFieldsVariable(descriptor_, options_, &vars);
......@@ -3839,10 +3842,9 @@ void MessageGenerator::GenerateSerializeWithCachedSizesToArray(
format(
"$uint8$* $classname$::InternalSerializeWithCachedSizesToArray(\n"
" bool deterministic, $uint8$* target) const {\n");
" $uint8$* target) const {\n");
format.Indent();
format("(void)deterministic; // Unused\n");
format("// @@protoc_insertion_point(serialize_to_array_start:$full_name$)\n");
GenerateSerializeWithCachedSizesBody(printer, true);
......@@ -3937,6 +3939,7 @@ void MessageGenerator::GenerateSerializeWithCachedSizesBody(
SortFieldsByNumber(descriptor_);
std::vector<const Descriptor::ExtensionRange*> sorted_extensions;
sorted_extensions.reserve(descriptor_->extension_range_count());
for (int i = 0; i < descriptor_->extension_range_count(); ++i) {
sorted_extensions.push_back(descriptor_->extension_range(i));
}
......
......@@ -477,7 +477,7 @@ GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const {
format(
"target = ::$proto_ns$::internal::WireFormatLite::\n"
" InternalWrite$declared_type$ToArray(\n"
" $number$, HasBitSetters::$name$(this), deterministic, target);\n");
" $number$, HasBitSetters::$name$(this), target);\n");
}
void MessageFieldGenerator::
......@@ -812,8 +812,7 @@ GenerateSerializeWithCachedSizesToArray(io::Printer* printer) const {
" n = static_cast<unsigned int>(this->$name$_size()); i < n; i++) {\n"
" target = ::$proto_ns$::internal::WireFormatLite::\n"
" InternalWrite$declared_type$ToArray(\n"
" $number$, this->$name$(static_cast<int>(i)), deterministic, "
"target);\n"
" $number$, this->$name$(static_cast<int>(i)), target);\n"
"}\n");
}
......
......@@ -43,8 +43,8 @@ class AccessInfoMap;
namespace cpp {
enum class EnforceOptimizeMode {
kNoEnforcement, // Use the runtime specified by the file specific options.
kSpeed, // This is the full runtime.
kNoEnforcement, // Use the runtime specified by the file specific options.
kSpeed, // This is the full runtime.
kLiteRuntime,
};
......
......@@ -89,21 +89,7 @@ StringFieldGenerator::StringFieldGenerator(const FieldDescriptor* descriptor,
const Options& options)
: FieldGenerator(descriptor, options),
lite_(!HasDescriptorMethods(descriptor->file(), options)),
inlined_(false) {
// TODO(ckennelly): Handle inlining for any.proto.
if (IsAnyMessage(descriptor_->containing_type(), options_)) {
inlined_ = false;
}
if (descriptor_->containing_type()->options().map_entry()) {
inlined_ = false;
}
// Limit to proto2, as we rely on has bits to distinguish field presence for
// release_$name$. On proto3, we cannot use the address of the string
// instance when the field has been inlined.
inlined_ = inlined_ && HasFieldPresence(descriptor_->file());
inlined_(IsStringInlined(descriptor, options)) {
SetStringVariables(descriptor, &variables_, options);
}
......
......@@ -147,6 +147,7 @@ enum ConflictingEnum { // NO_PROTO3
NOT_EQ = 1; // NO_PROTO3
volatile = 2; // NO_PROTO3
return = 3; // NO_PROTO3
NULL = 4; // NO_PROTO3
} // NO_PROTO3
message DummyMessage {}
......
......@@ -107,12 +107,13 @@ TEST(GENERATED_MESSAGE_TEST_NAME, TestConflictingEnumNames) {
message.set_conflicting_enum(protobuf_unittest::TestConflictingEnumNames_NestedConflictingEnum_XOR);
EXPECT_EQ(5, message.conflicting_enum());
protobuf_unittest::ConflictingEnum conflicting_enum;
conflicting_enum = protobuf_unittest::NOT_EQ;
EXPECT_EQ(1, conflicting_enum);
conflicting_enum = protobuf_unittest::return_;
EXPECT_EQ(3, conflicting_enum);
conflicting_enum = protobuf_unittest::NULL_;
EXPECT_EQ(4, conflicting_enum);
}
} // namespace cpp_unittest
......
......@@ -2154,6 +2154,7 @@ TEST(HELPERS_TEST_NAME, TestSCC) {
MessageSCCAnalyzer scc_analyzer((Options()));
const SCC* scc = scc_analyzer.GetSCC(a.GetDescriptor());
std::vector<string> names;
names.reserve(scc->descriptors.size());
for (int i = 0; i < scc->descriptors.size(); i++) {
names.push_back(scc->descriptors[i]->full_name());
}
......
......@@ -2430,6 +2430,9 @@ void Generator::GenerateClassFromObject(const GeneratorOptions& options,
" * @param {!Object} obj The object representation of this proto to\n"
" * load the data from.\n"
" * @return {!$classname$}\n"
" * @suppress {missingProperties} To prevent JSCompiler errors at "
"the\n"
" * `goog.isDef(obj.<fieldName>)` lookups.\n"
" */\n"
"$classname$.fromObject = function(obj) {\n"
" var msg = new $classname$();\n",
......@@ -2437,7 +2440,9 @@ void Generator::GenerateClassFromObject(const GeneratorOptions& options,
for (int i = 0; i < desc->field_count(); i++) {
const FieldDescriptor* field = desc->field(i);
GenerateClassFieldFromObject(options, printer, field);
if (!IgnoreField(field)) {
GenerateClassFieldFromObject(options, printer, field);
}
}
printer->Print(
......@@ -2479,9 +2484,8 @@ void Generator::GenerateClassFieldFromObject(
printer->Print(
" goog.isDef(obj.$name$) && "
"jspb.Message.setRepeatedWrapperField(\n"
" msg, $index$, goog.array.map(obj.$name$, function(i) {\n"
" return $fieldclass$.fromObject(i);\n"
" }));\n",
" msg, $index$, obj.$name$.map(\n"
" $fieldclass$.fromObject));\n",
"name", JSObjectFieldName(options, field),
"index", JSFieldIndex(field),
"fieldclass", SubmessageTypeRef(options, field));
......
......@@ -1564,15 +1564,18 @@ bool Parser::ParseExtensions(DescriptorProto* message,
// name literals.
bool Parser::ParseReserved(DescriptorProto* message,
const LocationRecorder& message_location) {
io::Tokenizer::Token start_token = input_->current();
// Parse the declaration.
DO(Consume("reserved"));
if (LookingAtType(io::Tokenizer::TYPE_STRING)) {
LocationRecorder location(message_location,
DescriptorProto::kReservedNameFieldNumber);
location.StartAt(start_token);
return ParseReservedNames(message, location);
} else {
LocationRecorder location(message_location,
DescriptorProto::kReservedRangeFieldNumber);
location.StartAt(start_token);
return ParseReservedNumbers(message, location);
}
}
......@@ -1638,16 +1641,19 @@ bool Parser::ParseReservedNumbers(DescriptorProto* message,
}
bool Parser::ParseReserved(EnumDescriptorProto* message,
const LocationRecorder& message_location) {
const LocationRecorder& message_location) {
io::Tokenizer::Token start_token = input_->current();
// Parse the declaration.
DO(Consume("reserved"));
if (LookingAtType(io::Tokenizer::TYPE_STRING)) {
LocationRecorder location(message_location,
DescriptorProto::kReservedNameFieldNumber);
location.StartAt(start_token);
return ParseReservedNames(message, location);
} else {
LocationRecorder location(message_location,
DescriptorProto::kReservedRangeFieldNumber);
location.StartAt(start_token);
return ParseReservedNumbers(message, location);
}
}
......
......@@ -2739,6 +2739,33 @@ TEST_F(SourceInfoTest, ExtensionRanges) {
EXPECT_TRUE(HasSpan(file_.message_type(0), "name"));
}
TEST_F(SourceInfoTest, ReservedRanges) {
EXPECT_TRUE(
Parse("message Message {\n"
" $a$reserved $b$1$c$ to $d$4$e$, $f$6$g$;$h$\n"
"}\n"));
const DescriptorProto::ReservedRange& range1 =
file_.message_type(0).reserved_range(0);
const DescriptorProto::ReservedRange& range2 =
file_.message_type(0).reserved_range(1);
EXPECT_TRUE(HasSpan('a', 'h', file_.message_type(0), "reserved_range"));
EXPECT_TRUE(HasSpan('b', 'e', range1));
EXPECT_TRUE(HasSpan('b', 'c', range1, "start"));
EXPECT_TRUE(HasSpan('d', 'e', range1, "end"));
EXPECT_TRUE(HasSpan('f', 'g', range2));
EXPECT_TRUE(HasSpan('f', 'g', range2, "start"));
EXPECT_TRUE(HasSpan('f', 'g', range2, "end"));
// Ignore these.
EXPECT_TRUE(HasSpan(file_));
EXPECT_TRUE(HasSpan(file_.message_type(0)));
EXPECT_TRUE(HasSpan(file_.message_type(0), "name"));
}
TEST_F(SourceInfoTest, Oneofs) {
EXPECT_TRUE(Parse(
"message Foo {\n"
......
This diff is collapsed.
......@@ -161,7 +161,7 @@ class PROTOC_EXPORT Version : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......@@ -313,7 +313,7 @@ class PROTOC_EXPORT CodeGeneratorRequest : public ::google::protobuf::Message /*
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......@@ -487,7 +487,7 @@ class PROTOC_EXPORT CodeGeneratorResponse_File : public ::google::protobuf::Mess
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......@@ -647,7 +647,7 @@ class PROTOC_EXPORT CodeGeneratorResponse : public ::google::protobuf::Message /
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
......@@ -359,6 +359,22 @@ bool Generator::Generate(const FileDescriptor* file,
return !printer.failed();
}
// BEGIN GOOGLE-INTERNAL
// Strip the google3.third_party.py. prefix off of a module name as we
// NEVER want that invalid module import path to be generated in google3.
// Our sys.path has google3/third_party/py/ in it. All modules from
// that tree need to be imported using just their own name.
// See http://go/ThirdPartyPython
void StripThirdPartyPy(string* module_name) {
const string third_party_py_prefix = "google3.third_party.py.";
int len = third_party_py_prefix.length();
if (module_name->compare(0, len,
third_party_py_prefix, 0,
len) == 0) {
*module_name = module_name->erase(0, len);
}
}
// END GOOGLE-INTERNAL
// Prints Python imports for all modules imported by |file|.
void Generator::PrintImports() const {
......@@ -367,6 +383,9 @@ void Generator::PrintImports() const {
string module_name = ModuleName(filename);
string module_alias = ModuleAlias(filename);
// BEGIN GOOGLE-INTERNAL
StripThirdPartyPy(&module_name);
// END GOOGLE-INTERNAL
if (ContainsPythonKeyword(module_name)) {
// If the module path contains a Python keyword, we have to quote the
// module name and import it using importlib. Otherwise the usual kind of
......@@ -397,6 +416,9 @@ void Generator::PrintImports() const {
// Print public imports.
for (int i = 0; i < file_->public_dependency_count(); ++i) {
string module_name = ModuleName(file_->public_dependency(i)->name());
// BEGIN GOOGLE-INTERNAL
StripThirdPartyPy(&module_name);
// END GOOGLE-INTERNAL
printer_->Print("from $module$ import *\n", "module", module_name);
}
printer_->Print("\n");
......
......@@ -60,9 +60,6 @@
// Author: kenton@google.com (Kenton Varda)
#ifndef GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__
#define GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__
#include <vector>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/io/zero_copy_stream.h>
......@@ -94,5 +91,3 @@ class ZipWriter {
} // namespace compiler
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_COMPILER_ZIP_WRITER_H__
......@@ -5073,10 +5073,12 @@ void DescriptorBuilder::CheckEnumValueUniqueness(
if (!inserted && insert_result.first->second->name() != value->name() &&
insert_result.first->second->number() != value->number()) {
string error_message =
"When enum name is stripped and label is PascalCased (" + stripped +
"), this value label conflicts with " + values[stripped]->name() +
". This will make the proto fail to compile for some languages, such "
"as C#.";
"Enum name " + value->name() + " has the same name as " +
values[stripped]->name() +
" if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please "
"assign the same numeric value to both enums.";
// There are proto2 enums out there with conflicting names, so to preserve
// compatibility we issue only a warning for proto2.
if (result->file()->syntax() == FileDescriptor::SYNTAX_PROTO2) {
......
......@@ -232,6 +232,8 @@ class PROTOBUF_EXPORT LazyDescriptor {
// Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT Descriptor {
public:
typedef DescriptorProto Proto;
// The name of the message type, not including its scope.
const std::string& name() const;
......@@ -345,6 +347,8 @@ class PROTOBUF_EXPORT Descriptor {
// A range of field numbers which are designated for third-party
// extensions.
struct ExtensionRange {
typedef DescriptorProto_ExtensionRange Proto;
typedef ExtensionRangeOptions OptionsType;
// See Descriptor::CopyTo().
......@@ -511,6 +515,8 @@ class PROTOBUF_EXPORT Descriptor {
// Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT FieldDescriptor {
public:
typedef FieldDescriptorProto Proto;
// Identifies a field type. 0 is reserved for errors. The order is weird
// for historical reasons. Types 12 and up are new in proto2.
enum Type {
......@@ -821,6 +827,8 @@ class PROTOBUF_EXPORT FieldDescriptor {
// Describes a oneof defined in a message type.
class PROTOBUF_EXPORT OneofDescriptor {
public:
typedef OneofDescriptorProto Proto;
const std::string& name() const; // Name of this oneof.
const std::string& full_name() const; // Fully-qualified name of the oneof.
......@@ -895,6 +903,8 @@ class PROTOBUF_EXPORT OneofDescriptor {
// to construct your own descriptors.
class PROTOBUF_EXPORT EnumDescriptor {
public:
typedef EnumDescriptorProto Proto;
// The name of this enum type in the containing scope.
const std::string& name() const;
......@@ -1051,6 +1061,8 @@ class PROTOBUF_EXPORT EnumDescriptor {
// your own descriptors.
class PROTOBUF_EXPORT EnumValueDescriptor {
public:
typedef EnumValueDescriptorProto Proto;
const std::string& name() const; // Name of this enum constant.
int index() const; // Index within the enums's Descriptor.
int number() const; // Numeric value of this enum constant.
......@@ -1129,6 +1141,8 @@ class PROTOBUF_EXPORT EnumValueDescriptor {
// ServiceDescriptor. Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT ServiceDescriptor {
public:
typedef ServiceDescriptorProto Proto;
// The name of the service, not including its containing scope.
const std::string& name() const;
// The fully-qualified name of the service, scope delimited by periods.
......@@ -1209,6 +1223,8 @@ class PROTOBUF_EXPORT ServiceDescriptor {
// own descriptors.
class PROTOBUF_EXPORT MethodDescriptor {
public:
typedef MethodDescriptorProto Proto;
// Name of this method, not including containing scope.
const std::string& name() const;
// The fully-qualified name of the method, scope delimited by periods.
......@@ -1294,6 +1310,8 @@ class PROTOBUF_EXPORT MethodDescriptor {
// descriptor->file(). Use DescriptorPool to construct your own descriptors.
class PROTOBUF_EXPORT FileDescriptor {
public:
typedef FileDescriptorProto Proto;
// The filename, relative to the source tree.
// e.g. "foo/bar/baz.proto"
const std::string& name() const;
......
This diff is collapsed.
This diff is collapsed.
......@@ -6005,6 +6005,34 @@ TEST_F(ValidationErrorTest, MapEntryConflictsWithEnum) {
"with an existing enum type.\n");
}
TEST_F(ValidationErrorTest, EnumValuesConflictWithDifferentCasing) {
BuildFileWithErrors(
"syntax: 'proto3'"
"name: 'foo.proto' "
"enum_type {"
" name: 'FooEnum' "
" value { name: 'BAR' number: 0 }"
" value { name: 'bar' number: 1 }"
"}",
"foo.proto: bar: NAME: Enum name bar has the same name as BAR "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
// Not an error because both enums are mapped to the same value.
BuildFile(
"syntax: 'proto3'"
"name: 'foo.proto' "
"enum_type {"
" name: 'FooEnum' "
" options { allow_alias: true }"
" value { name: 'UNKNOWN' number: 0 }"
" value { name: 'BAR' number: 1 }"
" value { name: 'bar' number: 1 }"
"}");
}
TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
BuildFileWithErrors(
"syntax: 'proto3'"
......@@ -6014,9 +6042,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM_BAZ' number: 0 }"
" value { name: 'BAZ' number: 1 }"
"}",
"foo.proto: BAZ: NAME: When enum name is stripped and label is "
"PascalCased (Baz), this value label conflicts with FOO_ENUM_BAZ. This "
"will make the proto fail to compile for some languages, such as C#.\n");
"foo.proto: BAZ: NAME: Enum name BAZ has the same name as FOO_ENUM_BAZ "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
......@@ -6026,9 +6056,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOOENUM_BAZ' number: 0 }"
" value { name: 'BAZ' number: 1 }"
"}",
"foo.proto: BAZ: NAME: When enum name is stripped and label is "
"PascalCased (Baz), this value label conflicts with FOOENUM_BAZ. This "
"will make the proto fail to compile for some languages, such as C#.\n");
"foo.proto: BAZ: NAME: Enum name BAZ has the same name as FOOENUM_BAZ "
"if you ignore case and strip out the enum name prefix (if any). "
"This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
......@@ -6038,10 +6070,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM_BAR_BAZ' number: 0 }"
" value { name: 'BAR__BAZ' number: 1 }"
"}",
"foo.proto: BAR__BAZ: NAME: When enum name is stripped and label is "
"PascalCased (BarBaz), this value label conflicts with "
"FOO_ENUM_BAR_BAZ. This will make the proto fail to compile for some "
"languages, such as C#.\n");
"foo.proto: BAR__BAZ: NAME: Enum name BAR__BAZ has the same name as "
"FOO_ENUM_BAR_BAZ if you ignore case and strip out the enum name prefix "
"(if any). This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
BuildFileWithErrors(
"syntax: 'proto3'"
......@@ -6051,10 +6084,11 @@ TEST_F(ValidationErrorTest, EnumValuesConflictWhenPrefixesStripped) {
" value { name: 'FOO_ENUM__BAR_BAZ' number: 0 }"
" value { name: 'BAR_BAZ' number: 1 }"
"}",
"foo.proto: BAR_BAZ: NAME: When enum name is stripped and label is "
"PascalCased (BarBaz), this value label conflicts with "
"FOO_ENUM__BAR_BAZ. This will make the proto fail to compile for some "
"languages, such as C#.\n");
"foo.proto: BAR_BAZ: NAME: Enum name BAR_BAZ has the same name as "
"FOO_ENUM__BAR_BAZ if you ignore case and strip out the enum name prefix "
"(if any). This is error-prone and can lead to undefined behavior. "
"Please avoid doing this. If you are using allow_alias, please assign "
"the same numeric value to both enums.\n");
// This isn't an error because the underscore will cause the PascalCase to
// differ by case (BarBaz vs. Barbaz).
......
......@@ -182,14 +182,14 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
// int64 seconds = 1;
case 1: {
if (static_cast<::google::protobuf::uint8>(tag) != 8) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int64 value = val;
msg->set_seconds(value);
......@@ -199,14 +199,14 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
case 2: {
if (static_cast<::google::protobuf::uint8>(tag) != 16) goto handle_unusual;
::google::protobuf::uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = ::google::protobuf::io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
::google::protobuf::int32 value = val;
msg->set_nanos(value);
break;
}
default: {
handle_unusual: (void)&&handle_unusual;
handle_unusual:
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
......@@ -220,13 +220,6 @@ const char* Duration::_InternalParse(const char* begin, const char* end, void* o
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Duration::MergePartialFromCodedStream(
......@@ -310,8 +303,7 @@ void Duration::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Duration::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Duration)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
......
......@@ -139,7 +139,7 @@ class PROTOBUF_EXPORT Duration : public ::google::protobuf::Message /* @@protoc_
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
......@@ -168,11 +168,10 @@ const char* Empty::_InternalParse(const char* begin, const char* end, void* obje
::google::protobuf::internal::ParseFunc parser_till_end; (void)parser_till_end;
auto ptr = begin;
while (ptr < end) {
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = ::google::protobuf::io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
switch (tag >> 3) {
default: {
handle_unusual: (void)&&handle_unusual;
if ((tag & 7) == 4 || tag == 0) {
ctx->EndGroup(tag);
return ptr;
......@@ -186,13 +185,6 @@ const char* Empty::_InternalParse(const char* begin, const char* end, void* obje
} // switch
} // while
return ptr;
len_delim_till_end: (void)&&len_delim_till_end;
return ctx->StoreAndTailCall(ptr, end, {_InternalParse, msg},
{parser_till_end, object}, size);
group_continues: (void)&&group_continues;
GOOGLE_DCHECK(ptr >= end);
GOOGLE_PROTOBUF_PARSER_ASSERT(ctx->StoreGroup({_InternalParse, msg}, {parser_till_end, object}, depth, tag));
return ptr;
}
#else // GOOGLE_PROTOBUF_ENABLE_EXPERIMENTAL_PARSER
bool Empty::MergePartialFromCodedStream(
......@@ -235,8 +227,7 @@ void Empty::SerializeWithCachedSizes(
}
::google::protobuf::uint8* Empty::InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const {
(void)deterministic; // Unused
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:google.protobuf.Empty)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
......
......@@ -139,7 +139,7 @@ class PROTOBUF_EXPORT Empty : public ::google::protobuf::Message /* @@protoc_ins
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
......@@ -48,6 +48,7 @@
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/parse_context.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/port.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/wire_format_lite.h>
......@@ -461,13 +462,11 @@ class PROTOBUF_EXPORT ExtensionSet {
// Returns a pointer past the last written byte.
uint8* InternalSerializeWithCachedSizesToArray(int start_field_number,
int end_field_number,
bool deterministic,
uint8* target) const;
// Like above but serializes in MessageSet format.
void SerializeMessageSetWithCachedSizes(io::CodedOutputStream* output) const;
uint8* InternalSerializeMessageSetWithCachedSizesToArray(bool deterministic,
uint8* target) const;
uint8* InternalSerializeMessageSetWithCachedSizesToArray(uint8* target) const;
// For backward-compatibility, versions of two of the above methods that
// serialize deterministically iff SetDefaultSerializationDeterministic()
......@@ -531,12 +530,6 @@ class PROTOBUF_EXPORT ExtensionSet {
virtual void WriteMessage(int number,
io::CodedOutputStream* output) const = 0;
virtual uint8* WriteMessageToArray(int number, uint8* target) const = 0;
virtual uint8* InternalWriteMessageToArray(int number, bool,
uint8* target) const {
// TODO(gpike): make this pure virtual. This is a placeholder because we
// need to update third_party/upb, for example.
return WriteMessageToArray(number, target);
}
private:
virtual void UnusedKeyMethod(); // Dummy key method to avoid weak vtable.
......@@ -606,12 +599,11 @@ class PROTOBUF_EXPORT ExtensionSet {
void SerializeFieldWithCachedSizes(int number,
io::CodedOutputStream* output) const;
uint8* InternalSerializeFieldWithCachedSizesToArray(int number,
bool deterministic,
uint8* target) const;
void SerializeMessageSetItemWithCachedSizes(
int number, io::CodedOutputStream* output) const;
uint8* InternalSerializeMessageSetItemWithCachedSizesToArray(
int number, bool deterministic, uint8* target) const;
int number, uint8* target) const;
size_t ByteSize(int number) const;
size_t MessageSetItemByteSize(int number) const;
void Clear();
......@@ -819,11 +811,10 @@ const char* ParseMessageSet(const char* begin, const char* end, Msg* msg,
ExtensionSet* ext, Metadata* metadata,
internal::ParseContext* ctx) {
auto ptr = begin;
int depth;
(void)depth;
int depth = 0;
while (ptr < end) {
uint32 tag;
ptr = Varint::Parse32Inline(ptr, &tag);
ptr = io::Parse32(ptr, &tag);
GOOGLE_PROTOBUF_PARSER_ASSERT(ptr);
if (tag == WireFormatLite::kMessageSetItemStartTag) {
ctx->extra_parse_data().payload.clear();
......@@ -848,6 +839,7 @@ const char* ParseMessageSet(const char* begin, const char* end, Msg* msg,
}
return ptr;
}
#endif
// These are just for convenience...
......
This diff is collapsed.
......@@ -92,7 +92,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
#define HANDLE_VARINT_TYPE(UPPERCASE, CPP_CAMELCASE) \
case WireFormatLite::TYPE_##UPPERCASE: { \
uint64 value; \
ptr = Varint::Parse64(ptr, &value); \
ptr = io::Parse64(ptr, &value); \
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true)); \
if (extension.is_repeated) { \
Add##CPP_CAMELCASE(number, WireFormatLite::TYPE_##UPPERCASE, \
......@@ -111,7 +111,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
#define HANDLE_SVARINT_TYPE(UPPERCASE, CPP_CAMELCASE, SIZE) \
case WireFormatLite::TYPE_##UPPERCASE: { \
uint64 val; \
ptr = Varint::Parse64(ptr, &val); \
ptr = io::Parse64(ptr, &val); \
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true)); \
auto value = WireFormatLite::ZigZagDecode##SIZE(val); \
if (extension.is_repeated) { \
......@@ -151,7 +151,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
case WireFormatLite::TYPE_ENUM: {
uint64 val;
ptr = Varint::Parse64(ptr, &val);
ptr = io::Parse64(ptr, &val);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true));
int value = val;
......@@ -221,7 +221,7 @@ std::pair<const char*, bool> ExtensionSet::ParseFieldWithExtensionInfo(
length_delim:
uint32 size;
ptr = Varint::Parse32Inline(ptr, &size);
ptr = io::Parse32(ptr, &size);
GOOGLE_PROTOBUF_ASSERT_RETURN(ptr, std::make_pair(nullptr, true));
if (size > end - ptr) goto len_delim_till_end;
{
......
This diff is collapsed.
......@@ -139,7 +139,7 @@ class PROTOBUF_EXPORT FieldMask : public ::google::protobuf::Message /* @@protoc
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const final;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* target) const final;
::google::protobuf::uint8* target) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
......
......@@ -670,8 +670,11 @@ class GeneratedMessageReflection final : public Reflection {
Message* sub_message,
const FieldDescriptor* field) const;
internal::MapFieldBase* MapData(Message* message,
const FieldDescriptor* field) const override;
internal::MapFieldBase* MutableMapData(
Message* message, const FieldDescriptor* field) const override;
const internal::MapFieldBase* GetMapData(
const Message& message, const FieldDescriptor* field) const override;
friend inline // inline so nobody can call this function.
void
......
......@@ -34,7 +34,6 @@
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/wire_format_lite.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/port_def.inc>
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment