author     orivej <orivej@yandex-team.ru>  2022-02-10 16:45:01 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:45:01 +0300
commit     2d37894b1b037cf24231090eda8589bbb44fb6fc (patch)
tree       be835aa92c6248212e705f25388ebafcf84bc7a1 /contrib/python/protobuf/py2
parent     718c552901d703c502ccbefdfc3c9028d608b947 (diff)
download   ydb-2d37894b1b037cf24231090eda8589bbb44fb6fc.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/protobuf/py2')
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/descriptor.py | 76
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/descriptor_database.py | 10
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/descriptor_pool.py | 508
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/api_implementation.py | 52
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/containers.py | 36
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/decoder.py | 102
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/encoder.py | 184
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/python_message.py | 58
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/type_checkers.py | 68
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/well_known_types.py | 118
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/internal/wire_format.py | 4
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/json_format.py | 194
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/message.py | 8
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/message_factory.py | 16
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.cc | 56
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.h | 2
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.cc | 162
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.h | 14
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/extension_dict.cc | 104
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/map_container.cc | 144
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/map_container.h | 6
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/message.cc | 660
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/message.h | 32
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.cc | 444
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.h | 190
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/repeated_composite_container.cc | 24
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/repeated_scalar_container.cc | 10
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/safe_numerics.h | 312
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/pyext/scoped_pyobject_ptr.h | 66
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/reflection.py | 4
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/symbol_database.py | 46
-rw-r--r--  contrib/python/protobuf/py2/google/protobuf/text_format.py | 174
32 files changed, 1942 insertions, 1942 deletions
diff --git a/contrib/python/protobuf/py2/google/protobuf/descriptor.py b/contrib/python/protobuf/py2/google/protobuf/descriptor.py
index 7c0b2a752d..70fdae16ff 100644
--- a/contrib/python/protobuf/py2/google/protobuf/descriptor.py
+++ b/contrib/python/protobuf/py2/google/protobuf/descriptor.py
@@ -465,7 +465,7 @@ class FieldDescriptor(DescriptorBase):
containing_oneof (OneofDescriptor): If the field is a member of a oneof
union, contains its descriptor. Otherwise, None.
-
+
file (FileDescriptor): Reference to file descriptor.
"""
@@ -552,7 +552,7 @@ class FieldDescriptor(DescriptorBase):
default_value, message_type, enum_type, containing_type,
is_extension, extension_scope, options=None,
serialized_options=None,
- has_default_value=True, containing_oneof=None, json_name=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
file=None, create_key=None): # pylint: disable=redefined-builtin
_message.Message._CheckCalledFromGeneratedFile()
if is_extension:
@@ -564,7 +564,7 @@ class FieldDescriptor(DescriptorBase):
default_value, message_type, enum_type, containing_type,
is_extension, extension_scope, options=None,
serialized_options=None,
- has_default_value=True, containing_oneof=None, json_name=None,
+ has_default_value=True, containing_oneof=None, json_name=None,
file=None, create_key=None): # pylint: disable=redefined-builtin
"""The arguments are as described in the description of FieldDescriptor
attributes above.
@@ -580,12 +580,12 @@ class FieldDescriptor(DescriptorBase):
options, serialized_options, 'FieldOptions')
self.name = name
self.full_name = full_name
- self.file = file
+ self.file = file
self._camelcase_name = None
- if json_name is None:
- self.json_name = _ToJsonName(name)
- else:
- self.json_name = json_name
+ if json_name is None:
+ self.json_name = _ToJsonName(name)
+ else:
+ self.json_name = json_name
self.index = index
self.number = number
self.type = type
@@ -1051,31 +1051,31 @@ def _ToCamelCase(name):
return ''.join(result)
-def _OptionsOrNone(descriptor_proto):
- """Returns the value of the field `options`, or None if it is not set."""
- if descriptor_proto.HasField('options'):
- return descriptor_proto.options
- else:
- return None
-
-
-def _ToJsonName(name):
- """Converts name to Json name and returns it."""
- capitalize_next = False
- result = []
-
- for c in name:
- if c == '_':
- capitalize_next = True
- elif capitalize_next:
- result.append(c.upper())
- capitalize_next = False
- else:
- result += c
-
- return ''.join(result)
-
-
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _ToJsonName(name):
+ """Converts name to Json name and returns it."""
+ capitalize_next = False
+ result = []
+
+ for c in name:
+ if c == '_':
+ capitalize_next = True
+ elif capitalize_next:
+ result.append(c.upper())
+ capitalize_next = False
+ else:
+ result += c
+
+ return ''.join(result)
+
+
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
syntax=None):
"""Make a protobuf Descriptor given a DescriptorProto protobuf.
@@ -1154,10 +1154,10 @@ def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
full_name = '.'.join(full_message_name + [field_proto.name])
enum_desc = None
nested_desc = None
- if field_proto.json_name:
- json_name = field_proto.json_name
- else:
- json_name = None
+ if field_proto.json_name:
+ json_name = field_proto.json_name
+ else:
+ json_name = None
if field_proto.HasField('type_name'):
type_name = field_proto.type_name
full_type_name = '.'.join(full_message_name +
@@ -1172,7 +1172,7 @@ def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
field_proto.number, field_proto.type,
FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
field_proto.label, None, nested_desc, enum_desc, None, False, None,
- options=_OptionsOrNone(field_proto), has_default_value=False,
+ options=_OptionsOrNone(field_proto), has_default_value=False,
json_name=json_name, create_key=_internal_create_key)
fields.append(field)
diff --git a/contrib/python/protobuf/py2/google/protobuf/descriptor_database.py b/contrib/python/protobuf/py2/google/protobuf/descriptor_database.py
index 3e1332e636..073eddc711 100644
--- a/contrib/python/protobuf/py2/google/protobuf/descriptor_database.py
+++ b/contrib/python/protobuf/py2/google/protobuf/descriptor_database.py
@@ -56,8 +56,8 @@ class DescriptorDatabase(object):
Args:
file_desc_proto: The FileDescriptorProto to add.
Raises:
- DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
- add a proto with the same name but different definition than an
+ DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
+ add a proto with the same name but different definition than an
existing proto in the database.
"""
proto_name = file_desc_proto.name
@@ -69,7 +69,7 @@ class DescriptorDatabase(object):
else:
return
- # Add all the top-level descriptors to the index.
+ # Add all the top-level descriptors to the index.
package = file_desc_proto.package
for message in file_desc_proto.message_type:
for name in _ExtractSymbols(message, package):
@@ -81,7 +81,7 @@ class DescriptorDatabase(object):
'.'.join((package, enum_value.name))] = file_desc_proto
for extension in file_desc_proto.extension:
self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
- for service in file_desc_proto.service:
+ for service in file_desc_proto.service:
self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
def FindFileByName(self, name):
@@ -168,7 +168,7 @@ def _ExtractSymbols(desc_proto, package):
Yields:
The fully qualified name found in the descriptor.
"""
- message_name = package + '.' + desc_proto.name if package else desc_proto.name
+ message_name = package + '.' + desc_proto.name if package else desc_proto.name
yield message_name
for nested_type in desc_proto.nested_type:
for symbol in _ExtractSymbols(nested_type, message_name):
diff --git a/contrib/python/protobuf/py2/google/protobuf/descriptor_pool.py b/contrib/python/protobuf/py2/google/protobuf/descriptor_pool.py
index 0f382c833b..de9100b09c 100644
--- a/contrib/python/protobuf/py2/google/protobuf/descriptor_pool.py
+++ b/contrib/python/protobuf/py2/google/protobuf/descriptor_pool.py
@@ -57,15 +57,15 @@ directly instead of this class.
__author__ = 'matthewtoia@google.com (Matt Toia)'
-import collections
+import collections
import warnings
-
+
from google.protobuf import descriptor
from google.protobuf import descriptor_database
from google.protobuf import text_encoding
-_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
+_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
def _Deprecated(func):
@@ -99,22 +99,22 @@ def _NormalizeFullyQualifiedName(name):
return name.lstrip('.')
-def _OptionsOrNone(descriptor_proto):
- """Returns the value of the field `options`, or None if it is not set."""
- if descriptor_proto.HasField('options'):
- return descriptor_proto.options
- else:
- return None
-
-
-def _IsMessageSetExtension(field):
- return (field.is_extension and
- field.containing_type.has_options and
- field.containing_type.GetOptions().message_set_wire_format and
- field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
- field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
-
-
+def _OptionsOrNone(descriptor_proto):
+ """Returns the value of the field `options`, or None if it is not set."""
+ if descriptor_proto.HasField('options'):
+ return descriptor_proto.options
+ else:
+ return None
+
+
+def _IsMessageSetExtension(field):
+ return (field.is_extension and
+ field.containing_type.has_options and
+ field.containing_type.GetOptions().message_set_wire_format and
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
+ field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
+
+
class DescriptorPool(object):
"""A collection of protobufs dynamically constructed by descriptor protos."""
@@ -141,18 +141,18 @@ class DescriptorPool(object):
self._descriptor_db = descriptor_db
self._descriptors = {}
self._enum_descriptors = {}
- self._service_descriptors = {}
+ self._service_descriptors = {}
self._file_descriptors = {}
- self._toplevel_extensions = {}
+ self._toplevel_extensions = {}
# TODO(jieluo): Remove _file_desc_by_toplevel_extension after
# maybe year 2020 for compatibility issue (with 3.4.1 only).
- self._file_desc_by_toplevel_extension = {}
+ self._file_desc_by_toplevel_extension = {}
self._top_enum_values = {}
- # We store extensions in two two-level mappings: The first key is the
- # descriptor of the message being extended, the second key is the extension
- # full name or its tag number.
- self._extensions_by_name = collections.defaultdict(dict)
- self._extensions_by_number = collections.defaultdict(dict)
+ # We store extensions in two two-level mappings: The first key is the
+ # descriptor of the message being extended, the second key is the extension
+ # full name or its tag number.
+ self._extensions_by_name = collections.defaultdict(dict)
+ self._extensions_by_number = collections.defaultdict(dict)
def _CheckConflictRegister(self, desc, desc_name, file_name):
"""Check if the descriptor name conflicts with another of the same name.
@@ -238,7 +238,7 @@ class DescriptorPool(object):
self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
self._descriptors[desc.full_name] = desc
- self._AddFileDescriptor(desc.file)
+ self._AddFileDescriptor(desc.file)
# Add EnumDescriptor to descriptor pool is dreprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@@ -250,7 +250,7 @@ class DescriptorPool(object):
def _AddEnumDescriptor(self, enum_desc):
"""Adds an EnumDescriptor to the pool.
- This method also registers the FileDescriptor associated with the enum.
+ This method also registers the FileDescriptor associated with the enum.
Args:
enum_desc: An EnumDescriptor.
@@ -279,78 +279,78 @@ class DescriptorPool(object):
'.'.join((package, enum_value.name)))
self._CheckConflictRegister(enum_value, full_name, file_name)
self._top_enum_values[full_name] = enum_value
- self._AddFileDescriptor(enum_desc.file)
+ self._AddFileDescriptor(enum_desc.file)
# Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
- def AddServiceDescriptor(self, service_desc):
+ def AddServiceDescriptor(self, service_desc):
self._AddServiceDescriptor(service_desc)
# Never call this method. It is for internal usage only.
def _AddServiceDescriptor(self, service_desc):
- """Adds a ServiceDescriptor to the pool.
-
- Args:
- service_desc: A ServiceDescriptor.
- """
-
- if not isinstance(service_desc, descriptor.ServiceDescriptor):
- raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
-
+ """Adds a ServiceDescriptor to the pool.
+
+ Args:
+ service_desc: A ServiceDescriptor.
+ """
+
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
+
self._CheckConflictRegister(service_desc, service_desc.full_name,
service_desc.file.name)
- self._service_descriptors[service_desc.full_name] = service_desc
-
+ self._service_descriptors[service_desc.full_name] = service_desc
+
# Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
- def AddExtensionDescriptor(self, extension):
+ def AddExtensionDescriptor(self, extension):
self._AddExtensionDescriptor(extension)
# Never call this method. It is for internal usage only.
def _AddExtensionDescriptor(self, extension):
- """Adds a FieldDescriptor describing an extension to the pool.
-
- Args:
- extension: A FieldDescriptor.
-
- Raises:
- AssertionError: when another extension with the same number extends the
- same message.
- TypeError: when the specified extension is not a
- descriptor.FieldDescriptor.
- """
- if not (isinstance(extension, descriptor.FieldDescriptor) and
- extension.is_extension):
- raise TypeError('Expected an extension descriptor.')
-
- if extension.extension_scope is None:
- self._toplevel_extensions[extension.full_name] = extension
-
- try:
- existing_desc = self._extensions_by_number[
- extension.containing_type][extension.number]
- except KeyError:
- pass
- else:
- if extension is not existing_desc:
- raise AssertionError(
- 'Extensions "%s" and "%s" both try to extend message type "%s" '
- 'with field number %d.' %
- (extension.full_name, existing_desc.full_name,
- extension.containing_type.full_name, extension.number))
-
- self._extensions_by_number[extension.containing_type][
- extension.number] = extension
- self._extensions_by_name[extension.containing_type][
- extension.full_name] = extension
-
- # Also register MessageSet extensions with the type name.
- if _IsMessageSetExtension(extension):
- self._extensions_by_name[extension.containing_type][
- extension.message_type.full_name] = extension
-
+ """Adds a FieldDescriptor describing an extension to the pool.
+
+ Args:
+ extension: A FieldDescriptor.
+
+ Raises:
+ AssertionError: when another extension with the same number extends the
+ same message.
+ TypeError: when the specified extension is not a
+ descriptor.FieldDescriptor.
+ """
+ if not (isinstance(extension, descriptor.FieldDescriptor) and
+ extension.is_extension):
+ raise TypeError('Expected an extension descriptor.')
+
+ if extension.extension_scope is None:
+ self._toplevel_extensions[extension.full_name] = extension
+
+ try:
+ existing_desc = self._extensions_by_number[
+ extension.containing_type][extension.number]
+ except KeyError:
+ pass
+ else:
+ if extension is not existing_desc:
+ raise AssertionError(
+ 'Extensions "%s" and "%s" both try to extend message type "%s" '
+ 'with field number %d.' %
+ (extension.full_name, existing_desc.full_name,
+ extension.containing_type.full_name, extension.number))
+
+ self._extensions_by_number[extension.containing_type][
+ extension.number] = extension
+ self._extensions_by_name[extension.containing_type][
+ extension.full_name] = extension
+
+ # Also register MessageSet extensions with the type name.
+ if _IsMessageSetExtension(extension):
+ self._extensions_by_name[extension.containing_type][
+ extension.message_type.full_name] = extension
+
@_Deprecated
def AddFileDescriptor(self, file_desc):
self._InternalAddFileDescriptor(file_desc)
@@ -366,24 +366,24 @@ class DescriptorPool(object):
file_desc: A FileDescriptor.
"""
- self._AddFileDescriptor(file_desc)
- # TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
+ self._AddFileDescriptor(file_desc)
+ # TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
# FieldDescriptor.file is added in code gen. Remove this solution after
# maybe 2020 for compatibility reason (with 3.4.1 only).
- for extension in file_desc.extensions_by_name.values():
- self._file_desc_by_toplevel_extension[
- extension.full_name] = file_desc
-
- def _AddFileDescriptor(self, file_desc):
- """Adds a FileDescriptor to the pool, non-recursively.
-
- If the FileDescriptor contains messages or enums, the caller must explicitly
- register them.
-
- Args:
- file_desc: A FileDescriptor.
- """
-
+ for extension in file_desc.extensions_by_name.values():
+ self._file_desc_by_toplevel_extension[
+ extension.full_name] = file_desc
+
+ def _AddFileDescriptor(self, file_desc):
+ """Adds a FileDescriptor to the pool, non-recursively.
+
+ If the FileDescriptor contains messages or enums, the caller must explicitly
+ register them.
+
+ Args:
+ file_desc: A FileDescriptor.
+ """
+
if not isinstance(file_desc, descriptor.FileDescriptor):
raise TypeError('Expected instance of descriptor.FileDescriptor.')
self._file_descriptors[file_desc.name] = file_desc
@@ -398,7 +398,7 @@ class DescriptorPool(object):
FileDescriptor: The descriptor for the named file.
Raises:
- KeyError: if the file cannot be found in the pool.
+ KeyError: if the file cannot be found in the pool.
"""
try:
@@ -428,7 +428,7 @@ class DescriptorPool(object):
symbol.
Raises:
- KeyError: if the file cannot be found in the pool.
+ KeyError: if the file cannot be found in the pool.
"""
symbol = _NormalizeFullyQualifiedName(symbol)
@@ -468,28 +468,28 @@ class DescriptorPool(object):
pass
try:
- return self._service_descriptors[symbol].file
- except KeyError:
- pass
-
- try:
+ return self._service_descriptors[symbol].file
+ except KeyError:
+ pass
+
+ try:
return self._top_enum_values[symbol].type.file
- except KeyError:
- pass
-
- try:
- return self._file_desc_by_toplevel_extension[symbol]
- except KeyError:
- pass
-
+ except KeyError:
+ pass
+
+ try:
+ return self._file_desc_by_toplevel_extension[symbol]
+ except KeyError:
+ pass
+
# Try fields, enum values and nested extensions inside a message.
top_name, _, sub_name = symbol.rpartition('.')
- try:
+ try:
message = self.FindMessageTypeByName(top_name)
assert (sub_name in message.extensions_by_name or
sub_name in message.fields_by_name or
sub_name in message.enum_values_by_name)
- return message.file
+ return message.file
except (KeyError, AssertionError):
raise KeyError('Cannot find a file containing %s' % symbol)
@@ -501,14 +501,14 @@ class DescriptorPool(object):
Returns:
Descriptor: The descriptor for the named type.
-
- Raises:
- KeyError: if the message cannot be found in the pool.
+
+ Raises:
+ KeyError: if the message cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._descriptors:
- self._FindFileContainingSymbolInDb(full_name)
+ self._FindFileContainingSymbolInDb(full_name)
return self._descriptors[full_name]
def FindEnumTypeByName(self, full_name):
@@ -519,14 +519,14 @@ class DescriptorPool(object):
Returns:
EnumDescriptor: The enum descriptor for the named type.
-
- Raises:
- KeyError: if the enum cannot be found in the pool.
+
+ Raises:
+ KeyError: if the enum cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._enum_descriptors:
- self._FindFileContainingSymbolInDb(full_name)
+ self._FindFileContainingSymbolInDb(full_name)
return self._enum_descriptors[full_name]
def FindFieldByName(self, full_name):
@@ -537,9 +537,9 @@ class DescriptorPool(object):
Returns:
FieldDescriptor: The field descriptor for the named field.
-
- Raises:
- KeyError: if the field cannot be found in the pool.
+
+ Raises:
+ KeyError: if the field cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
message_name, _, field_name = full_name.rpartition('.')
@@ -571,63 +571,63 @@ class DescriptorPool(object):
Returns:
FieldDescriptor: The field descriptor for the named extension.
-
- Raises:
- KeyError: if the extension cannot be found in the pool.
+
+ Raises:
+ KeyError: if the extension cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
- try:
- # The proto compiler does not give any link between the FileDescriptor
- # and top-level extensions unless the FileDescriptorProto is added to
- # the DescriptorDatabase, but this can impact memory usage.
- # So we registered these extensions by name explicitly.
- return self._toplevel_extensions[full_name]
- except KeyError:
- pass
+ try:
+ # The proto compiler does not give any link between the FileDescriptor
+ # and top-level extensions unless the FileDescriptorProto is added to
+ # the DescriptorDatabase, but this can impact memory usage.
+ # So we registered these extensions by name explicitly.
+ return self._toplevel_extensions[full_name]
+ except KeyError:
+ pass
message_name, _, extension_name = full_name.rpartition('.')
try:
# Most extensions are nested inside a message.
scope = self.FindMessageTypeByName(message_name)
except KeyError:
# Some extensions are defined at file scope.
- scope = self._FindFileContainingSymbolInDb(full_name)
+ scope = self._FindFileContainingSymbolInDb(full_name)
return scope.extensions_by_name[extension_name]
- def FindExtensionByNumber(self, message_descriptor, number):
- """Gets the extension of the specified message with the specified number.
-
+ def FindExtensionByNumber(self, message_descriptor, number):
+ """Gets the extension of the specified message with the specified number.
+
Extensions have to be registered to this pool by calling :func:`Add` or
:func:`AddExtensionDescriptor`.
-
- Args:
+
+ Args:
message_descriptor (Descriptor): descriptor of the extended message.
number (int): Number of the extension field.
-
- Returns:
+
+ Returns:
FieldDescriptor: The descriptor for the extension.
-
- Raises:
- KeyError: when no extension with the given number is known for the
- specified message.
- """
+
+ Raises:
+ KeyError: when no extension with the given number is known for the
+ specified message.
+ """
try:
return self._extensions_by_number[message_descriptor][number]
except KeyError:
self._TryLoadExtensionFromDB(message_descriptor, number)
return self._extensions_by_number[message_descriptor][number]
-
- def FindAllExtensions(self, message_descriptor):
+
+ def FindAllExtensions(self, message_descriptor):
"""Gets all the known extensions of a given message.
-
+
Extensions have to be registered to this pool by build related
:func:`Add` or :func:`AddExtensionDescriptor`.
-
- Args:
+
+ Args:
message_descriptor (Descriptor): Descriptor of the extended message.
-
- Returns:
+
+ Returns:
list[FieldDescriptor]: Field descriptors describing the extensions.
- """
+ """
# Fallback to descriptor db if FindAllExtensionNumbers is provided.
if self._descriptor_db and hasattr(
self._descriptor_db, 'FindAllExtensionNumbers'):
@@ -638,8 +638,8 @@ class DescriptorPool(object):
continue
self._TryLoadExtensionFromDB(message_descriptor, number)
- return list(self._extensions_by_number[message_descriptor].values())
-
+ return list(self._extensions_by_number[message_descriptor].values())
+
def _TryLoadExtensionFromDB(self, message_descriptor, number):
"""Try to Load extensions from descriptor db.
@@ -668,23 +668,23 @@ class DescriptorPool(object):
(file_proto.name, number))
warnings.warn(warn_msg, RuntimeWarning)
- def FindServiceByName(self, full_name):
- """Loads the named service descriptor from the pool.
-
- Args:
+ def FindServiceByName(self, full_name):
+ """Loads the named service descriptor from the pool.
+
+ Args:
full_name (str): The full name of the service descriptor to load.
-
- Returns:
+
+ Returns:
ServiceDescriptor: The service descriptor for the named service.
-
- Raises:
- KeyError: if the service cannot be found in the pool.
- """
- full_name = _NormalizeFullyQualifiedName(full_name)
- if full_name not in self._service_descriptors:
- self._FindFileContainingSymbolInDb(full_name)
- return self._service_descriptors[full_name]
-
+
+ Raises:
+ KeyError: if the service cannot be found in the pool.
+ """
+ full_name = _NormalizeFullyQualifiedName(full_name)
+ if full_name not in self._service_descriptors:
+ self._FindFileContainingSymbolInDb(full_name)
+ return self._service_descriptors[full_name]
+
def FindMethodByName(self, full_name):
"""Loads the named service method descriptor from the pool.
@@ -702,29 +702,29 @@ class DescriptorPool(object):
service_descriptor = self.FindServiceByName(service_name)
return service_descriptor.methods_by_name[method_name]
- def _FindFileContainingSymbolInDb(self, symbol):
- """Finds the file in descriptor DB containing the specified symbol.
-
- Args:
+ def _FindFileContainingSymbolInDb(self, symbol):
+ """Finds the file in descriptor DB containing the specified symbol.
+
+ Args:
symbol (str): The name of the symbol to search for.
-
- Returns:
+
+ Returns:
FileDescriptor: The file that contains the specified symbol.
-
- Raises:
- KeyError: if the file cannot be found in the descriptor database.
- """
- try:
- file_proto = self._internal_db.FindFileContainingSymbol(symbol)
- except KeyError as error:
- if self._descriptor_db:
- file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
- else:
- raise error
- if not file_proto:
- raise KeyError('Cannot find a file containing %s' % symbol)
- return self._ConvertFileProtoToFileDescriptor(file_proto)
-
+
+ Raises:
+ KeyError: if the file cannot be found in the descriptor database.
+ """
+ try:
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
+ except KeyError as error:
+ if self._descriptor_db:
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
+ else:
+ raise error
+ if not file_proto:
+ raise KeyError('Cannot find a file containing %s' % symbol)
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
+
def _ConvertFileProtoToFileDescriptor(self, file_proto):
"""Creates a FileDescriptor from a proto or returns a cached copy.
@@ -747,66 +747,66 @@ class DescriptorPool(object):
name=file_proto.name,
package=file_proto.package,
syntax=file_proto.syntax,
- options=_OptionsOrNone(file_proto),
+ options=_OptionsOrNone(file_proto),
serialized_pb=file_proto.SerializeToString(),
dependencies=direct_deps,
public_dependencies=public_deps,
# pylint: disable=protected-access
create_key=descriptor._internal_create_key)
- scope = {}
-
- # This loop extracts all the message and enum types from all the
- # dependencies of the file_proto. This is necessary to create the
- # scope of available message types when defining the passed in
- # file proto.
- for dependency in built_deps:
- scope.update(self._ExtractSymbols(
- dependency.message_types_by_name.values()))
- scope.update((_PrefixWithDot(enum.full_name), enum)
- for enum in dependency.enum_types_by_name.values())
-
- for message_type in file_proto.message_type:
- message_desc = self._ConvertMessageDescriptor(
- message_type, file_proto.package, file_descriptor, scope,
- file_proto.syntax)
- file_descriptor.message_types_by_name[message_desc.name] = (
- message_desc)
-
- for enum_type in file_proto.enum_type:
- file_descriptor.enum_types_by_name[enum_type.name] = (
- self._ConvertEnumDescriptor(enum_type, file_proto.package,
+ scope = {}
+
+ # This loop extracts all the message and enum types from all the
+ # dependencies of the file_proto. This is necessary to create the
+ # scope of available message types when defining the passed in
+ # file proto.
+ for dependency in built_deps:
+ scope.update(self._ExtractSymbols(
+ dependency.message_types_by_name.values()))
+ scope.update((_PrefixWithDot(enum.full_name), enum)
+ for enum in dependency.enum_types_by_name.values())
+
+ for message_type in file_proto.message_type:
+ message_desc = self._ConvertMessageDescriptor(
+ message_type, file_proto.package, file_descriptor, scope,
+ file_proto.syntax)
+ file_descriptor.message_types_by_name[message_desc.name] = (
+ message_desc)
+
+ for enum_type in file_proto.enum_type:
+ file_descriptor.enum_types_by_name[enum_type.name] = (
+ self._ConvertEnumDescriptor(enum_type, file_proto.package,
file_descriptor, None, scope, True))
- for index, extension_proto in enumerate(file_proto.extension):
- extension_desc = self._MakeFieldDescriptor(
- extension_proto, file_proto.package, index, file_descriptor,
- is_extension=True)
- extension_desc.containing_type = self._GetTypeFromScope(
- file_descriptor.package, extension_proto.extendee, scope)
- self._SetFieldType(extension_proto, extension_desc,
- file_descriptor.package, scope)
- file_descriptor.extensions_by_name[extension_desc.name] = (
- extension_desc)
+ for index, extension_proto in enumerate(file_proto.extension):
+ extension_desc = self._MakeFieldDescriptor(
+ extension_proto, file_proto.package, index, file_descriptor,
+ is_extension=True)
+ extension_desc.containing_type = self._GetTypeFromScope(
+ file_descriptor.package, extension_proto.extendee, scope)
+ self._SetFieldType(extension_proto, extension_desc,
+ file_descriptor.package, scope)
+ file_descriptor.extensions_by_name[extension_desc.name] = (
+ extension_desc)
self._file_desc_by_toplevel_extension[extension_desc.full_name] = (
file_descriptor)
- for desc_proto in file_proto.message_type:
- self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
+ for desc_proto in file_proto.message_type:
+ self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
- if file_proto.package:
- desc_proto_prefix = _PrefixWithDot(file_proto.package)
- else:
- desc_proto_prefix = ''
+ if file_proto.package:
+ desc_proto_prefix = _PrefixWithDot(file_proto.package)
+ else:
+ desc_proto_prefix = ''
- for desc_proto in file_proto.message_type:
- desc = self._GetTypeFromScope(
- desc_proto_prefix, desc_proto.name, scope)
- file_descriptor.message_types_by_name[desc_proto.name] = desc
+ for desc_proto in file_proto.message_type:
+ desc = self._GetTypeFromScope(
+ desc_proto_prefix, desc_proto.name, scope)
+ file_descriptor.message_types_by_name[desc_proto.name] = desc
- for index, service_proto in enumerate(file_proto.service):
- file_descriptor.services_by_name[service_proto.name] = (
- self._MakeServiceDescriptor(service_proto, index, scope,
- file_proto.package, file_descriptor))
+ for index, service_proto in enumerate(file_proto.service):
+ file_descriptor.services_by_name[service_proto.name] = (
+ self._MakeServiceDescriptor(service_proto, index, scope,
+ file_proto.package, file_descriptor))
self.Add(file_proto)
self._file_descriptors[file_proto.name] = file_descriptor
@@ -830,7 +830,7 @@ class DescriptorPool(object):
package: The package the proto should be located in.
file_desc: The file containing this message.
scope: Dict mapping short and full symbols to message and enum types.
- syntax: string indicating syntax of the file ("proto2" or "proto3")
+ syntax: string indicating syntax of the file ("proto2" or "proto3")
Returns:
The added descriptor.
@@ -857,10 +857,10 @@ class DescriptorPool(object):
self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
scope, False)
for enum in desc_proto.enum_type]
- fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
+ fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
for index, field in enumerate(desc_proto.field)]
extensions = [
- self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
+ self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
is_extension=True)
for index, extension in enumerate(desc_proto.extension)]
oneofs = [
@@ -885,7 +885,7 @@ class DescriptorPool(object):
nested_types=nested,
enum_types=enums,
extensions=extensions,
- options=_OptionsOrNone(desc_proto),
+ options=_OptionsOrNone(desc_proto),
is_extendable=is_extendable,
extension_ranges=extension_ranges,
file=file_desc,
@@ -962,7 +962,7 @@ class DescriptorPool(object):
return desc
def _MakeFieldDescriptor(self, field_proto, message_name, index,
- file_desc, is_extension=False):
+ file_desc, is_extension=False):
"""Creates a field descriptor from a FieldDescriptorProto.
For message and enum type fields, this method will do a look up
@@ -975,7 +975,7 @@ class DescriptorPool(object):
field_proto: The proto describing the field.
message_name: The name of the containing message.
index: Index of the field
- file_desc: The file containing the field descriptor.
+ file_desc: The file containing the field descriptor.
is_extension: Indication that this field is for an extension.
Returns:
@@ -1002,7 +1002,7 @@ class DescriptorPool(object):
default_value=None,
is_extension=is_extension,
extension_scope=None,
- options=_OptionsOrNone(field_proto),
+ options=_OptionsOrNone(field_proto),
file=file_desc,
# pylint: disable=protected-access
create_key=descriptor._internal_create_key)
@@ -1128,7 +1128,7 @@ class DescriptorPool(object):
name=value_proto.name,
index=index,
number=value_proto.number,
- options=_OptionsOrNone(value_proto),
+ options=_OptionsOrNone(value_proto),
type=None,
# pylint: disable=protected-access
create_key=descriptor._internal_create_key)
@@ -1166,7 +1166,7 @@ class DescriptorPool(object):
# pylint: disable=protected-access
create_key=descriptor._internal_create_key)
self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
- self._service_descriptors[service_name] = desc
+ self._service_descriptors[service_name] = desc
return desc
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/api_implementation.py b/contrib/python/protobuf/py2/google/protobuf/internal/api_implementation.py
index 65f4708c8c..be1af7df6b 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/api_implementation.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/api_implementation.py
@@ -114,27 +114,27 @@ if _implementation_version_str != '2':
_implementation_version = int(_implementation_version_str)
-# Detect if serialization should be deterministic by default
-try:
- # The presence of this module in a build allows the proto implementation to
- # be upgraded merely via build deps.
- #
- # NOTE: Merely importing this automatically enables deterministic proto
- # serialization for C++ code, but we still need to export it as a boolean so
- # that we can do the same for `_implementation_type == 'python'`.
- #
- # NOTE2: It is possible for C++ code to enable deterministic serialization by
- # default _without_ affecting Python code, if the C++ implementation is not in
- # use by this module. That is intended behavior, so we don't actually expose
- # this boolean outside of this module.
- #
- # pylint: disable=g-import-not-at-top,unused-import
- from google.protobuf import enable_deterministic_proto_serialization
- _python_deterministic_proto_serialization = True
-except ImportError:
- _python_deterministic_proto_serialization = False
-
-
+# Detect if serialization should be deterministic by default
+try:
+ # The presence of this module in a build allows the proto implementation to
+ # be upgraded merely via build deps.
+ #
+ # NOTE: Merely importing this automatically enables deterministic proto
+ # serialization for C++ code, but we still need to export it as a boolean so
+ # that we can do the same for `_implementation_type == 'python'`.
+ #
+ # NOTE2: It is possible for C++ code to enable deterministic serialization by
+ # default _without_ affecting Python code, if the C++ implementation is not in
+ # use by this module. That is intended behavior, so we don't actually expose
+ # this boolean outside of this module.
+ #
+ # pylint: disable=g-import-not-at-top,unused-import
+ from google.protobuf import enable_deterministic_proto_serialization
+ _python_deterministic_proto_serialization = True
+except ImportError:
+ _python_deterministic_proto_serialization = False
+
+
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
@@ -152,8 +152,8 @@ def _SetType(implementation_type):
# See comment on 'Type' above.
def Version():
return _implementation_version
-
-
-# For internal use only
-def IsPythonDefaultSerializationDeterministic():
- return _python_deterministic_proto_serialization
+
+
+# For internal use only
+def IsPythonDefaultSerializationDeterministic():
+ return _python_deterministic_proto_serialization
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/containers.py b/contrib/python/protobuf/py2/google/protobuf/internal/containers.py
index 7b13a801f4..92793490bb 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/containers.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/containers.py
@@ -285,7 +285,7 @@ class RepeatedScalarFieldContainer(BaseContainer):
new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
if new_values:
self._values.extend(new_values)
- self._message_listener.Modified()
+ self._message_listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
@@ -463,11 +463,11 @@ class ScalarMap(MutableMapping):
"""Simple, type-checked, dict-like container for holding repeated scalars."""
# Disallows assignment to other attributes.
- __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
- '_entry_descriptor']
+ __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
+ '_entry_descriptor']
- def __init__(self, message_listener, key_checker, value_checker,
- entry_descriptor):
+ def __init__(self, message_listener, key_checker, value_checker,
+ entry_descriptor):
"""
Args:
message_listener: A MessageListener implementation.
@@ -477,12 +477,12 @@ class ScalarMap(MutableMapping):
inserted into this container.
value_checker: A type_checkers.ValueChecker instance to run on values
inserted into this container.
- entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ entry_descriptor: The MessageDescriptor of a map entry: key and value.
"""
self._message_listener = message_listener
self._key_checker = key_checker
self._value_checker = value_checker
- self._entry_descriptor = entry_descriptor
+ self._entry_descriptor = entry_descriptor
self._values = {}
def __getitem__(self, key):
@@ -544,20 +544,20 @@ class ScalarMap(MutableMapping):
self._values.clear()
self._message_listener.Modified()
- def GetEntryClass(self):
- return self._entry_descriptor._concrete_class
+ def GetEntryClass(self):
+ return self._entry_descriptor._concrete_class
+
-
class MessageMap(MutableMapping):
"""Simple, type-checked, dict-like container for with submessage values."""
# Disallows assignment to other attributes.
__slots__ = ['_key_checker', '_values', '_message_listener',
- '_message_descriptor', '_entry_descriptor']
+ '_message_descriptor', '_entry_descriptor']
- def __init__(self, message_listener, message_descriptor, key_checker,
- entry_descriptor):
+ def __init__(self, message_listener, message_descriptor, key_checker,
+ entry_descriptor):
"""
Args:
message_listener: A MessageListener implementation.
@@ -567,12 +567,12 @@ class MessageMap(MutableMapping):
inserted into this container.
value_checker: A type_checkers.ValueChecker instance to run on values
inserted into this container.
- entry_descriptor: The MessageDescriptor of a map entry: key and value.
+ entry_descriptor: The MessageDescriptor of a map entry: key and value.
"""
self._message_listener = message_listener
self._message_descriptor = message_descriptor
self._key_checker = key_checker
- self._entry_descriptor = entry_descriptor
+ self._entry_descriptor = entry_descriptor
self._values = {}
def __getitem__(self, key):
@@ -653,9 +653,9 @@ class MessageMap(MutableMapping):
def clear(self):
self._values.clear()
self._message_listener.Modified()
-
- def GetEntryClass(self):
- return self._entry_descriptor._concrete_class
+
+ def GetEntryClass(self):
+ return self._entry_descriptor._concrete_class
class _UnknownField(object):
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/decoder.py b/contrib/python/protobuf/py2/google/protobuf/internal/decoder.py
index ea424c3732..6804986b6e 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/decoder.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/decoder.py
@@ -136,12 +136,12 @@ def _VarintDecoder(mask, result_type):
return DecodeVarint
-def _SignedVarintDecoder(bits, result_type):
+def _SignedVarintDecoder(bits, result_type):
"""Like _VarintDecoder() but decodes signed values."""
- signbit = 1 << (bits - 1)
- mask = (1 << bits) - 1
-
+ signbit = 1 << (bits - 1)
+ mask = (1 << bits) - 1
+
def DecodeVarint(buffer, pos):
result = 0
shift = 0
@@ -150,8 +150,8 @@ def _SignedVarintDecoder(bits, result_type):
result |= ((b & 0x7f) << shift)
pos += 1
if not (b & 0x80):
- result &= mask
- result = (result ^ signbit) - signbit
+ result &= mask
+ result = (result ^ signbit) - signbit
result = result_type(result)
return (result, pos)
shift += 7
@@ -164,11 +164,11 @@ def _SignedVarintDecoder(bits, result_type):
# (e.g. the C++ implementation) simpler.
_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
-_DecodeSignedVarint = _SignedVarintDecoder(64, long)
+_DecodeSignedVarint = _SignedVarintDecoder(64, long)
# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
-_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
+_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
def ReadTag(buffer, pos):
@@ -560,21 +560,21 @@ BoolDecoder = _ModifiedDecoder(
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
is_strict_utf8=False, clear_if_default=False):
- """Returns a decoder for a string field."""
+ """Returns a decoder for a string field."""
- local_DecodeVarint = _DecodeVarint
- local_unicode = six.text_type
+ local_DecodeVarint = _DecodeVarint
+ local_unicode = six.text_type
def _ConvertToUnicode(memview):
"""Convert byte to unicode."""
byte_str = memview.tobytes()
- try:
+ try:
value = local_unicode(byte_str, 'utf-8')
- except UnicodeDecodeError as e:
- # add more information to the error message and re-raise it.
- e.reason = '%s in field: %s' % (e, key.full_name)
- raise
-
+ except UnicodeDecodeError as e:
+ # add more information to the error message and re-raise it.
+ e.reason = '%s in field: %s' % (e, key.full_name)
+ raise
+
if is_strict_utf8 and six.PY2 and sys.maxunicode > _UCS2_MAXUNICODE:
# Only do the check for python2 ucs4 when is_strict_utf8 enabled
if _SURROGATE_PATTERN.search(value):
@@ -586,41 +586,41 @@ def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
return value
- assert not is_packed
- if is_repeated:
- tag_bytes = encoder.TagBytes(field_number,
- wire_format.WIRETYPE_LENGTH_DELIMITED)
- tag_len = len(tag_bytes)
- def DecodeRepeatedField(buffer, pos, end, message, field_dict):
- value = field_dict.get(key)
- if value is None:
- value = field_dict.setdefault(key, new_default(message))
- while 1:
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
- value.append(_ConvertToUnicode(buffer[pos:new_pos]))
- # Predict that the next tag is another copy of the same repeated field.
- pos = new_pos + tag_len
- if buffer[new_pos:pos] != tag_bytes or new_pos == end:
- # Prediction failed. Return.
- return new_pos
- return DecodeRepeatedField
- else:
- def DecodeField(buffer, pos, end, message, field_dict):
- (size, pos) = local_DecodeVarint(buffer, pos)
- new_pos = pos + size
- if new_pos > end:
- raise _DecodeError('Truncated string.')
+ assert not is_packed
+ if is_repeated:
+ tag_bytes = encoder.TagBytes(field_number,
+ wire_format.WIRETYPE_LENGTH_DELIMITED)
+ tag_len = len(tag_bytes)
+ def DecodeRepeatedField(buffer, pos, end, message, field_dict):
+ value = field_dict.get(key)
+ if value is None:
+ value = field_dict.setdefault(key, new_default(message))
+ while 1:
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated string.')
+ value.append(_ConvertToUnicode(buffer[pos:new_pos]))
+ # Predict that the next tag is another copy of the same repeated field.
+ pos = new_pos + tag_len
+ if buffer[new_pos:pos] != tag_bytes or new_pos == end:
+ # Prediction failed. Return.
+ return new_pos
+ return DecodeRepeatedField
+ else:
+ def DecodeField(buffer, pos, end, message, field_dict):
+ (size, pos) = local_DecodeVarint(buffer, pos)
+ new_pos = pos + size
+ if new_pos > end:
+ raise _DecodeError('Truncated string.')
if clear_if_default and not size:
field_dict.pop(key, None)
else:
field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
- return new_pos
- return DecodeField
-
-
+ return new_pos
+ return DecodeField
+
+
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
clear_if_default=False):
"""Returns a decoder for a bytes field."""
@@ -763,10 +763,10 @@ def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
-def MessageSetItemDecoder(descriptor):
+def MessageSetItemDecoder(descriptor):
"""Returns a decoder for a MessageSet item.
- The parameter is the message Descriptor.
+ The parameter is the message Descriptor.
The message set message looks like this:
message MessageSet {
@@ -827,7 +827,7 @@ def MessageSetItemDecoder(descriptor):
if message_start == -1:
raise _DecodeError('MessageSet item missing message.')
- extension = message.Extensions._FindExtensionByNumber(type_id)
+ extension = message.Extensions._FindExtensionByNumber(type_id)
# pylint: disable=protected-access
if extension is not None:
value = field_dict.get(extension)
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/encoder.py b/contrib/python/protobuf/py2/google/protobuf/internal/encoder.py
index e3a4a79848..0c016f3cfa 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/encoder.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/encoder.py
@@ -227,28 +227,28 @@ Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
BoolSizer = _FixedSizer(1)
-def StringSizer(field_number, is_repeated, is_packed):
- """Returns a sizer for a string field."""
-
- tag_size = _TagSize(field_number)
- local_VarintSize = _VarintSize
- local_len = len
- assert not is_packed
- if is_repeated:
- def RepeatedFieldSize(value):
- result = tag_size * len(value)
- for element in value:
- l = local_len(element.encode('utf-8'))
- result += local_VarintSize(l) + l
- return result
- return RepeatedFieldSize
- else:
- def FieldSize(value):
- l = local_len(value.encode('utf-8'))
- return tag_size + local_VarintSize(l) + l
- return FieldSize
-
-
+def StringSizer(field_number, is_repeated, is_packed):
+ """Returns a sizer for a string field."""
+
+ tag_size = _TagSize(field_number)
+ local_VarintSize = _VarintSize
+ local_len = len
+ assert not is_packed
+ if is_repeated:
+ def RepeatedFieldSize(value):
+ result = tag_size * len(value)
+ for element in value:
+ l = local_len(element.encode('utf-8'))
+ result += local_VarintSize(l) + l
+ return result
+ return RepeatedFieldSize
+ else:
+ def FieldSize(value):
+ l = local_len(value.encode('utf-8'))
+ return tag_size + local_VarintSize(l) + l
+ return FieldSize
+
+
def BytesSizer(field_number, is_repeated, is_packed):
"""Returns a sizer for a bytes field."""
@@ -340,7 +340,7 @@ def MessageSetItemSizer(field_number):
# Map is special: it needs custom logic to compute its size properly.
-def MapSizer(field_descriptor, is_message_map):
+def MapSizer(field_descriptor, is_message_map):
"""Returns a sizer for a map field."""
# Can't look at field_descriptor.message_type._concrete_class because it may
@@ -355,12 +355,12 @@ def MapSizer(field_descriptor, is_message_map):
# It's wasteful to create the messages and throw them away one second
# later since we'll do the same for the actual encode. But there's not an
# obvious way to avoid this within the current design without tons of code
- # duplication. For message map, value.ByteSize() should be called to
- # update the status.
+ # duplication. For message map, value.ByteSize() should be called to
+ # update the status.
entry_msg = message_type._concrete_class(key=key, value=value)
total += message_sizer(entry_msg)
- if is_message_map:
- value.ByteSize()
+ if is_message_map:
+ value.ByteSize()
return total
return FieldSize
@@ -413,7 +413,7 @@ def _VarintBytes(value):
called at startup time so it doesn't need to be fast."""
pieces = []
- _EncodeVarint(pieces.append, value, True)
+ _EncodeVarint(pieces.append, value, True)
return b"".join(pieces)
@@ -443,27 +443,27 @@ def _SimpleEncoder(wire_type, encode_value, compute_value_size):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value, deterministic):
+ def EncodePackedField(write, value, deterministic):
write(tag_bytes)
size = 0
for element in value:
size += compute_value_size(element)
- local_EncodeVarint(write, size, deterministic)
+ local_EncodeVarint(write, size, deterministic)
for element in value:
- encode_value(write, element, deterministic)
+ encode_value(write, element, deterministic)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value, deterministic):
+ def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag_bytes)
- encode_value(write, element, deterministic)
+ encode_value(write, element, deterministic)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(tag_bytes)
- return encode_value(write, value, deterministic)
+ return encode_value(write, value, deterministic)
return EncodeField
return SpecificEncoder
@@ -477,27 +477,27 @@ def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value, deterministic):
+ def EncodePackedField(write, value, deterministic):
write(tag_bytes)
size = 0
for element in value:
size += compute_value_size(modify_value(element))
- local_EncodeVarint(write, size, deterministic)
+ local_EncodeVarint(write, size, deterministic)
for element in value:
- encode_value(write, modify_value(element), deterministic)
+ encode_value(write, modify_value(element), deterministic)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value, deterministic):
+ def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag_bytes)
- encode_value(write, modify_value(element), deterministic)
+ encode_value(write, modify_value(element), deterministic)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(tag_bytes)
- return encode_value(write, modify_value(value), deterministic)
+ return encode_value(write, modify_value(value), deterministic)
return EncodeField
return SpecificEncoder
@@ -518,9 +518,9 @@ def _StructPackEncoder(wire_type, format):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value, deterministic):
+ def EncodePackedField(write, value, deterministic):
write(tag_bytes)
- local_EncodeVarint(write, len(value) * value_size, deterministic)
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
for element in value:
write(local_struct_pack(format, element))
return EncodePackedField
@@ -584,9 +584,9 @@ def _FloatingPointEncoder(wire_type, format):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value, deterministic):
+ def EncodePackedField(write, value, deterministic):
write(tag_bytes)
- local_EncodeVarint(write, len(value) * value_size, deterministic)
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
for element in value:
# This try/except block is going to be faster than any code that
# we could write to check whether element is finite.
@@ -653,9 +653,9 @@ def BoolEncoder(field_number, is_repeated, is_packed):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
- def EncodePackedField(write, value, deterministic):
+ def EncodePackedField(write, value, deterministic):
write(tag_bytes)
- local_EncodeVarint(write, len(value), deterministic)
+ local_EncodeVarint(write, len(value), deterministic)
for element in value:
if element:
write(true_byte)
@@ -682,30 +682,30 @@ def BoolEncoder(field_number, is_repeated, is_packed):
return EncodeField
-def StringEncoder(field_number, is_repeated, is_packed):
- """Returns an encoder for a string field."""
-
- tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
- local_EncodeVarint = _EncodeVarint
- local_len = len
- assert not is_packed
- if is_repeated:
- def EncodeRepeatedField(write, value, deterministic):
- for element in value:
- encoded = element.encode('utf-8')
- write(tag)
- local_EncodeVarint(write, local_len(encoded), deterministic)
- write(encoded)
- return EncodeRepeatedField
- else:
- def EncodeField(write, value, deterministic):
- encoded = value.encode('utf-8')
- write(tag)
- local_EncodeVarint(write, local_len(encoded), deterministic)
- return write(encoded)
- return EncodeField
-
-
+def StringEncoder(field_number, is_repeated, is_packed):
+ """Returns an encoder for a string field."""
+
+ tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
+ local_EncodeVarint = _EncodeVarint
+ local_len = len
+ assert not is_packed
+ if is_repeated:
+ def EncodeRepeatedField(write, value, deterministic):
+ for element in value:
+ encoded = element.encode('utf-8')
+ write(tag)
+ local_EncodeVarint(write, local_len(encoded), deterministic)
+ write(encoded)
+ return EncodeRepeatedField
+ else:
+ def EncodeField(write, value, deterministic):
+ encoded = value.encode('utf-8')
+ write(tag)
+ local_EncodeVarint(write, local_len(encoded), deterministic)
+ return write(encoded)
+ return EncodeField
+
+
def BytesEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a bytes field."""
@@ -714,16 +714,16 @@ def BytesEncoder(field_number, is_repeated, is_packed):
local_len = len
assert not is_packed
if is_repeated:
- def EncodeRepeatedField(write, value, deterministic):
+ def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag)
- local_EncodeVarint(write, local_len(element), deterministic)
+ local_EncodeVarint(write, local_len(element), deterministic)
write(element)
return EncodeRepeatedField
else:
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(tag)
- local_EncodeVarint(write, local_len(value), deterministic)
+ local_EncodeVarint(write, local_len(value), deterministic)
return write(value)
return EncodeField
@@ -735,16 +735,16 @@ def GroupEncoder(field_number, is_repeated, is_packed):
end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
assert not is_packed
if is_repeated:
- def EncodeRepeatedField(write, value, deterministic):
+ def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(start_tag)
- element._InternalSerialize(write, deterministic)
+ element._InternalSerialize(write, deterministic)
write(end_tag)
return EncodeRepeatedField
else:
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(start_tag)
- value._InternalSerialize(write, deterministic)
+ value._InternalSerialize(write, deterministic)
return write(end_tag)
return EncodeField
@@ -756,17 +756,17 @@ def MessageEncoder(field_number, is_repeated, is_packed):
local_EncodeVarint = _EncodeVarint
assert not is_packed
if is_repeated:
- def EncodeRepeatedField(write, value, deterministic):
+ def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag)
- local_EncodeVarint(write, element.ByteSize(), deterministic)
- element._InternalSerialize(write, deterministic)
+ local_EncodeVarint(write, element.ByteSize(), deterministic)
+ element._InternalSerialize(write, deterministic)
return EncodeRepeatedField
else:
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(tag)
- local_EncodeVarint(write, value.ByteSize(), deterministic)
- return value._InternalSerialize(write, deterministic)
+ local_EncodeVarint(write, value.ByteSize(), deterministic)
+ return value._InternalSerialize(write, deterministic)
return EncodeField
@@ -793,10 +793,10 @@ def MessageSetItemEncoder(field_number):
end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
local_EncodeVarint = _EncodeVarint
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
write(start_bytes)
- local_EncodeVarint(write, value.ByteSize(), deterministic)
- value._InternalSerialize(write, deterministic)
+ local_EncodeVarint(write, value.ByteSize(), deterministic)
+ value._InternalSerialize(write, deterministic)
return write(end_bytes)
return EncodeField
@@ -821,10 +821,10 @@ def MapEncoder(field_descriptor):
message_type = field_descriptor.message_type
encode_message = MessageEncoder(field_descriptor.number, False, False)
- def EncodeField(write, value, deterministic):
+ def EncodeField(write, value, deterministic):
value_keys = sorted(value.keys()) if deterministic else value
- for key in value_keys:
+ for key in value_keys:
entry_msg = message_type._concrete_class(key=key, value=value[key])
- encode_message(write, entry_msg, deterministic)
+ encode_message(write, entry_msg, deterministic)
return EncodeField
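For reference, the `deterministic` flag threaded through the encoders above is what makes map output byte-stable: MapEncoder iterates `sorted(value.keys())` when it is set. A minimal sketch using the stock well-known `Struct` type (whose `fields` member is a map field):

from google.protobuf import struct_pb2

# Two Structs with the same contents inserted in different orders.
a = struct_pb2.Struct()
a["zebra"] = 1
a["apple"] = 2

b = struct_pb2.Struct()
b["apple"] = 2
b["zebra"] = 1

# With deterministic=True the map keys are sorted before encoding, so the
# serialized bytes are identical regardless of insertion order.
assert a.SerializeToString(deterministic=True) == b.SerializeToString(deterministic=True)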
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/python_message.py b/contrib/python/protobuf/py2/google/protobuf/internal/python_message.py
index b4e73f2bfe..99d2f078de 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/python_message.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/python_message.py
@@ -51,7 +51,7 @@ this file*.
__author__ = 'robinson@google.com (Will Robinson)'
from io import BytesIO
-import struct
+import struct
import sys
import weakref
@@ -59,7 +59,7 @@ import six
from six.moves import range
# We use "as" to avoid name collisions with variables.
-from google.protobuf.internal import api_implementation
+from google.protobuf.internal import api_implementation
from google.protobuf.internal import containers
from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
@@ -189,7 +189,7 @@ class GeneratedProtocolMessageType(type):
if (descriptor.has_options and
descriptor.GetOptions().message_set_wire_format):
cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
- decoder.MessageSetItemDecoder(descriptor), None)
+ decoder.MessageSetItemDecoder(descriptor), None)
# Attach stuff to each FieldDescriptor for quick lookup later on.
for field in descriptor.fields:
@@ -310,8 +310,8 @@ def _AttachFieldHelpers(cls, field_descriptor):
if is_map_entry:
field_encoder = encoder.MapEncoder(field_descriptor)
- sizer = encoder.MapSizer(field_descriptor,
- _IsMessageMapField(field_descriptor))
+ sizer = encoder.MapSizer(field_descriptor,
+ _IsMessageMapField(field_descriptor))
elif _IsMessageSetExtension(field_descriptor):
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
@@ -408,15 +408,15 @@ def _GetInitializeDefaultForMap(field):
if _IsMessageMapField(field):
def MakeMessageMapDefault(message):
return containers.MessageMap(
- message._listener_for_children, value_field.message_type, key_checker,
- field.message_type)
+ message._listener_for_children, value_field.message_type, key_checker,
+ field.message_type)
return MakeMessageMapDefault
else:
value_checker = type_checkers.GetTypeChecker(value_field)
def MakePrimitiveMapDefault(message):
return containers.ScalarMap(
- message._listener_for_children, key_checker, value_checker,
- field.message_type)
+ message._listener_for_children, key_checker, value_checker,
+ field.message_type)
return MakePrimitiveMapDefault
def _DefaultValueConstructorForField(field):
@@ -789,19 +789,19 @@ def _AddPropertiesForExtensions(descriptor, cls):
constant_name = extension_name.upper() + '_FIELD_NUMBER'
setattr(cls, constant_name, extension_field.number)
- # TODO(amauryfa): Migrate all users of these attributes to functions like
- # pool.FindExtensionByNumber(descriptor).
- if descriptor.file is not None:
- # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
- pool = descriptor.file.pool
- cls._extensions_by_number = pool._extensions_by_number[descriptor]
- cls._extensions_by_name = pool._extensions_by_name[descriptor]
+ # TODO(amauryfa): Migrate all users of these attributes to functions like
+ # pool.FindExtensionByNumber(descriptor).
+ if descriptor.file is not None:
+ # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
+ pool = descriptor.file.pool
+ cls._extensions_by_number = pool._extensions_by_number[descriptor]
+ cls._extensions_by_name = pool._extensions_by_name[descriptor]
def _AddStaticMethods(cls):
# TODO(robinson): This probably needs to be thread-safe(?)
def RegisterExtension(extension_handle):
extension_handle.containing_type = cls.DESCRIPTOR
- # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
+ # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
# pylint: disable=protected-access
cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle)
_AttachFieldHelpers(cls, extension_handle)
@@ -949,7 +949,7 @@ def _AddHasExtensionMethod(cls):
def _InternalUnpackAny(msg):
"""Unpacks Any message and returns the unpacked message.
- This internal method is different from public Any Unpack method which takes
+ This internal method is different from public Any Unpack method which takes
the target message as argument. _InternalUnpackAny method does not have
target message type and need to find the message type in descriptor pool.
@@ -1088,31 +1088,31 @@ def _AddByteSizeMethod(message_descriptor, cls):
def _AddSerializeToStringMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
- def SerializeToString(self, **kwargs):
+ def SerializeToString(self, **kwargs):
# Check if the message has all of its required fields set.
if not self.IsInitialized():
raise message_mod.EncodeError(
'Message %s is missing required fields: %s' % (
self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
- return self.SerializePartialToString(**kwargs)
+ return self.SerializePartialToString(**kwargs)
cls.SerializeToString = SerializeToString
def _AddSerializePartialToStringMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
- def SerializePartialToString(self, **kwargs):
+ def SerializePartialToString(self, **kwargs):
out = BytesIO()
- self._InternalSerialize(out.write, **kwargs)
+ self._InternalSerialize(out.write, **kwargs)
return out.getvalue()
cls.SerializePartialToString = SerializePartialToString
- def InternalSerialize(self, write_bytes, deterministic=None):
- if deterministic is None:
- deterministic = (
- api_implementation.IsPythonDefaultSerializationDeterministic())
- else:
- deterministic = bool(deterministic)
+ def InternalSerialize(self, write_bytes, deterministic=None):
+ if deterministic is None:
+ deterministic = (
+ api_implementation.IsPythonDefaultSerializationDeterministic())
+ else:
+ deterministic = bool(deterministic)
descriptor = self.DESCRIPTOR
if descriptor.GetOptions().map_entry:
@@ -1313,7 +1313,7 @@ def _AddMergeFromMethod(cls):
if not isinstance(msg, cls):
raise TypeError(
'Parameter to MergeFrom() must be instance of same class: '
- 'expected %s got %s.' % (cls.__name__, msg.__class__.__name__))
+ 'expected %s got %s.' % (cls.__name__, msg.__class__.__name__))
assert msg is not self
self._Modified()
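The `_InternalUnpackAny` docstring above contrasts the helper with the public `Any.Unpack()` API, where the caller supplies the target message instead of resolving the type from the descriptor pool. A short sketch of that public path, using the stock well-known types:

from google.protobuf import any_pb2, duration_pb2

d = duration_pb2.Duration(seconds=3, nanos=10000000)
any_msg = any_pb2.Any()
any_msg.Pack(d)                      # stores the type URL plus the serialized payload

target = duration_pb2.Duration()
assert any_msg.Unpack(target)        # True when the type URL matches the target type
assert target.seconds == 3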
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/type_checkers.py b/contrib/python/protobuf/py2/google/protobuf/internal/type_checkers.py
index d38994c9c5..eb66f9f6fb 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/type_checkers.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/type_checkers.py
@@ -50,7 +50,7 @@ try:
except Exception: # pylint: disable=broad-except
ctypes = None
import struct
-import numbers
+import numbers
import six
if six.PY3:
@@ -160,11 +160,11 @@ class IntValueChecker(object):
"""Checker used for integer fields. Performs type-check and range check."""
def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, numbers.Integral):
+ if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), six.integer_types))
raise TypeError(message)
- if not self._MIN <= int(proposed_value) <= self._MAX:
+ if not self._MIN <= int(proposed_value) <= self._MAX:
raise ValueError('Value out of range: %d' % proposed_value)
# We force 32-bit values to int and 64-bit values to long to make
# alternate implementations where the distinction is more significant
@@ -184,11 +184,11 @@ class EnumValueChecker(object):
self._enum_type = enum_type
def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, numbers.Integral):
+ if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), six.integer_types))
raise TypeError(message)
- if int(proposed_value) not in self._enum_type.values_by_number:
+ if int(proposed_value) not in self._enum_type.values_by_number:
raise ValueError('Unknown enum value: %d' % proposed_value)
return proposed_value
@@ -196,28 +196,28 @@ class EnumValueChecker(object):
return self._enum_type.values[0].number
-class UnicodeValueChecker(object):
-
- """Checker used for string fields.
-
- Always returns a unicode value, even if the input is of type str.
- """
-
- def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, (bytes, six.text_type)):
- message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), (bytes, six.text_type)))
- raise TypeError(message)
-
- # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
- if isinstance(proposed_value, bytes):
- try:
- proposed_value = proposed_value.decode('utf-8')
- except UnicodeDecodeError:
- raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
- 'encoding. Non-UTF-8 strings must be converted to '
- 'unicode objects before being added.' %
- (proposed_value))
+class UnicodeValueChecker(object):
+
+ """Checker used for string fields.
+
+ Always returns a unicode value, even if the input is of type str.
+ """
+
+ def CheckValue(self, proposed_value):
+ if not isinstance(proposed_value, (bytes, six.text_type)):
+ message = ('%.1024r has type %s, but expected one of: %s' %
+ (proposed_value, type(proposed_value), (bytes, six.text_type)))
+ raise TypeError(message)
+
+ # If the value is of type 'bytes' make sure that it is valid UTF-8 data.
+ if isinstance(proposed_value, bytes):
+ try:
+ proposed_value = proposed_value.decode('utf-8')
+ except UnicodeDecodeError:
+ raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
+ 'encoding. Non-UTF-8 strings must be converted to '
+ 'unicode objects before being added.' %
+ (proposed_value))
else:
try:
proposed_value.encode('utf8')
@@ -226,12 +226,12 @@ class UnicodeValueChecker(object):
'can\'t be encoded in UTF-8.'%
(proposed_value))
- return proposed_value
-
- def DefaultValue(self):
- return u""
-
-
+ return proposed_value
+
+ def DefaultValue(self):
+ return u""
+
+
class Int32ValueChecker(IntValueChecker):
# We're sure to use ints instead of longs here since comparison may be more
# efficient.
@@ -301,7 +301,7 @@ _VALUE_CHECKERS = {
0.0, float, numbers.Real),
_FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
_FieldDescriptor.CPPTYPE_BOOL: TypeCheckerWithDefault(
- False, bool, numbers.Integral),
+ False, bool, numbers.Integral),
_FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
}
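The integer range check and the UTF-8 check restored above are visible directly through the message API. A rough sketch using a stock generated message (`FieldDescriptorProto` has an int32 `number` field and a string `name` field); the exact exception text differs between the pure-Python and C++ backends, so only the behaviour is illustrated:

from google.protobuf import descriptor_pb2

f = descriptor_pb2.FieldDescriptorProto()

try:
    f.number = 2 ** 31               # one past the int32 maximum
except ValueError as e:
    print("range check:", e)

try:
    f.name = b"\xff\xfe"             # bytes that are not valid UTF-8
except (ValueError, TypeError) as e:
    print("utf-8 check:", e)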
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/well_known_types.py b/contrib/python/protobuf/py2/google/protobuf/internal/well_known_types.py
index f444a91cf2..6f55d6b17b 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/well_known_types.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/well_known_types.py
@@ -61,7 +61,7 @@ _NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
-_DURATION_SECONDS_MAX = 315576000000
+_DURATION_SECONDS_MAX = 315576000000
class Any(object):
@@ -269,7 +269,7 @@ class Duration(object):
represent the exact Duration value. For example: "1s", "1.010s",
"1.000000100s", "-3.100s"
"""
- _CheckDurationValid(self.seconds, self.nanos)
+ _CheckDurationValid(self.seconds, self.nanos)
if self.seconds < 0 or self.nanos < 0:
result = '-'
seconds = - self.seconds + int((0 - self.nanos) // 1e9)
@@ -309,17 +309,17 @@ class Duration(object):
try:
pos = value.find('.')
if pos == -1:
- seconds = int(value[:-1])
- nanos = 0
+ seconds = int(value[:-1])
+ nanos = 0
else:
- seconds = int(value[:pos])
+ seconds = int(value[:pos])
if value[0] == '-':
- nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
+ nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
else:
- nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
- _CheckDurationValid(seconds, nanos)
- self.seconds = seconds
- self.nanos = nanos
+ nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
+ _CheckDurationValid(seconds, nanos)
+ self.seconds = seconds
+ self.nanos = nanos
except ValueError as e:
raise ValueError(
'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
@@ -371,12 +371,12 @@ class Duration(object):
self.nanos, _NANOS_PER_MICROSECOND))
def FromTimedelta(self, td):
- """Converts timedelta to Duration."""
+ """Converts timedelta to Duration."""
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
td.microseconds * _NANOS_PER_MICROSECOND)
def _NormalizeDuration(self, seconds, nanos):
- """Set Duration by seconds and nanos."""
+ """Set Duration by seconds and nanos."""
# Force nanos to be negative if the duration is negative.
if seconds < 0 and nanos > 0:
seconds += 1
@@ -385,20 +385,20 @@ class Duration(object):
self.nanos = nanos
-def _CheckDurationValid(seconds, nanos):
- if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
+def _CheckDurationValid(seconds, nanos):
+ if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
raise ValueError(
- 'Duration is not valid: Seconds {0} must be in range '
- '[-315576000000, 315576000000].'.format(seconds))
- if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
+ 'Duration is not valid: Seconds {0} must be in range '
+ '[-315576000000, 315576000000].'.format(seconds))
+ if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
raise ValueError(
- 'Duration is not valid: Nanos {0} must be in range '
- '[-999999999, 999999999].'.format(nanos))
+ 'Duration is not valid: Nanos {0} must be in range '
+ '[-999999999, 999999999].'.format(nanos))
if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
raise ValueError(
'Duration is not valid: Sign mismatch.')
-
-
+
+
def _RoundTowardZero(value, divider):
"""Truncates the remainder part after division."""
# For some languages, the sign of the remainder is implementation
@@ -421,10 +421,10 @@ class FieldMask(object):
def ToJsonString(self):
"""Converts FieldMask to string according to proto3 JSON spec."""
- camelcase_paths = []
- for path in self.paths:
- camelcase_paths.append(_SnakeCaseToCamelCase(path))
- return ','.join(camelcase_paths)
+ camelcase_paths = []
+ for path in self.paths:
+ camelcase_paths.append(_SnakeCaseToCamelCase(path))
+ return ','.join(camelcase_paths)
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
@@ -518,50 +518,50 @@ def _CheckFieldMaskMessage(message):
message_descriptor.full_name))
-def _SnakeCaseToCamelCase(path_name):
- """Converts a path name from snake_case to camelCase."""
- result = []
- after_underscore = False
- for c in path_name:
- if c.isupper():
+def _SnakeCaseToCamelCase(path_name):
+ """Converts a path name from snake_case to camelCase."""
+ result = []
+ after_underscore = False
+ for c in path_name:
+ if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
- if after_underscore:
- if c.islower():
- result.append(c.upper())
- after_underscore = False
- else:
+ if after_underscore:
+ if c.islower():
+ result.append(c.upper())
+ after_underscore = False
+ else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
- elif c == '_':
- after_underscore = True
- else:
- result += c
-
- if after_underscore:
+ elif c == '_':
+ after_underscore = True
+ else:
+ result += c
+
+ if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
- return ''.join(result)
-
-
-def _CamelCaseToSnakeCase(path_name):
- """Converts a field name from camelCase to snake_case."""
- result = []
- for c in path_name:
- if c == '_':
+ return ''.join(result)
+
+
+def _CamelCaseToSnakeCase(path_name):
+ """Converts a field name from camelCase to snake_case."""
+ result = []
+ for c in path_name:
+ if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
- '{0} must not contain "_"s.'.format(path_name))
- if c.isupper():
- result += '_'
- result += c.lower()
- else:
- result += c
- return ''.join(result)
-
-
+ '{0} must not contain "_"s.'.format(path_name))
+ if c.isupper():
+ result += '_'
+ result += c.lower()
+ else:
+ result += c
+ return ''.join(result)
+
+
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
diff --git a/contrib/python/protobuf/py2/google/protobuf/internal/wire_format.py b/contrib/python/protobuf/py2/google/protobuf/internal/wire_format.py
index 561c29f510..883f525585 100644
--- a/contrib/python/protobuf/py2/google/protobuf/internal/wire_format.py
+++ b/contrib/python/protobuf/py2/google/protobuf/internal/wire_format.py
@@ -180,8 +180,8 @@ def EnumByteSize(field_number, enum):
return UInt32ByteSize(field_number, enum)
-def StringByteSize(field_number, string):
- return BytesByteSize(field_number, string.encode('utf-8'))
+def StringByteSize(field_number, string):
+ return BytesByteSize(field_number, string.encode('utf-8'))
def BytesByteSize(field_number, b):
diff --git a/contrib/python/protobuf/py2/google/protobuf/json_format.py b/contrib/python/protobuf/py2/google/protobuf/json_format.py
index 80fc03c016..965614d803 100644
--- a/contrib/python/protobuf/py2/google/protobuf/json_format.py
+++ b/contrib/python/protobuf/py2/google/protobuf/json_format.py
@@ -44,7 +44,7 @@ __author__ = 'jieluo@google.com (Jie Luo)'
# pylint: disable=g-statement-before-imports,g-import-not-at-top
try:
- from collections import OrderedDict
+ from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # PY26
# pylint: enable=g-statement-before-imports,g-import-not-at-top
@@ -82,9 +82,9 @@ _UNPAIRED_SURROGATE_PATTERN = re.compile(six.u(
r'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
))
-_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
-
-
+_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
+
+
class Error(Exception):
"""Top-level module error for json_format."""
@@ -114,12 +114,12 @@ def MessageToJson(
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
- preserving_proto_field_name: If True, use the original proto field
- names as defined in the .proto file. If False, convert the field
- names to lowerCamelCase.
- indent: The JSON object will be pretty-printed with this indent level.
- An indent level of 0 or negative will only insert newlines.
- sort_keys: If True, then the output will be sorted by field names.
+ preserving_proto_field_name: If True, use the original proto field
+ names as defined in the .proto file. If False, convert the field
+ names to lowerCamelCase.
+ indent: The JSON object will be pretty-printed with this indent level.
+ An indent level of 0 or negative will only insert newlines.
+ sort_keys: If True, then the output will be sorted by field names.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
@@ -134,7 +134,7 @@ def MessageToJson(
use_integers_for_enums,
descriptor_pool,
float_precision=float_precision)
- return printer.ToJsonString(message, indent, sort_keys)
+ return printer.ToJsonString(message, indent, sort_keys)
def MessageToDict(
@@ -144,37 +144,37 @@ def MessageToDict(
use_integers_for_enums=False,
descriptor_pool=None,
float_precision=None):
- """Converts protobuf message to a dictionary.
-
- When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
-
- Args:
- message: The protocol buffers message instance to serialize.
- including_default_value_fields: If True, singular primitive fields,
- repeated fields, and map fields will always be serialized. If
- False, only serialize non-empty fields. Singular message fields
- and oneof fields are not affected by this option.
- preserving_proto_field_name: If True, use the original proto field
- names as defined in the .proto file. If False, convert the field
- names to lowerCamelCase.
+ """Converts protobuf message to a dictionary.
+
+ When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
+
+ Args:
+ message: The protocol buffers message instance to serialize.
+ including_default_value_fields: If True, singular primitive fields,
+ repeated fields, and map fields will always be serialized. If
+ False, only serialize non-empty fields. Singular message fields
+ and oneof fields are not affected by this option.
+ preserving_proto_field_name: If True, use the original proto field
+ names as defined in the .proto file. If False, convert the field
+ names to lowerCamelCase.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
float_precision: If set, use this to specify float field valid digits.
-
- Returns:
- A dict representation of the protocol buffer message.
- """
+
+ Returns:
+ A dict representation of the protocol buffer message.
+ """
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool,
float_precision=float_precision)
- # pylint: disable=protected-access
- return printer._MessageToJsonObject(message)
-
-
+ # pylint: disable=protected-access
+ return printer._MessageToJsonObject(message)
+
+
def _IsMapEntry(field):
return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
field.message_type.has_options and
@@ -192,7 +192,7 @@ class _Printer(object):
descriptor_pool=None,
float_precision=None):
self.including_default_value_fields = including_default_value_fields
- self.preserving_proto_field_name = preserving_proto_field_name
+ self.preserving_proto_field_name = preserving_proto_field_name
self.use_integers_for_enums = use_integers_for_enums
self.descriptor_pool = descriptor_pool
if float_precision:
@@ -200,7 +200,7 @@ class _Printer(object):
else:
self.float_format = None
- def ToJsonString(self, message, indent, sort_keys):
+ def ToJsonString(self, message, indent, sort_keys):
js = self._MessageToJsonObject(message)
return json.dumps(js, indent=indent, sort_keys=sort_keys)
@@ -221,10 +221,10 @@ class _Printer(object):
try:
for field, value in fields:
- if self.preserving_proto_field_name:
- name = field.name
- else:
- name = field.json_name
+ if self.preserving_proto_field_name:
+ name = field.name
+ else:
+ name = field.json_name
if _IsMapEntry(field):
# Convert a map field.
v_field = field.message_type.fields_by_name['value']
@@ -244,9 +244,9 @@ class _Printer(object):
# Convert a repeated field.
js[name] = [self._FieldToJsonObject(field, k)
for k in value]
- elif field.is_extension:
+ elif field.is_extension:
name = '[%s]' % field.full_name
- js[name] = self._FieldToJsonObject(field, value)
+ js[name] = self._FieldToJsonObject(field, value)
else:
js[name] = self._FieldToJsonObject(field, value)
@@ -259,10 +259,10 @@ class _Printer(object):
field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
field.containing_oneof):
continue
- if self.preserving_proto_field_name:
- name = field.name
- else:
- name = field.json_name
+ if self.preserving_proto_field_name:
+ name = field.name
+ else:
+ name = field.json_name
if name in js:
# Skip the field which has been serialized already.
continue
@@ -415,7 +415,7 @@ def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
Args:
text: Message JSON representation.
- message: A protocol buffer message to merge into.
+ message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
@@ -432,26 +432,26 @@ def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
return ParseDict(js, message, ignore_unknown_fields, descriptor_pool)
-
-
+
+
def ParseDict(js_dict,
message,
ignore_unknown_fields=False,
descriptor_pool=None):
- """Parses a JSON dictionary representation into a message.
-
- Args:
- js_dict: Dict representation of a JSON message.
- message: A protocol buffer message to merge into.
- ignore_unknown_fields: If True, do not raise errors for unknown fields.
+ """Parses a JSON dictionary representation into a message.
+
+ Args:
+ js_dict: Dict representation of a JSON message.
+ message: A protocol buffer message to merge into.
+ ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
-
- Returns:
- The same message passed as argument.
- """
+
+ Returns:
+ The same message passed as argument.
+ """
parser = _Parser(ignore_unknown_fields, descriptor_pool)
- parser.ConvertMessage(js_dict, message)
+ parser.ConvertMessage(js_dict, message)
return message
@@ -496,21 +496,21 @@ class _Parser(object):
"""
names = []
message_descriptor = message.DESCRIPTOR
- fields_by_json_name = dict((f.json_name, f)
- for f in message_descriptor.fields)
+ fields_by_json_name = dict((f.json_name, f)
+ for f in message_descriptor.fields)
for name in js:
try:
- field = fields_by_json_name.get(name, None)
+ field = fields_by_json_name.get(name, None)
if not field:
- field = message_descriptor.fields_by_name.get(name, None)
- if not field and _VALID_EXTENSION_NAME.match(name):
- if not message_descriptor.is_extendable:
- raise ParseError('Message type {0} does not have extensions'.format(
- message_descriptor.full_name))
- identifier = name[1:-1] # strip [] brackets
- # pylint: disable=protected-access
- field = message.Extensions._FindExtensionByName(identifier)
- # pylint: enable=protected-access
+ field = message_descriptor.fields_by_name.get(name, None)
+ if not field and _VALID_EXTENSION_NAME.match(name):
+ if not message_descriptor.is_extendable:
+ raise ParseError('Message type {0} does not have extensions'.format(
+ message_descriptor.full_name))
+ identifier = name[1:-1] # strip [] brackets
+ # pylint: disable=protected-access
+ field = message.Extensions._FindExtensionByName(identifier)
+ # pylint: enable=protected-access
if not field:
# Try looking for extension by the message type name, dropping the
# field name following the final . separator in full_name.
@@ -518,13 +518,13 @@ class _Parser(object):
# pylint: disable=protected-access
field = message.Extensions._FindExtensionByName(identifier)
# pylint: enable=protected-access
- if not field:
+ if not field:
if self.ignore_unknown_fields:
continue
raise ParseError(
- ('Message type "{0}" has no field named "{1}".\n'
- ' Available Fields(except extensions): {2}').format(
- message_descriptor.full_name, name,
+ ('Message type "{0}" has no field named "{1}".\n'
+ ' Available Fields(except extensions): {2}').format(
+ message_descriptor.full_name, name,
[f.json_name for f in message_descriptor.fields]))
if name in names:
raise ParseError('Message type "{0}" should not have multiple '
@@ -542,15 +542,15 @@ class _Parser(object):
names.append(oneof_name)
if value is None:
- if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
- and field.message_type.full_name == 'google.protobuf.Value'):
- sub_message = getattr(message, field.name)
- sub_message.null_value = 0
+ if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
+ and field.message_type.full_name == 'google.protobuf.Value'):
+ sub_message = getattr(message, field.name)
+ sub_message.null_value = 0
elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
and field.enum_type.full_name == 'google.protobuf.NullValue'):
setattr(message, field.name, 0)
- else:
- message.ClearField(field.name)
+ else:
+ message.ClearField(field.name)
continue
# Parse field value.
@@ -581,11 +581,11 @@ class _Parser(object):
getattr(message, field.name).append(
_ConvertScalarFieldValue(item, field))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
- if field.is_extension:
- sub_message = message.Extensions[field]
- else:
- sub_message = getattr(message, field.name)
- sub_message.SetInParent()
+ if field.is_extension:
+ sub_message = message.Extensions[field]
+ else:
+ sub_message = getattr(message, field.name)
+ sub_message.SetInParent()
self.ConvertMessage(value, sub_message)
else:
if field.is_extension:
@@ -629,8 +629,8 @@ class _Parser(object):
def _ConvertGenericMessage(self, value, message):
"""Convert a JSON representation into message with FromJsonString."""
- # Duration, Timestamp, FieldMask have a FromJsonString method to do the
- # conversion. Users can also call the method directly.
+ # Duration, Timestamp, FieldMask have a FromJsonString method to do the
+ # conversion. Users can also call the method directly.
try:
message.FromJsonString(value)
except ValueError as e:
@@ -746,18 +746,18 @@ def _ConvertScalarFieldValue(value, field, require_str=False):
# Convert an enum value.
enum_value = field.enum_type.values_by_name.get(value, None)
if enum_value is None:
- try:
- number = int(value)
- enum_value = field.enum_type.values_by_number.get(number, None)
- except ValueError:
- raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
- value, field.enum_type.full_name))
- if enum_value is None:
+ try:
+ number = int(value)
+ enum_value = field.enum_type.values_by_number.get(number, None)
+ except ValueError:
+ raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
+ value, field.enum_type.full_name))
+ if enum_value is None:
if field.file.syntax == 'proto3':
# Proto3 accepts unknown enums.
return number
- raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
- value, field.enum_type.full_name))
+ raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
+ value, field.enum_type.full_name))
return enum_value.number
@@ -773,7 +773,7 @@ def _ConvertInteger(value):
Raises:
ParseError: If an integer couldn't be consumed.
"""
- if isinstance(value, float) and not value.is_integer():
+ if isinstance(value, float) and not value.is_integer():
raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
if isinstance(value, six.text_type) and value.find(' ') != -1:
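A short sketch of the json_format options documented in the hunks above, using a stock generated message so the lowerCamelCase conversion and the `fields_by_name` fallback in the parser are visible:

from google.protobuf import descriptor_pb2, json_format

# ".pkg.Sub" is just an arbitrary placeholder value for the string field.
field = descriptor_pb2.FieldDescriptorProto(name="f", number=1, type_name=".pkg.Sub")

d1 = json_format.MessageToDict(field)
assert "typeName" in d1                      # keys converted to lowerCamelCase

d2 = json_format.MessageToDict(field, preserving_proto_field_name=True)
assert "type_name" in d2                     # keys kept as declared in the .proto

# The parser accepts either spelling and merges into the given message.
parsed = json_format.ParseDict({"typeName": ".pkg.Sub"},
                               descriptor_pb2.FieldDescriptorProto())
assert parsed.type_name == ".pkg.Sub"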
diff --git a/contrib/python/protobuf/py2/google/protobuf/message.py b/contrib/python/protobuf/py2/google/protobuf/message.py
index 593021f528..224d2fc491 100644
--- a/contrib/python/protobuf/py2/google/protobuf/message.py
+++ b/contrib/python/protobuf/py2/google/protobuf/message.py
@@ -198,13 +198,13 @@ class Message(object):
self.Clear()
return self.MergeFromString(serialized)
- def SerializeToString(self, **kwargs):
+ def SerializeToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
-
+
Returns:
A binary string representation of the message if all of the required
fields in the message are set (i.e. the message is initialized).
@@ -214,7 +214,7 @@ class Message(object):
"""
raise NotImplementedError
- def SerializePartialToString(self, **kwargs):
+ def SerializePartialToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
This method is similar to SerializeToString but doesn't check if the
@@ -223,7 +223,7 @@ class Message(object):
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
-
+
Returns:
bytes: A serialized representation of the partial message.
"""
diff --git a/contrib/python/protobuf/py2/google/protobuf/message_factory.py b/contrib/python/protobuf/py2/google/protobuf/message_factory.py
index 79a9d7ea6b..7dfaec88e1 100644
--- a/contrib/python/protobuf/py2/google/protobuf/message_factory.py
+++ b/contrib/python/protobuf/py2/google/protobuf/message_factory.py
@@ -75,14 +75,14 @@ class MessageFactory(object):
Returns:
A class describing the passed in descriptor.
"""
- if descriptor not in self._classes:
+ if descriptor not in self._classes:
result_class = self.CreatePrototype(descriptor)
# The assignment to _classes is redundant for the base implementation, but
# might avoid confusion in cases where CreatePrototype gets overridden and
# does not call the base implementation.
- self._classes[descriptor] = result_class
+ self._classes[descriptor] = result_class
return result_class
- return self._classes[descriptor]
+ return self._classes[descriptor]
def CreatePrototype(self, descriptor):
"""Builds a proto2 message class based on the passed in descriptor.
@@ -139,8 +139,8 @@ class MessageFactory(object):
result = {}
for file_name in files:
file_desc = self.pool.FindFileByName(file_name)
- for desc in file_desc.message_types_by_name.values():
- result[desc.full_name] = self.GetPrototype(desc)
+ for desc in file_desc.message_types_by_name.values():
+ result[desc.full_name] = self.GetPrototype(desc)
# While the extension FieldDescriptors are created by the descriptor pool,
# the python classes created in the factory need them to be registered
@@ -151,10 +151,10 @@ class MessageFactory(object):
# ignore the registration if the original was the same, or raise
# an error if they were different.
- for extension in file_desc.extensions_by_name.values():
- if extension.containing_type not in self._classes:
+ for extension in file_desc.extensions_by_name.values():
+ if extension.containing_type not in self._classes:
self.GetPrototype(extension.containing_type)
- extended_class = self._classes[extension.containing_type]
+ extended_class = self._classes[extension.containing_type]
extended_class.RegisterExtension(extension)
return result
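A brief sketch of the caching behaviour in `GetPrototype()` above: given a Descriptor it builds (or returns the cached) concrete message class bound to that descriptor. Shown here against a descriptor that ships with protobuf; the same call works for descriptors loaded into a custom pool:

from google.protobuf import descriptor_pb2, message_factory

factory = message_factory.MessageFactory()
cls = factory.GetPrototype(descriptor_pb2.DescriptorProto.DESCRIPTOR)

msg = cls(name="MyMessage")
assert msg.name == "MyMessage"

# Repeated lookups return the same cached class object.
assert cls is factory.GetPrototype(descriptor_pb2.DescriptorProto.DESCRIPTOR)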
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.cc
index 6c9b5ba3ca..de788afa2f 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.cc
@@ -244,7 +244,7 @@ static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
const Message& options(descriptor->options());
const Descriptor *message_type = options.GetDescriptor();
CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
- message_factory, message_type);
+ message_factory, message_type);
if (message_class == NULL) {
PyErr_Format(PyExc_TypeError, "Could not retrieve class for Options: %s",
message_type->full_name().c_str());
@@ -490,9 +490,9 @@ static PyObject* GetConcreteClass(PyBaseDescriptor* self, void *closure) {
// which contains this descriptor.
// This might not be the one you expect! For example the returned object does
// not know about extensions defined in a custom pool.
- CMessageClass* concrete_class(message_factory::GetMessageClass(
- GetDescriptorPool_FromPool(
- _GetDescriptor(self)->file()->pool())->py_message_factory,
+ CMessageClass* concrete_class(message_factory::GetMessageClass(
+ GetDescriptorPool_FromPool(
+ _GetDescriptor(self)->file()->pool())->py_message_factory,
_GetDescriptor(self)));
Py_XINCREF(concrete_class);
return concrete_class->AsPyObject();
@@ -758,14 +758,14 @@ static PyObject* GetCamelcaseName(PyBaseDescriptor* self, void *closure) {
return PyString_FromCppString(_GetDescriptor(self)->camelcase_name());
}
-static PyObject* GetJsonName(PyBaseDescriptor* self, void *closure) {
- return PyString_FromCppString(_GetDescriptor(self)->json_name());
-}
-
-static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
- return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
-}
-
+static PyObject* GetJsonName(PyBaseDescriptor* self, void *closure) {
+ return PyString_FromCppString(_GetDescriptor(self)->json_name());
+}
+
+static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
+ return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
+}
+
static PyObject* GetType(PyBaseDescriptor *self, void *closure) {
return PyInt_FromLong(_GetDescriptor(self)->type());
}
@@ -968,8 +968,8 @@ static PyGetSetDef Getters[] = {
{ "full_name", (getter)GetFullName, NULL, "Full name"},
{ "name", (getter)GetName, NULL, "Unqualified name"},
{ "camelcase_name", (getter)GetCamelcaseName, NULL, "Camelcase name"},
- { "json_name", (getter)GetJsonName, NULL, "Json name"},
- { "file", (getter)GetFile, NULL, "File Descriptor"},
+ { "json_name", (getter)GetJsonName, NULL, "Json name"},
+ { "file", (getter)GetFile, NULL, "File Descriptor"},
{ "type", (getter)GetType, NULL, "C++ Type"},
{ "cpp_type", (getter)GetCppType, NULL, "C++ Type"},
{ "label", (getter)GetLabel, NULL, "Label"},
@@ -1670,10 +1670,10 @@ static PyObject* GetFullName(PyBaseDescriptor* self, void *closure) {
return PyString_FromCppString(_GetDescriptor(self)->full_name());
}
-static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
- return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
-}
-
+static PyObject* GetFile(PyBaseDescriptor *self, void *closure) {
+ return PyFileDescriptor_FromDescriptor(_GetDescriptor(self)->file());
+}
+
static PyObject* GetIndex(PyBaseDescriptor *self, void *closure) {
return PyInt_FromLong(_GetDescriptor(self)->index());
}
@@ -1715,7 +1715,7 @@ static PyObject* CopyToProto(PyBaseDescriptor *self, PyObject *target) {
static PyGetSetDef Getters[] = {
{ "name", (getter)GetName, NULL, "Name", NULL},
{ "full_name", (getter)GetFullName, NULL, "Full name", NULL},
- { "file", (getter)GetFile, NULL, "File descriptor"},
+ { "file", (getter)GetFile, NULL, "File descriptor"},
{ "index", (getter)GetIndex, NULL, "Index", NULL},
{ "methods", (getter)GetMethods, NULL, "Methods", NULL},
@@ -1772,15 +1772,15 @@ PyObject* PyServiceDescriptor_FromDescriptor(
&PyServiceDescriptor_Type, service_descriptor, NULL);
}
-const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj) {
- if (!PyObject_TypeCheck(obj, &PyServiceDescriptor_Type)) {
- PyErr_SetString(PyExc_TypeError, "Not a ServiceDescriptor");
- return NULL;
- }
- return reinterpret_cast<const ServiceDescriptor*>(
- reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
-}
-
+const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj) {
+ if (!PyObject_TypeCheck(obj, &PyServiceDescriptor_Type)) {
+ PyErr_SetString(PyExc_TypeError, "Not a ServiceDescriptor");
+ return NULL;
+ }
+ return reinterpret_cast<const ServiceDescriptor*>(
+ reinterpret_cast<PyBaseDescriptor*>(obj)->descriptor);
+}
+
namespace method_descriptor {
// Unchecked accessor to the C++ pointer.
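The `json_name` and `file` getters restored above surface as plain attributes on descriptor objects in Python; a quick sketch against a generated descriptor:

from google.protobuf import descriptor_pb2

field = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR.fields_by_name["type_name"]
assert field.json_name == "typeName"
assert field.camelcase_name == "typeName"
assert field.file.name == "google/protobuf/descriptor.proto"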
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.h b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.h
index bf79c2253c..47efbe35d7 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor.h
@@ -83,7 +83,7 @@ const Descriptor* PyMessageDescriptor_AsDescriptor(PyObject* obj);
const FieldDescriptor* PyFieldDescriptor_AsDescriptor(PyObject* obj);
const EnumDescriptor* PyEnumDescriptor_AsDescriptor(PyObject* obj);
const FileDescriptor* PyFileDescriptor_AsDescriptor(PyObject* obj);
-const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj);
+const ServiceDescriptor* PyServiceDescriptor_AsDescriptor(PyObject* obj);
// Returns the raw C++ pointer.
const void* PyDescriptor_AsVoidPtr(PyObject* obj);
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.cc
index 7baaf34033..a53411e797 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.cc
@@ -114,13 +114,13 @@ static PyDescriptorPool* _CreateDescriptorPool() {
cpool->descriptor_options = new std::unordered_map<const void*, PyObject*>();
- cpool->py_message_factory = message_factory::NewMessageFactory(
- &PyMessageFactory_Type, cpool);
- if (cpool->py_message_factory == NULL) {
- Py_DECREF(cpool);
- return NULL;
- }
-
+ cpool->py_message_factory = message_factory::NewMessageFactory(
+ &PyMessageFactory_Type, cpool);
+ if (cpool->py_message_factory == NULL) {
+ Py_DECREF(cpool);
+ return NULL;
+ }
+
PyObject_GC_Track(cpool);
return cpool;
@@ -193,7 +193,7 @@ static PyObject* New(PyTypeObject* type,
static void Dealloc(PyObject* pself) {
PyDescriptorPool* self = reinterpret_cast<PyDescriptorPool*>(pself);
descriptor_pool_map->erase(self->pool);
- Py_CLEAR(self->py_message_factory);
+ Py_CLEAR(self->py_message_factory);
for (std::unordered_map<const void*, PyObject*>::iterator it =
self->descriptor_options->begin();
it != self->descriptor_options->end(); ++it) {
@@ -255,7 +255,7 @@ static PyObject* FindMessageByName(PyObject* self, PyObject* arg) {
-
+
static PyObject* FindFileByName(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
@@ -419,21 +419,21 @@ static PyObject* FindFileContainingSymbol(PyObject* self, PyObject* arg) {
}
static PyObject* FindExtensionByNumber(PyObject* self, PyObject* args) {
- PyObject* message_descriptor;
- int number;
- if (!PyArg_ParseTuple(args, "Oi", &message_descriptor, &number)) {
- return NULL;
- }
- const Descriptor* descriptor = PyMessageDescriptor_AsDescriptor(
- message_descriptor);
- if (descriptor == NULL) {
- return NULL;
- }
-
- const FieldDescriptor* extension_descriptor =
+ PyObject* message_descriptor;
+ int number;
+ if (!PyArg_ParseTuple(args, "Oi", &message_descriptor, &number)) {
+ return NULL;
+ }
+ const Descriptor* descriptor = PyMessageDescriptor_AsDescriptor(
+ message_descriptor);
+ if (descriptor == NULL) {
+ return NULL;
+ }
+
+ const FieldDescriptor* extension_descriptor =
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindExtensionByNumber(
descriptor, number);
- if (extension_descriptor == NULL) {
+ if (extension_descriptor == NULL) {
BuildFileErrorCollector* error_collector =
reinterpret_cast<BuildFileErrorCollector*>(
reinterpret_cast<PyDescriptorPool*>(self)->error_collector);
@@ -444,37 +444,37 @@ static PyObject* FindExtensionByNumber(PyObject* self, PyObject* args) {
return NULL;
}
PyErr_Format(PyExc_KeyError, "Couldn't find Extension %d", number);
-    return NULL;
-  }
-
-  return PyFieldDescriptor_FromDescriptor(extension_descriptor);
-}
-
+    return NULL;
+  }
+
+
+  return PyFieldDescriptor_FromDescriptor(extension_descriptor);
+}
static PyObject* FindAllExtensions(PyObject* self, PyObject* arg) {
- const Descriptor* descriptor = PyMessageDescriptor_AsDescriptor(arg);
- if (descriptor == NULL) {
- return NULL;
- }
-
- std::vector<const FieldDescriptor*> extensions;
+ const Descriptor* descriptor = PyMessageDescriptor_AsDescriptor(arg);
+ if (descriptor == NULL) {
+ return NULL;
+ }
+
+ std::vector<const FieldDescriptor*> extensions;
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindAllExtensions(
descriptor, &extensions);
-
- ScopedPyObjectPtr result(PyList_New(extensions.size()));
- if (result == NULL) {
- return NULL;
- }
- for (int i = 0; i < extensions.size(); i++) {
- PyObject* extension = PyFieldDescriptor_FromDescriptor(extensions[i]);
- if (extension == NULL) {
- return NULL;
- }
- PyList_SET_ITEM(result.get(), i, extension); // Steals the reference.
- }
- return result.release();
-}
-
+
+ ScopedPyObjectPtr result(PyList_New(extensions.size()));
+ if (result == NULL) {
+ return NULL;
+ }
+ for (int i = 0; i < extensions.size(); i++) {
+ PyObject* extension = PyFieldDescriptor_FromDescriptor(extensions[i]);
+ if (extension == NULL) {
+ return NULL;
+ }
+ PyList_SET_ITEM(result.get(), i, extension); // Steals the reference.
+ }
+ return result.release();
+}
+
// These functions should not exist -- the only valid way to create
// descriptors is to call Add() or AddSerializedFile().
// But these AddDescriptor() functions were created in Python and some people
@@ -536,39 +536,39 @@ static PyObject* AddEnumDescriptor(PyObject* self, PyObject* descriptor) {
}
static PyObject* AddExtensionDescriptor(PyObject* self, PyObject* descriptor) {
- const FieldDescriptor* extension_descriptor =
- PyFieldDescriptor_AsDescriptor(descriptor);
- if (!extension_descriptor) {
- return NULL;
- }
- if (extension_descriptor !=
+ const FieldDescriptor* extension_descriptor =
+ PyFieldDescriptor_AsDescriptor(descriptor);
+ if (!extension_descriptor) {
+ return NULL;
+ }
+ if (extension_descriptor !=
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindExtensionByName(
extension_descriptor->full_name())) {
- PyErr_Format(PyExc_ValueError,
- "The extension descriptor %s does not belong to this pool",
- extension_descriptor->full_name().c_str());
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
+ PyErr_Format(PyExc_ValueError,
+ "The extension descriptor %s does not belong to this pool",
+ extension_descriptor->full_name().c_str());
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
static PyObject* AddServiceDescriptor(PyObject* self, PyObject* descriptor) {
- const ServiceDescriptor* service_descriptor =
- PyServiceDescriptor_AsDescriptor(descriptor);
- if (!service_descriptor) {
- return NULL;
- }
- if (service_descriptor !=
+ const ServiceDescriptor* service_descriptor =
+ PyServiceDescriptor_AsDescriptor(descriptor);
+ if (!service_descriptor) {
+ return NULL;
+ }
+ if (service_descriptor !=
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindServiceByName(
service_descriptor->full_name())) {
- PyErr_Format(PyExc_ValueError,
- "The service descriptor %s does not belong to this pool",
- service_descriptor->full_name().c_str());
- return NULL;
- }
- Py_RETURN_NONE;
-}
-
+ PyErr_Format(PyExc_ValueError,
+ "The service descriptor %s does not belong to this pool",
+ service_descriptor->full_name().c_str());
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
// The code below loads new Descriptors from a serialized FileDescriptorProto.
static PyObject* AddSerializedFile(PyObject* pself, PyObject* serialized_pb) {
PyDescriptorPool* self = reinterpret_cast<PyDescriptorPool*>(pself);
@@ -643,9 +643,9 @@ static PyMethodDef Methods[] = {
{ "AddEnumDescriptor", AddEnumDescriptor, METH_O,
"No-op. Add() must have been called before." },
{ "AddExtensionDescriptor", AddExtensionDescriptor, METH_O,
- "No-op. Add() must have been called before." },
+ "No-op. Add() must have been called before." },
{ "AddServiceDescriptor", AddServiceDescriptor, METH_O,
- "No-op. Add() must have been called before." },
+ "No-op. Add() must have been called before." },
{ "FindFileByName", FindFileByName, METH_O,
"Searches for a file descriptor by its .proto name." },
@@ -667,9 +667,9 @@ static PyMethodDef Methods[] = {
{ "FindFileContainingSymbol", FindFileContainingSymbol, METH_O,
"Gets the FileDescriptor containing the specified symbol." },
{ "FindExtensionByNumber", FindExtensionByNumber, METH_VARARGS,
- "Gets the extension descriptor for the given number." },
+ "Gets the extension descriptor for the given number." },
{ "FindAllExtensions", FindAllExtensions, METH_O,
- "Gets all known extensions of the given message descriptor." },
+ "Gets all known extensions of the given message descriptor." },
{NULL}
};
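The pool methods registered above are reachable from Python as well; a sketch against the default descriptor pool. `FindExtensionByNumber` raises KeyError when nothing is registered for that (message, number) pair, and `FindAllExtensions` returns whatever extensions the pool currently knows about:

from google.protobuf import descriptor_pb2, descriptor_pool

pool = descriptor_pool.Default()
field_options = pool.FindMessageTypeByName("google.protobuf.FieldOptions")

print(pool.FindAllExtensions(field_options))     # [] unless extensions were registered

try:
    ext = pool.FindExtensionByNumber(field_options, 99999)
    print("found:", ext.full_name)
except KeyError:
    print("no extension registered under that number")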
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.h b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.h
index e5133c0d6e..2d456f9088 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/descriptor_pool.h
@@ -40,8 +40,8 @@ namespace google {
namespace protobuf {
namespace python {
-struct PyMessageFactory;
-
+struct PyMessageFactory;
+
// The (meta) type of all Messages classes.
struct CMessageClass;
@@ -71,10 +71,10 @@ typedef struct PyDescriptorPool {
// This pointer is owned.
const DescriptorDatabase* database;
- // The preferred MessageFactory to be used by descriptors.
- // TODO(amauryfa): Don't create the Factory from the DescriptorPool, but
- // use the one passed while creating message classes. And remove this member.
- PyMessageFactory* py_message_factory;
+ // The preferred MessageFactory to be used by descriptors.
+ // TODO(amauryfa): Don't create the Factory from the DescriptorPool, but
+ // use the one passed while creating message classes. And remove this member.
+ PyMessageFactory* py_message_factory;
// Cache the options for any kind of descriptor.
// Descriptor pointers are owned by the DescriptorPool above.
@@ -87,7 +87,7 @@ extern PyTypeObject PyDescriptorPool_Type;
namespace cdescriptor_pool {
-
+
// The functions below are also exposed as methods of the DescriptorPool type.
// Looks up a field by name. Returns a PyFieldDescriptor corresponding to
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/extension_dict.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/extension_dict.cc
index 71c560fb97..37b414c375 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/extension_dict.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/extension_dict.cc
@@ -49,18 +49,18 @@
#include <google/protobuf/pyext/repeated_scalar_container.h>
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#if PY_MAJOR_VERSION >= 3
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #endif
+#if PY_MAJOR_VERSION >= 3
+ #if PY_VERSION_HEX < 0x03030000
+ #error "Python 3.0 - 3.2 are not supported."
+ #endif
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) ? ((*(charpp) = const_cast<char*>( \
PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL \
? -1 \
: 0) \
: PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
-#endif
-
+#endif
+
namespace google {
namespace protobuf {
namespace python {
@@ -156,7 +156,7 @@ PyObject* subscript(ExtensionDict* self, PyObject* key) {
if (descriptor->label() != FieldDescriptor::LABEL_REPEATED &&
descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- // TODO(plabatut): consider building the class on the fly!
+ // TODO(plabatut): consider building the class on the fly!
ContainerBase* sub_message = cmessage::InternalGetSubMessage(
self->parent, descriptor);
if (sub_message == NULL) {
@@ -168,21 +168,21 @@ PyObject* subscript(ExtensionDict* self, PyObject* key) {
if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- // On the fly message class creation is needed to support the following
- // situation:
- // 1- add FileDescriptor to the pool that contains extensions of a message
- // defined by another proto file. Do not create any message classes.
- // 2- instantiate an extended message, and access the extension using
- // the field descriptor.
- // 3- the extension submessage fails to be returned, because no class has
- // been created.
- // It happens when deserializing text proto format, or when enumerating
- // fields of a deserialized message.
- CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
- cmessage::GetFactoryForMessage(self->parent),
+ // On the fly message class creation is needed to support the following
+ // situation:
+ // 1- add FileDescriptor to the pool that contains extensions of a message
+ // defined by another proto file. Do not create any message classes.
+ // 2- instantiate an extended message, and access the extension using
+ // the field descriptor.
+ // 3- the extension submessage fails to be returned, because no class has
+ // been created.
+ // It happens when deserializing text proto format, or when enumerating
+ // fields of a deserialized message.
+ CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
+ cmessage::GetFactoryForMessage(self->parent),
descriptor->message_type());
- ScopedPyObjectPtr message_class_handler(
- reinterpret_cast<PyObject*>(message_class));
+ ScopedPyObjectPtr message_class_handler(
+ reinterpret_cast<PyObject*>(message_class));
if (message_class == NULL) {
return NULL;
}
@@ -233,51 +233,51 @@ int ass_subscript(ExtensionDict* self, PyObject* key, PyObject* value) {
return 0;
}
-PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* arg) {
- char* name;
- Py_ssize_t name_size;
- if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
+PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* arg) {
+ char* name;
+ Py_ssize_t name_size;
+ if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
return NULL;
}
-
- PyDescriptorPool* pool = cmessage::GetFactoryForMessage(self->parent)->pool;
- const FieldDescriptor* message_extension =
+
+ PyDescriptorPool* pool = cmessage::GetFactoryForMessage(self->parent)->pool;
+ const FieldDescriptor* message_extension =
pool->pool->FindExtensionByName(StringParam(name, name_size));
- if (message_extension == NULL) {
- // Is is the name of a message set extension?
+ if (message_extension == NULL) {
+ // Is is the name of a message set extension?
const Descriptor* message_descriptor =
pool->pool->FindMessageTypeByName(StringParam(name, name_size));
- if (message_descriptor && message_descriptor->extension_count() > 0) {
- const FieldDescriptor* extension = message_descriptor->extension(0);
- if (extension->is_extension() &&
- extension->containing_type()->options().message_set_wire_format() &&
- extension->type() == FieldDescriptor::TYPE_MESSAGE &&
- extension->label() == FieldDescriptor::LABEL_OPTIONAL) {
- message_extension = extension;
- }
- }
- }
- if (message_extension == NULL) {
+ if (message_descriptor && message_descriptor->extension_count() > 0) {
+ const FieldDescriptor* extension = message_descriptor->extension(0);
+ if (extension->is_extension() &&
+ extension->containing_type()->options().message_set_wire_format() &&
+ extension->type() == FieldDescriptor::TYPE_MESSAGE &&
+ extension->label() == FieldDescriptor::LABEL_OPTIONAL) {
+ message_extension = extension;
+ }
+ }
+ }
+ if (message_extension == NULL) {
Py_RETURN_NONE;
}
-
- return PyFieldDescriptor_FromDescriptor(message_extension);
+
+ return PyFieldDescriptor_FromDescriptor(message_extension);
}
-PyObject* _FindExtensionByNumber(ExtensionDict* self, PyObject* arg) {
+PyObject* _FindExtensionByNumber(ExtensionDict* self, PyObject* arg) {
int64_t number = PyLong_AsLong(arg);
- if (number == -1 && PyErr_Occurred()) {
+ if (number == -1 && PyErr_Occurred()) {
return NULL;
}
-
- PyDescriptorPool* pool = cmessage::GetFactoryForMessage(self->parent)->pool;
- const FieldDescriptor* message_extension = pool->pool->FindExtensionByNumber(
- self->parent->message->GetDescriptor(), number);
- if (message_extension == NULL) {
+
+ PyDescriptorPool* pool = cmessage::GetFactoryForMessage(self->parent)->pool;
+ const FieldDescriptor* message_extension = pool->pool->FindExtensionByNumber(
+ self->parent->message->GetDescriptor(), number);
+ if (message_extension == NULL) {
Py_RETURN_NONE;
}
-
- return PyFieldDescriptor_FromDescriptor(message_extension);
+
+ return PyFieldDescriptor_FromDescriptor(message_extension);
}
static int Contains(PyObject* _self, PyObject* key) {
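
_FindExtensionByName and _FindExtensionByNumber back the extension lookups that text_format and json_format perform through a message's Extensions mapping. A hedged sketch of such a lookup from the Python side (the message type and extension name are illustrative):

    # Sketch only: 'my_pb2.Container' and 'pkg.my_ext' are hypothetical names.
    import my_pb2

    msg = my_pb2.Container()
    # Returns the extension FieldDescriptor, or None if nothing matches.
    field = msg.Extensions._FindExtensionByName('pkg.my_ext')
    if field is not None:
        msg.Extensions[field] = 42  # assuming a scalar optional extension
        by_number = msg.Extensions._FindExtensionByNumber(field.number)
        assert by_number.full_name == field.full_name
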
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.cc
index 33234f6917..e7a9cca23b 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.cc
@@ -329,15 +329,15 @@ PyObject* Clear(PyObject* _self) {
Py_RETURN_NONE;
}
-PyObject* GetEntryClass(PyObject* _self) {
- MapContainer* self = GetMap(_self);
- CMessageClass* message_class = message_factory::GetMessageClass(
- cmessage::GetFactoryForMessage(self->parent),
- self->parent_field_descriptor->message_type());
- Py_XINCREF(message_class);
- return reinterpret_cast<PyObject*>(message_class);
-}
-
+PyObject* GetEntryClass(PyObject* _self) {
+ MapContainer* self = GetMap(_self);
+ CMessageClass* message_class = message_factory::GetMessageClass(
+ cmessage::GetFactoryForMessage(self->parent),
+ self->parent_field_descriptor->message_type());
+ Py_XINCREF(message_class);
+ return reinterpret_cast<PyObject*>(message_class);
+}
+
PyObject* MapReflectionFriend::MergeFrom(PyObject* _self, PyObject* arg) {
MapContainer* self = GetMap(_self);
if (!PyObject_TypeCheck(arg, ScalarMapContainer_Type) &&
@@ -550,7 +550,7 @@ static PyMethodDef ScalarMapMethods[] = {
{NULL, NULL},
};
-PyTypeObject *ScalarMapContainer_Type;
+PyTypeObject *ScalarMapContainer_Type;
#if PY_MAJOR_VERSION >= 3
static PyType_Slot ScalarMapContainer_Type_slots[] = {
{Py_tp_dealloc, (void *)ScalarMapDealloc},
@@ -577,7 +577,7 @@ PyTypeObject *ScalarMapContainer_Type;
MapReflectionFriend::ScalarMapSetItem, // mp_ass_subscript
};
- PyTypeObject _ScalarMapContainer_Type = {
+ PyTypeObject _ScalarMapContainer_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
FULL_MODULE_NAME ".ScalarMapContainer", // tp_name
sizeof(MapContainer), // tp_basicsize
@@ -639,7 +639,7 @@ MessageMapContainer* NewMessageMapContainer(
return NULL;
}
- PyObject* obj = PyType_GenericAlloc(MessageMapContainer_Type, 0);
+ PyObject* obj = PyType_GenericAlloc(MessageMapContainer_Type, 0);
if (obj == NULL) {
PyErr_SetString(PyExc_RuntimeError, "Could not allocate new container.");
return NULL;
@@ -681,12 +681,12 @@ int MapReflectionFriend::MessageMapSetItem(PyObject* _self, PyObject* key,
}
// Delete key from map.
- if (reflection->ContainsMapKey(*message, self->parent_field_descriptor,
+ if (reflection->ContainsMapKey(*message, self->parent_field_descriptor,
map_key)) {
- // Delete key from CMessage dict.
- MapValueRef value;
- reflection->InsertOrLookupMapValue(message, self->parent_field_descriptor,
- map_key, &value);
+ // Delete key from CMessage dict.
+ MapValueRef value;
+ reflection->InsertOrLookupMapValue(message, self->parent_field_descriptor,
+ map_key, &value);
Message* sub_message = value.MutableMessageValue();
// If there is a living weak reference to an item, we "Release" it,
// otherwise we just discard the C++ value.
@@ -696,10 +696,10 @@ int MapReflectionFriend::MessageMapSetItem(PyObject* _self, PyObject* key,
released->message = msg->New();
msg->GetReflection()->Swap(msg, released->message);
}
-
- // Delete key from map.
- reflection->DeleteMapValue(message, self->parent_field_descriptor,
- map_key);
+
+ // Delete key from map.
+ reflection->DeleteMapValue(message, self->parent_field_descriptor,
+ map_key);
return 0;
} else {
PyErr_Format(PyExc_KeyError, "Key not present in map");
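
MessageMapSetItem above implements `del map_field[key]` for message-valued maps: if a Python object still refers to the removed entry, the C++ value is swapped into a standalone copy instead of being destroyed. A short sketch of the observable behaviour, assuming a hypothetical message `Table` with a `map<string, Row> rows` field and a `Row.value` field:

    # Sketch only: 'table_pb2.Table' and its 'rows' map field are hypothetical.
    import table_pb2

    t = table_pb2.Table()
    row = t.rows['a']       # creates the entry and returns the sub-message
    row.value = 1
    del t.rows['a']         # key removed from the map ...
    print('a' in t.rows)    # ... False
    print(row.value)        # ... but the previously obtained Row keeps its data
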
@@ -819,7 +819,7 @@ static PyMethodDef MessageMapMethods[] = {
{NULL, NULL},
};
-PyTypeObject *MessageMapContainer_Type;
+PyTypeObject *MessageMapContainer_Type;
#if PY_MAJOR_VERSION >= 3
static PyType_Slot MessageMapContainer_Type_slots[] = {
{Py_tp_dealloc, (void *)MessageMapDealloc},
@@ -846,7 +846,7 @@ PyTypeObject *MessageMapContainer_Type;
MapReflectionFriend::MessageMapSetItem, // mp_ass_subscript
};
- PyTypeObject _MessageMapContainer_Type = {
+ PyTypeObject _MessageMapContainer_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
FULL_MODULE_NAME ".MessageMapContainer", // tp_name
sizeof(MessageMapContainer), // tp_basicsize
@@ -999,61 +999,61 @@ PyTypeObject MapIterator_Type = {
0, // tp_init
};
-bool InitMapContainers() {
- // ScalarMapContainer_Type derives from our MutableMapping type.
- ScopedPyObjectPtr containers(PyImport_ImportModule(
- "google.protobuf.internal.containers"));
- if (containers == NULL) {
- return false;
- }
-
- ScopedPyObjectPtr mutable_mapping(
- PyObject_GetAttrString(containers.get(), "MutableMapping"));
- if (mutable_mapping == NULL) {
- return false;
- }
-
+bool InitMapContainers() {
+ // ScalarMapContainer_Type derives from our MutableMapping type.
+ ScopedPyObjectPtr containers(PyImport_ImportModule(
+ "google.protobuf.internal.containers"));
+ if (containers == NULL) {
+ return false;
+ }
+
+ ScopedPyObjectPtr mutable_mapping(
+ PyObject_GetAttrString(containers.get(), "MutableMapping"));
+ if (mutable_mapping == NULL) {
+ return false;
+ }
+
Py_INCREF(mutable_mapping.get());
#if PY_MAJOR_VERSION >= 3
ScopedPyObjectPtr bases(PyTuple_Pack(1, mutable_mapping.get()));
if (bases == NULL) {
- return false;
- }
-
- ScalarMapContainer_Type = reinterpret_cast<PyTypeObject*>(
+ return false;
+ }
+
+ ScalarMapContainer_Type = reinterpret_cast<PyTypeObject*>(
PyType_FromSpecWithBases(&ScalarMapContainer_Type_spec, bases.get()));
-#else
- _ScalarMapContainer_Type.tp_base =
- reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
-
- if (PyType_Ready(&_ScalarMapContainer_Type) < 0) {
- return false;
- }
-
- ScalarMapContainer_Type = &_ScalarMapContainer_Type;
-#endif
-
- if (PyType_Ready(&MapIterator_Type) < 0) {
- return false;
- }
-
-#if PY_MAJOR_VERSION >= 3
- MessageMapContainer_Type = reinterpret_cast<PyTypeObject*>(
+#else
+ _ScalarMapContainer_Type.tp_base =
+ reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
+
+ if (PyType_Ready(&_ScalarMapContainer_Type) < 0) {
+ return false;
+ }
+
+ ScalarMapContainer_Type = &_ScalarMapContainer_Type;
+#endif
+
+ if (PyType_Ready(&MapIterator_Type) < 0) {
+ return false;
+ }
+
+#if PY_MAJOR_VERSION >= 3
+ MessageMapContainer_Type = reinterpret_cast<PyTypeObject*>(
PyType_FromSpecWithBases(&MessageMapContainer_Type_spec, bases.get()));
-#else
- Py_INCREF(mutable_mapping.get());
- _MessageMapContainer_Type.tp_base =
- reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
-
- if (PyType_Ready(&_MessageMapContainer_Type) < 0) {
- return false;
- }
-
- MessageMapContainer_Type = &_MessageMapContainer_Type;
-#endif
- return true;
-}
-
+#else
+ Py_INCREF(mutable_mapping.get());
+ _MessageMapContainer_Type.tp_base =
+ reinterpret_cast<PyTypeObject*>(mutable_mapping.get());
+
+ if (PyType_Ready(&_MessageMapContainer_Type) < 0) {
+ return false;
+ }
+
+ MessageMapContainer_Type = &_MessageMapContainer_Type;
+#endif
+ return true;
+}
+
} // namespace python
} // namespace protobuf
} // namespace google
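
InitMapContainers derives both map container types from the MutableMapping ABC in google.protobuf.internal.containers, so map fields expose the usual dict-style protocol. A brief sketch, again with a hypothetical `map<string, int32> counts` field:

    # Sketch only: 'stats_pb2.Stats' with a map<string, int32> field 'counts' is hypothetical.
    import stats_pb2

    s = stats_pb2.Stats()
    s.counts['reads'] = 3
    s.counts.update({'writes': 5})       # MutableMapping mixin method
    for key, value in s.counts.items():  # keys(), values(), get() also work
        print(key, value)
    print(len(s.counts), 'reads' in s.counts)
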
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.h b/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.h
index d5706034c5..842602e79f 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/map_container.h
@@ -65,10 +65,10 @@ struct MessageMapContainer : public MapContainer {
CMessageClass* message_class;
};
-bool InitMapContainers();
+bool InitMapContainers();
-extern PyTypeObject* MessageMapContainer_Type;
-extern PyTypeObject* ScalarMapContainer_Type;
+extern PyTypeObject* MessageMapContainer_Type;
+extern PyTypeObject* ScalarMapContainer_Type;
extern PyTypeObject MapIterator_Type; // Both map types use the same iterator.
// Builds a MapContainer object, from a parent message and a
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/message.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/message.cc
index 00dd57307e..8b41ca47dd 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/message.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/message.cc
@@ -239,12 +239,12 @@ static PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
return nullptr;
}
- // Messages have no __dict__
- ScopedPyObjectPtr slots(PyTuple_New(0));
- if (PyDict_SetItemString(dict, "__slots__", slots.get()) < 0) {
- return NULL;
- }
-
+ // Messages have no __dict__
+ ScopedPyObjectPtr slots(PyTuple_New(0));
+ if (PyDict_SetItemString(dict, "__slots__", slots.get()) < 0) {
+ return NULL;
+ }
+
// Build the arguments to the base metaclass.
// We change the __bases__ classes.
ScopedPyObjectPtr new_args;
@@ -296,19 +296,19 @@ static PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
newtype->message_descriptor = descriptor;
// TODO(amauryfa): Don't always use the canonical pool of the descriptor,
// use the MessageFactory optionally passed in the class dict.
- PyDescriptorPool* py_descriptor_pool =
- GetDescriptorPool_FromPool(descriptor->file()->pool());
- if (py_descriptor_pool == NULL) {
+ PyDescriptorPool* py_descriptor_pool =
+ GetDescriptorPool_FromPool(descriptor->file()->pool());
+ if (py_descriptor_pool == NULL) {
return NULL;
}
- newtype->py_message_factory = py_descriptor_pool->py_message_factory;
- Py_INCREF(newtype->py_message_factory);
+ newtype->py_message_factory = py_descriptor_pool->py_message_factory;
+ Py_INCREF(newtype->py_message_factory);
- // Register the message in the MessageFactory.
- // TODO(amauryfa): Move this call to MessageFactory.GetPrototype() when the
- // MessageFactory is fully implemented in C++.
- if (message_factory::RegisterMessageClass(newtype->py_message_factory,
- descriptor, newtype) < 0) {
+ // Register the message in the MessageFactory.
+ // TODO(amauryfa): Move this call to MessageFactory.GetPrototype() when the
+ // MessageFactory is fully implemented in C++.
+ if (message_factory::RegisterMessageClass(newtype->py_message_factory,
+ descriptor, newtype) < 0) {
return NULL;
}
@@ -321,8 +321,8 @@ static PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
static void Dealloc(PyObject* pself) {
CMessageClass* self = reinterpret_cast<CMessageClass*>(pself);
- Py_XDECREF(self->py_message_descriptor);
- Py_XDECREF(self->py_message_factory);
+ Py_XDECREF(self->py_message_descriptor);
+ Py_XDECREF(self->py_message_factory);
return PyType_Type.tp_dealloc(pself);
}
@@ -365,9 +365,9 @@ static int InsertEmptyWeakref(PyTypeObject *base_type) {
#endif // PY_MAJOR_VERSION >= 3
}
-// The _extensions_by_name dictionary is built on every access.
-// TODO(amauryfa): Migrate all users to pool.FindAllExtensions()
-static PyObject* GetExtensionsByName(CMessageClass *self, void *closure) {
+// The _extensions_by_name dictionary is built on every access.
+// TODO(amauryfa): Migrate all users to pool.FindAllExtensions()
+static PyObject* GetExtensionsByName(CMessageClass *self, void *closure) {
if (self->message_descriptor == NULL) {
// This is the base Message object, simply raise AttributeError.
PyErr_SetString(PyExc_AttributeError,
@@ -375,29 +375,29 @@ static PyObject* GetExtensionsByName(CMessageClass *self, void *closure) {
return NULL;
}
- const PyDescriptorPool* pool = self->py_message_factory->pool;
-
- std::vector<const FieldDescriptor*> extensions;
- pool->pool->FindAllExtensions(self->message_descriptor, &extensions);
-
- ScopedPyObjectPtr result(PyDict_New());
- for (int i = 0; i < extensions.size(); i++) {
- ScopedPyObjectPtr extension(
- PyFieldDescriptor_FromDescriptor(extensions[i]));
- if (extension == NULL) {
- return NULL;
- }
- if (PyDict_SetItemString(result.get(), extensions[i]->full_name().c_str(),
- extension.get()) < 0) {
- return NULL;
- }
- }
- return result.release();
-}
-
-// The _extensions_by_number dictionary is built on every access.
-// TODO(amauryfa): Migrate all users to pool.FindExtensionByNumber()
-static PyObject* GetExtensionsByNumber(CMessageClass *self, void *closure) {
+ const PyDescriptorPool* pool = self->py_message_factory->pool;
+
+ std::vector<const FieldDescriptor*> extensions;
+ pool->pool->FindAllExtensions(self->message_descriptor, &extensions);
+
+ ScopedPyObjectPtr result(PyDict_New());
+ for (int i = 0; i < extensions.size(); i++) {
+ ScopedPyObjectPtr extension(
+ PyFieldDescriptor_FromDescriptor(extensions[i]));
+ if (extension == NULL) {
+ return NULL;
+ }
+ if (PyDict_SetItemString(result.get(), extensions[i]->full_name().c_str(),
+ extension.get()) < 0) {
+ return NULL;
+ }
+ }
+ return result.release();
+}
+
+// The _extensions_by_number dictionary is built on every access.
+// TODO(amauryfa): Migrate all users to pool.FindExtensionByNumber()
+static PyObject* GetExtensionsByNumber(CMessageClass *self, void *closure) {
if (self->message_descriptor == NULL) {
// This is the base Message object, simply raise AttributeError.
PyErr_SetString(PyExc_AttributeError,
@@ -405,35 +405,35 @@ static PyObject* GetExtensionsByNumber(CMessageClass *self, void *closure) {
return NULL;
}
- const PyDescriptorPool* pool = self->py_message_factory->pool;
-
- std::vector<const FieldDescriptor*> extensions;
- pool->pool->FindAllExtensions(self->message_descriptor, &extensions);
-
- ScopedPyObjectPtr result(PyDict_New());
- for (int i = 0; i < extensions.size(); i++) {
- ScopedPyObjectPtr extension(
- PyFieldDescriptor_FromDescriptor(extensions[i]));
- if (extension == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr number(PyInt_FromLong(extensions[i]->number()));
- if (number == NULL) {
- return NULL;
- }
- if (PyDict_SetItem(result.get(), number.get(), extension.get()) < 0) {
- return NULL;
- }
- }
- return result.release();
-}
-
-static PyGetSetDef Getters[] = {
- {"_extensions_by_name", (getter)GetExtensionsByName, NULL},
- {"_extensions_by_number", (getter)GetExtensionsByNumber, NULL},
- {NULL}
-};
-
+ const PyDescriptorPool* pool = self->py_message_factory->pool;
+
+ std::vector<const FieldDescriptor*> extensions;
+ pool->pool->FindAllExtensions(self->message_descriptor, &extensions);
+
+ ScopedPyObjectPtr result(PyDict_New());
+ for (int i = 0; i < extensions.size(); i++) {
+ ScopedPyObjectPtr extension(
+ PyFieldDescriptor_FromDescriptor(extensions[i]));
+ if (extension == NULL) {
+ return NULL;
+ }
+ ScopedPyObjectPtr number(PyInt_FromLong(extensions[i]->number()));
+ if (number == NULL) {
+ return NULL;
+ }
+ if (PyDict_SetItem(result.get(), number.get(), extension.get()) < 0) {
+ return NULL;
+ }
+ }
+ return result.release();
+}
+
+static PyGetSetDef Getters[] = {
+ {"_extensions_by_name", (getter)GetExtensionsByName, NULL},
+ {"_extensions_by_number", (getter)GetExtensionsByNumber, NULL},
+ {NULL}
+};
+
// Compute some class attributes on the fly:
// - All the _FIELD_NUMBER attributes, for all fields and nested extensions.
// Returns a new reference, or NULL with an exception set.
@@ -566,122 +566,122 @@ void FormatTypeError(PyObject* arg, const char* expected_types) {
}
}
-void OutOfRangeError(PyObject* arg) {
- PyObject *s = PyObject_Str(arg);
- if (s) {
- PyErr_Format(PyExc_ValueError,
- "Value out of range: %s",
- PyString_AsString(s));
- Py_DECREF(s);
- }
-}
-
-template<class RangeType, class ValueType>
-bool VerifyIntegerCastAndRange(PyObject* arg, ValueType value) {
+void OutOfRangeError(PyObject* arg) {
+ PyObject *s = PyObject_Str(arg);
+ if (s) {
+ PyErr_Format(PyExc_ValueError,
+ "Value out of range: %s",
+ PyString_AsString(s));
+ Py_DECREF(s);
+ }
+}
+
+template<class RangeType, class ValueType>
+bool VerifyIntegerCastAndRange(PyObject* arg, ValueType value) {
if (PROTOBUF_PREDICT_FALSE(value == -1 && PyErr_Occurred())) {
- if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
- // Replace it with the same ValueError as pure python protos instead of
- // the default one.
- PyErr_Clear();
- OutOfRangeError(arg);
- } // Otherwise propagate existing error.
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ // Replace it with the same ValueError as pure python protos instead of
+ // the default one.
+ PyErr_Clear();
+ OutOfRangeError(arg);
+ } // Otherwise propagate existing error.
return false;
}
if (PROTOBUF_PREDICT_FALSE(!IsValidNumericCast<RangeType>(value))) {
- OutOfRangeError(arg);
+ OutOfRangeError(arg);
return false;
}
- return true;
-}
-
+ return true;
+}
+
template <class T>
-bool CheckAndGetInteger(PyObject* arg, T* value) {
- // The fast path.
-#if PY_MAJOR_VERSION < 3
- // For the typical case, offer a fast path.
+bool CheckAndGetInteger(PyObject* arg, T* value) {
+ // The fast path.
+#if PY_MAJOR_VERSION < 3
+ // For the typical case, offer a fast path.
if (PROTOBUF_PREDICT_TRUE(PyInt_Check(arg))) {
long int_result = PyInt_AsLong(arg);
if (PROTOBUF_PREDICT_TRUE(IsValidNumericCast<T>(int_result))) {
- *value = static_cast<T>(int_result);
- return true;
- } else {
- OutOfRangeError(arg);
- return false;
- }
- }
+ *value = static_cast<T>(int_result);
+ return true;
+ } else {
+ OutOfRangeError(arg);
+ return false;
+ }
+ }
#endif
- // This effectively defines an integer as "an object that can be cast as
- // an integer and can be used as an ordinal number".
- // This definition includes everything that implements numbers.Integral
- // and shouldn't cast the net too wide.
+ // This effectively defines an integer as "an object that can be cast as
+ // an integer and can be used as an ordinal number".
+ // This definition includes everything that implements numbers.Integral
+ // and shouldn't cast the net too wide.
if (PROTOBUF_PREDICT_FALSE(!PyIndex_Check(arg))) {
- FormatTypeError(arg, "int, long");
+ FormatTypeError(arg, "int, long");
return false;
}
-
- // Now we have an integral number so we can safely use PyLong_ functions.
- // We need to treat the signed and unsigned cases differently in case arg is
- // holding a value above the maximum for signed longs.
- if (std::numeric_limits<T>::min() == 0) {
- // Unsigned case.
- unsigned PY_LONG_LONG ulong_result;
- if (PyLong_Check(arg)) {
- ulong_result = PyLong_AsUnsignedLongLong(arg);
+
+ // Now we have an integral number so we can safely use PyLong_ functions.
+ // We need to treat the signed and unsigned cases differently in case arg is
+ // holding a value above the maximum for signed longs.
+ if (std::numeric_limits<T>::min() == 0) {
+ // Unsigned case.
+ unsigned PY_LONG_LONG ulong_result;
+ if (PyLong_Check(arg)) {
+ ulong_result = PyLong_AsUnsignedLongLong(arg);
} else {
- // Unlike PyLong_AsLongLong, PyLong_AsUnsignedLongLong is very
- // picky about the exact type.
- PyObject* casted = PyNumber_Long(arg);
+ // Unlike PyLong_AsLongLong, PyLong_AsUnsignedLongLong is very
+ // picky about the exact type.
+ PyObject* casted = PyNumber_Long(arg);
if (PROTOBUF_PREDICT_FALSE(casted == nullptr)) {
- // Propagate existing error.
- return false;
- }
- ulong_result = PyLong_AsUnsignedLongLong(casted);
- Py_DECREF(casted);
- }
- if (VerifyIntegerCastAndRange<T, unsigned PY_LONG_LONG>(arg,
- ulong_result)) {
- *value = static_cast<T>(ulong_result);
- } else {
- return false;
- }
- } else {
- // Signed case.
- PY_LONG_LONG long_result;
- PyNumberMethods *nb;
- if ((nb = arg->ob_type->tp_as_number) != NULL && nb->nb_int != NULL) {
- // PyLong_AsLongLong requires it to be a long or to have an __int__()
- // method.
- long_result = PyLong_AsLongLong(arg);
- } else {
- // Valid subclasses of numbers.Integral should have a __long__() method
- // so fall back to that.
- PyObject* casted = PyNumber_Long(arg);
+ // Propagate existing error.
+ return false;
+ }
+ ulong_result = PyLong_AsUnsignedLongLong(casted);
+ Py_DECREF(casted);
+ }
+ if (VerifyIntegerCastAndRange<T, unsigned PY_LONG_LONG>(arg,
+ ulong_result)) {
+ *value = static_cast<T>(ulong_result);
+ } else {
+ return false;
+ }
+ } else {
+ // Signed case.
+ PY_LONG_LONG long_result;
+ PyNumberMethods *nb;
+ if ((nb = arg->ob_type->tp_as_number) != NULL && nb->nb_int != NULL) {
+ // PyLong_AsLongLong requires it to be a long or to have an __int__()
+ // method.
+ long_result = PyLong_AsLongLong(arg);
+ } else {
+ // Valid subclasses of numbers.Integral should have a __long__() method
+ // so fall back to that.
+ PyObject* casted = PyNumber_Long(arg);
if (PROTOBUF_PREDICT_FALSE(casted == nullptr)) {
- // Propagate existing error.
- return false;
- }
- long_result = PyLong_AsLongLong(casted);
- Py_DECREF(casted);
- }
- if (VerifyIntegerCastAndRange<T, PY_LONG_LONG>(arg, long_result)) {
- *value = static_cast<T>(long_result);
- } else {
- return false;
- }
- }
-
+ // Propagate existing error.
+ return false;
+ }
+ long_result = PyLong_AsLongLong(casted);
+ Py_DECREF(casted);
+ }
+ if (VerifyIntegerCastAndRange<T, PY_LONG_LONG>(arg, long_result)) {
+ *value = static_cast<T>(long_result);
+ } else {
+ return false;
+ }
+ }
+
return true;
}
// These are referenced by repeated_scalar_container, and must
// be explicitly instantiated.
-template bool CheckAndGetInteger<int32>(PyObject*, int32*);
-template bool CheckAndGetInteger<int64>(PyObject*, int64*);
-template bool CheckAndGetInteger<uint32>(PyObject*, uint32*);
-template bool CheckAndGetInteger<uint64>(PyObject*, uint64*);
+template bool CheckAndGetInteger<int32>(PyObject*, int32*);
+template bool CheckAndGetInteger<int64>(PyObject*, int64*);
+template bool CheckAndGetInteger<uint32>(PyObject*, uint32*);
+template bool CheckAndGetInteger<uint64>(PyObject*, uint64*);
bool CheckAndGetDouble(PyObject* arg, double* value) {
- *value = PyFloat_AsDouble(arg);
+ *value = PyFloat_AsDouble(arg);
if (PROTOBUF_PREDICT_FALSE(*value == -1 && PyErr_Occurred())) {
FormatTypeError(arg, "int, long, float");
return false;
@@ -699,13 +699,13 @@ bool CheckAndGetFloat(PyObject* arg, float* value) {
}
bool CheckAndGetBool(PyObject* arg, bool* value) {
- long long_value = PyInt_AsLong(arg);
- if (long_value == -1 && PyErr_Occurred()) {
+ long long_value = PyInt_AsLong(arg);
+ if (long_value == -1 && PyErr_Occurred()) {
FormatTypeError(arg, "int, long, bool");
return false;
}
- *value = static_cast<bool>(long_value);
-
+ *value = static_cast<bool>(long_value);
+
return true;
}
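
CheckAndGetInteger accepts anything that passes PyIndex_Check, then range-checks the value against the target field type and reports overflow with the same ValueError the pure-Python implementation raises. In practice that looks like the following (message and field names are illustrative):

    # Sketch only: 'sample_pb2.Sample' with int32/uint32 fields is hypothetical.
    import sample_pb2

    msg = sample_pb2.Sample()
    msg.int32_field = 2**31 - 1      # fits in int32: accepted
    try:
        msg.int32_field = 2**31      # one past INT32_MAX
    except ValueError as e:
        print('rejected:', e)        # "Value out of range: 2147483648"
    try:
        msg.uint32_field = -1        # unsigned fields reject negative values
    except ValueError as e:
        print('rejected:', e)
    try:
        msg.int32_field = 1.5        # not an integral type at all
    except TypeError as e:
        print('rejected:', e)
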
@@ -735,43 +735,43 @@ bool AllowInvalidUTF8(const FieldDescriptor* field) { return false; }
PyObject* CheckString(PyObject* arg, const FieldDescriptor* descriptor) {
GOOGLE_DCHECK(descriptor->type() == FieldDescriptor::TYPE_STRING ||
descriptor->type() == FieldDescriptor::TYPE_BYTES);
- if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
- if (!PyBytes_Check(arg) && !PyUnicode_Check(arg)) {
- FormatTypeError(arg, "bytes, unicode");
- return NULL;
- }
+ if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
+ if (!PyBytes_Check(arg) && !PyUnicode_Check(arg)) {
+ FormatTypeError(arg, "bytes, unicode");
+ return NULL;
+ }
if (!IsValidUTF8(arg) && !AllowInvalidUTF8(descriptor)) {
- PyObject* repr = PyObject_Repr(arg);
- PyErr_Format(PyExc_ValueError,
- "%s has type str, but isn't valid UTF-8 "
- "encoding. Non-UTF-8 strings must be converted to "
- "unicode objects before being added.",
- PyString_AsString(repr));
- Py_DECREF(repr);
- return NULL;
- }
- } else if (!PyBytes_Check(arg)) {
+ PyObject* repr = PyObject_Repr(arg);
+ PyErr_Format(PyExc_ValueError,
+ "%s has type str, but isn't valid UTF-8 "
+ "encoding. Non-UTF-8 strings must be converted to "
+ "unicode objects before being added.",
+ PyString_AsString(repr));
+ Py_DECREF(repr);
+ return NULL;
+ }
+ } else if (!PyBytes_Check(arg)) {
FormatTypeError(arg, "bytes");
return NULL;
}
- PyObject* encoded_string = NULL;
- if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
- if (PyBytes_Check(arg)) {
- // The bytes were already validated as correctly encoded UTF-8 above.
- encoded_string = arg; // Already encoded.
- Py_INCREF(encoded_string);
- } else {
- encoded_string = PyUnicode_AsEncodedString(arg, "utf-8", NULL);
- }
- } else {
- // In this case field type is "bytes".
- encoded_string = arg;
- Py_INCREF(encoded_string);
- }
+ PyObject* encoded_string = NULL;
+ if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
+ if (PyBytes_Check(arg)) {
+ // The bytes were already validated as correctly encoded UTF-8 above.
+ encoded_string = arg; // Already encoded.
+ Py_INCREF(encoded_string);
+ } else {
+ encoded_string = PyUnicode_AsEncodedString(arg, "utf-8", NULL);
+ }
+ } else {
+ // In this case field type is "bytes".
+ encoded_string = arg;
+ Py_INCREF(encoded_string);
+ }
- return encoded_string;
+ return encoded_string;
}
bool CheckAndSetString(
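
CheckString enforces the string/bytes split described above: string fields take unicode, or bytes only if they decode as UTF-8, while bytes fields take raw byte strings unchanged. A hedged sketch (field names are illustrative):

    # Sketch only: 'sample_pb2.Sample' with 'name' (string) and 'payload' (bytes) is hypothetical.
    import sample_pb2

    msg = sample_pb2.Sample()
    msg.name = u'caf\u00e9'       # unicode is always fine for string fields
    msg.name = b'plain ascii'     # bytes are accepted if they are valid UTF-8
    try:
        msg.name = b'\xff\xfe'    # invalid UTF-8 is rejected
    except ValueError as e:
        print('rejected:', e)
    msg.payload = b'\xff\xfe'     # bytes fields accept arbitrary byte strings
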
@@ -839,9 +839,9 @@ bool CheckFieldBelongsToMessage(const FieldDescriptor* field_descriptor,
namespace cmessage {
-PyMessageFactory* GetFactoryForMessage(CMessage* message) {
+PyMessageFactory* GetFactoryForMessage(CMessage* message) {
GOOGLE_DCHECK(PyObject_TypeCheck(message, CMessage_Type));
- return reinterpret_cast<CMessageClass*>(Py_TYPE(message))->py_message_factory;
+ return reinterpret_cast<CMessageClass*>(Py_TYPE(message))->py_message_factory;
}
static int MaybeReleaseOverlappingOneofField(
@@ -996,16 +996,16 @@ int DeleteRepeatedField(
int min, max;
length = reflection->FieldSize(*message, field_descriptor);
- if (PySlice_Check(slice)) {
+ if (PySlice_Check(slice)) {
from = to = step = slice_length = 0;
-#if PY_MAJOR_VERSION < 3
+#if PY_MAJOR_VERSION < 3
PySlice_GetIndicesEx(
reinterpret_cast<PySliceObject*>(slice),
- length, &from, &to, &step, &slice_length);
+ length, &from, &to, &step, &slice_length);
#else
- PySlice_GetIndicesEx(
+ PySlice_GetIndicesEx(
slice,
- length, &from, &to, &step, &slice_length);
+ length, &from, &to, &step, &slice_length);
#endif
if (from < to) {
min = from;
@@ -1015,23 +1015,23 @@ int DeleteRepeatedField(
max = from;
}
} else {
- from = to = PyLong_AsLong(slice);
- if (from == -1 && PyErr_Occurred()) {
- PyErr_SetString(PyExc_TypeError, "list indices must be integers");
- return -1;
- }
-
- if (from < 0) {
- from = to = length + from;
- }
- step = 1;
- min = max = from;
-
- // Range check.
- if (from < 0 || from >= length) {
- PyErr_Format(PyExc_IndexError, "list assignment index out of range");
- return -1;
- }
+ from = to = PyLong_AsLong(slice);
+ if (from == -1 && PyErr_Occurred()) {
+ PyErr_SetString(PyExc_TypeError, "list indices must be integers");
+ return -1;
+ }
+
+ if (from < 0) {
+ from = to = length + from;
+ }
+ step = 1;
+ min = max = from;
+
+ // Range check.
+ if (from < 0 || from >= length) {
+ PyErr_Format(PyExc_IndexError, "list assignment index out of range");
+ return -1;
+ }
}
Py_ssize_t i = from;
@@ -1214,8 +1214,8 @@ int InitAttributes(CMessage* self, PyObject* args, PyObject* kwargs) {
}
CMessage* cmessage = reinterpret_cast<CMessage*>(message.get());
if (PyDict_Check(value)) {
- // Make the message exist even if the dict is empty.
- AssureWritable(cmessage);
+ // Make the message exist even if the dict is empty.
+ AssureWritable(cmessage);
if (InitAttributes(cmessage, NULL, value) < 0) {
return -1;
}
@@ -1313,9 +1313,9 @@ static int Init(CMessage* self, PyObject* args, PyObject* kwargs) {
// Deallocating a CMessage
static void Dealloc(CMessage* self) {
- if (self->weakreflist) {
- PyObject_ClearWeakRefs(reinterpret_cast<PyObject*>(self));
- }
+ if (self->weakreflist) {
+ PyObject_ClearWeakRefs(reinterpret_cast<PyObject*>(self));
+ }
// At this point all dependent objects have been removed.
GOOGLE_DCHECK(!self->child_submessages || self->child_submessages->empty());
GOOGLE_DCHECK(!self->composite_fields || self->composite_fields->empty());
@@ -1442,7 +1442,7 @@ PyObject* HasField(CMessage* self, PyObject* arg) {
return NULL;
}
#else
- field_name = const_cast<char*>(PyUnicode_AsUTF8AndSize(arg, &size));
+ field_name = const_cast<char*>(PyUnicode_AsUTF8AndSize(arg, &size));
if (!field_name) {
return NULL;
}
@@ -1474,10 +1474,10 @@ PyObject* HasField(CMessage* self, PyObject* arg) {
}
PyObject* ClearExtension(CMessage* self, PyObject* extension) {
- const FieldDescriptor* descriptor = GetExtensionDescriptor(extension);
- if (descriptor == NULL) {
- return NULL;
- }
+ const FieldDescriptor* descriptor = GetExtensionDescriptor(extension);
+ if (descriptor == NULL) {
+ return NULL;
+ }
if (ClearFieldByDescriptor(self, descriptor) < 0) {
return nullptr;
}
@@ -1614,15 +1614,15 @@ int InternalReleaseFieldByDescriptor(
int ClearFieldByDescriptor(CMessage* self,
const FieldDescriptor* field_descriptor) {
- if (!CheckFieldBelongsToMessage(field_descriptor, self->message)) {
+ if (!CheckFieldBelongsToMessage(field_descriptor, self->message)) {
return -1;
}
if (InternalReleaseFieldByDescriptor(self, field_descriptor) < 0) {
return -1;
}
AssureWritable(self);
- Message* message = self->message;
- message->GetReflection()->ClearField(message, field_descriptor);
+ Message* message = self->message;
+ message->GetReflection()->ClearField(message, field_descriptor);
return 0;
}
@@ -1691,25 +1691,25 @@ static TProtoStringType GetMessageName(CMessage* self) {
}
}
-static PyObject* InternalSerializeToString(
- CMessage* self, PyObject* args, PyObject* kwargs,
- bool require_initialized) {
- // Parse the "deterministic" kwarg; defaults to False.
+static PyObject* InternalSerializeToString(
+ CMessage* self, PyObject* args, PyObject* kwargs,
+ bool require_initialized) {
+ // Parse the "deterministic" kwarg; defaults to False.
static const char* kwlist[] = {"deterministic", 0};
- PyObject* deterministic_obj = Py_None;
+ PyObject* deterministic_obj = Py_None;
if (!PyArg_ParseTupleAndKeywords(
args, kwargs, "|O", const_cast<char**>(kwlist), &deterministic_obj)) {
- return NULL;
- }
- // Preemptively convert to a bool first, so we don't need to back out of
- // allocating memory if this raises an exception.
- // NOTE: This is unused later if deterministic == Py_None, but that's fine.
- int deterministic = PyObject_IsTrue(deterministic_obj);
- if (deterministic < 0) {
- return NULL;
- }
-
- if (require_initialized && !self->message->IsInitialized()) {
+ return NULL;
+ }
+ // Preemptively convert to a bool first, so we don't need to back out of
+ // allocating memory if this raises an exception.
+ // NOTE: This is unused later if deterministic == Py_None, but that's fine.
+ int deterministic = PyObject_IsTrue(deterministic_obj);
+ if (deterministic < 0) {
+ return NULL;
+ }
+
+ if (require_initialized && !self->message->IsInitialized()) {
ScopedPyObjectPtr errors(FindInitializationErrors(self));
if (errors == NULL) {
return NULL;
@@ -1747,10 +1747,10 @@ static PyObject* InternalSerializeToString(
GetMessageName(self).c_str(), PyString_AsString(joined.get()));
return NULL;
}
-
- // Ok, arguments parsed and errors checked, now encode to a string
- const size_t size = self->message->ByteSizeLong();
- if (size == 0) {
+
+ // Ok, arguments parsed and errors checked, now encode to a string
+ const size_t size = self->message->ByteSizeLong();
+ if (size == 0) {
return PyBytes_FromString("");
}
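
InternalSerializeToString parses an optional `deterministic` keyword and, for SerializeToString, refuses to encode a message whose required fields are unset; SerializePartialToString skips that check. Usage from Python looks roughly like this (the message and its required field are illustrative proto2 names):

    # Sketch only: 'config_pb2.Config' with a required field 'id' is hypothetical.
    import config_pb2
    from google.protobuf import message

    cfg = config_pb2.Config()
    partial = cfg.SerializePartialToString()   # OK even though 'id' is unset
    try:
        cfg.SerializeToString()                # required field missing
    except message.EncodeError as e:
        print('not initialized:', e)

    cfg.id = 7
    stable = cfg.SerializeToString(deterministic=True)  # stable map ordering
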
@@ -1766,28 +1766,28 @@ static PyObject* InternalSerializeToString(
if (result == NULL) {
return NULL;
}
- io::ArrayOutputStream out(PyBytes_AS_STRING(result), size);
- io::CodedOutputStream coded_out(&out);
- if (deterministic_obj != Py_None) {
- coded_out.SetSerializationDeterministic(deterministic);
- }
- self->message->SerializeWithCachedSizes(&coded_out);
- GOOGLE_CHECK(!coded_out.HadError());
+ io::ArrayOutputStream out(PyBytes_AS_STRING(result), size);
+ io::CodedOutputStream coded_out(&out);
+ if (deterministic_obj != Py_None) {
+ coded_out.SetSerializationDeterministic(deterministic);
+ }
+ self->message->SerializeWithCachedSizes(&coded_out);
+ GOOGLE_CHECK(!coded_out.HadError());
return result;
}
-static PyObject* SerializeToString(
- CMessage* self, PyObject* args, PyObject* kwargs) {
- return InternalSerializeToString(self, args, kwargs,
- /*require_initialized=*/true);
+static PyObject* SerializeToString(
+ CMessage* self, PyObject* args, PyObject* kwargs) {
+ return InternalSerializeToString(self, args, kwargs,
+ /*require_initialized=*/true);
+}
+
+static PyObject* SerializePartialToString(
+ CMessage* self, PyObject* args, PyObject* kwargs) {
+ return InternalSerializeToString(self, args, kwargs,
+ /*require_initialized=*/false);
}
-static PyObject* SerializePartialToString(
- CMessage* self, PyObject* args, PyObject* kwargs) {
- return InternalSerializeToString(self, args, kwargs,
- /*require_initialized=*/false);
-}
-
// Formats proto fields for ascii dumps using python formatting functions where
// appropriate.
class PythonFieldValuePrinter : public TextFormat::FastFieldValuePrinter {
@@ -1920,11 +1920,11 @@ static PyObject* CopyFrom(CMessage* self, PyObject* arg) {
// get OOM errors. The protobuf APIs do not provide any tools for processing
// protobufs in chunks. If you have protos this big you should break them up if
// it is at all convenient to do so.
-#ifdef PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
-static bool allow_oversize_protos = true;
-#else
+#ifdef PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
+static bool allow_oversize_protos = true;
+#else
static bool allow_oversize_protos = false;
-#endif
+#endif
// Provide a method in the module to set allow_oversize_protos to a boolean
// value. This method returns the new value of allow_oversize_protos.
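
The flag above is normally toggled through the module-level hook mentioned in this comment. The import path below is an assumption about how the C extension module is exposed; it only applies when the C++ implementation is in use:

    # Sketch only: assumes the _message C extension module exposes the hook
    # described in the comment above.
    from google.protobuf.pyext import _message

    # Allow parsing serialized protos beyond the default recursion/size guard.
    _message.SetAllowOversizeProtos(True)
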
@@ -1950,7 +1950,7 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
AssureWritable(self);
- PyMessageFactory* factory = GetFactoryForMessage(self);
+ PyMessageFactory* factory = GetFactoryForMessage(self);
int depth = allow_oversize_protos
? INT_MAX
: io::CodedInputStream::GetDefaultRecursionLimit();
@@ -1998,27 +1998,27 @@ static PyObject* ByteSize(CMessage* self, PyObject* args) {
return PyLong_FromLong(self->message->ByteSizeLong());
}
-PyObject* RegisterExtension(PyObject* cls, PyObject* extension_handle) {
+PyObject* RegisterExtension(PyObject* cls, PyObject* extension_handle) {
const FieldDescriptor* descriptor =
GetExtensionDescriptor(extension_handle);
if (descriptor == NULL) {
return NULL;
}
if (!PyObject_TypeCheck(cls, CMessageClass_Type)) {
- PyErr_Format(PyExc_TypeError, "Expected a message class, got %s",
- cls->ob_type->tp_name);
+ PyErr_Format(PyExc_TypeError, "Expected a message class, got %s",
+ cls->ob_type->tp_name);
return NULL;
}
- CMessageClass *message_class = reinterpret_cast<CMessageClass*>(cls);
- if (message_class == NULL) {
+ CMessageClass *message_class = reinterpret_cast<CMessageClass*>(cls);
+ if (message_class == NULL) {
return NULL;
}
// If the extension was already registered, check that it is the same.
- const FieldDescriptor* existing_extension =
- message_class->py_message_factory->pool->pool->FindExtensionByNumber(
- descriptor->containing_type(), descriptor->number());
- if (existing_extension != NULL && existing_extension != descriptor) {
- PyErr_SetString(PyExc_ValueError, "Double registration of Extensions");
+ const FieldDescriptor* existing_extension =
+ message_class->py_message_factory->pool->pool->FindExtensionByNumber(
+ descriptor->containing_type(), descriptor->number());
+ if (existing_extension != NULL && existing_extension != descriptor) {
+ PyErr_SetString(PyExc_ValueError, "Double registration of Extensions");
return NULL;
}
Py_RETURN_NONE;
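
RegisterExtension only verifies that the same field number is not claimed twice on a message class; generated _pb2 modules call it for every extension they declare. A hedged sketch of a manual call (the modules and the extension handle are hypothetical):

    # Sketch only: 'base_pb2.Base' and the extension handle 'other_pb2.my_ext' are hypothetical.
    import base_pb2
    import other_pb2

    # Normally emitted by the code generator; re-registering the same
    # descriptor is a no-op, while registering a *different* extension that
    # reuses an already-registered number raises the ValueError seen above
    # ("Double registration of Extensions").
    base_pb2.Base.RegisterExtension(other_pb2.my_ext)
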
@@ -2086,8 +2086,8 @@ static PyObject* ListFields(CMessage* self) {
// is no message class and we cannot retrieve the value.
// TODO(amauryfa): consider building the class on the fly!
if (fields[i]->message_type() != NULL &&
- message_factory::GetMessageClass(
- GetFactoryForMessage(self),
+ message_factory::GetMessageClass(
+ GetFactoryForMessage(self),
fields[i]->message_type()) == NULL) {
PyErr_Clear();
continue;
@@ -2264,14 +2264,14 @@ PyObject* InternalGetScalar(const Message* message,
CMessage* InternalGetSubMessage(
CMessage* self, const FieldDescriptor* field_descriptor) {
const Reflection* reflection = self->message->GetReflection();
- PyMessageFactory* factory = GetFactoryForMessage(self);
+ PyMessageFactory* factory = GetFactoryForMessage(self);
const Message& sub_message = reflection->GetMessage(
- *self->message, field_descriptor, factory->message_factory);
+ *self->message, field_descriptor, factory->message_factory);
- CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
- factory, field_descriptor->message_type());
+ CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
+ factory, field_descriptor->message_type());
ScopedPyObjectPtr message_class_owner(
- reinterpret_cast<PyObject*>(message_class));
+ reinterpret_cast<PyObject*>(message_class));
if (message_class == NULL) {
return NULL;
}
@@ -2488,24 +2488,24 @@ static PyObject* UnknownFieldSet(CMessage* self) {
return self->unknown_field_set;
}
-static PyObject* GetExtensionsByName(CMessage *self, void *closure) {
- return message_meta::GetExtensionsByName(
- reinterpret_cast<CMessageClass*>(Py_TYPE(self)), closure);
-}
-
-static PyObject* GetExtensionsByNumber(CMessage *self, void *closure) {
- return message_meta::GetExtensionsByNumber(
- reinterpret_cast<CMessageClass*>(Py_TYPE(self)), closure);
-}
-
+static PyObject* GetExtensionsByName(CMessage *self, void *closure) {
+ return message_meta::GetExtensionsByName(
+ reinterpret_cast<CMessageClass*>(Py_TYPE(self)), closure);
+}
+
+static PyObject* GetExtensionsByNumber(CMessage *self, void *closure) {
+ return message_meta::GetExtensionsByNumber(
+ reinterpret_cast<CMessageClass*>(Py_TYPE(self)), closure);
+}
+
static PyGetSetDef Getters[] = {
{"Extensions", (getter)GetExtensionDict, NULL, "Extension dict"},
- {"_extensions_by_name", (getter)GetExtensionsByName, NULL},
- {"_extensions_by_number", (getter)GetExtensionsByNumber, NULL},
+ {"_extensions_by_name", (getter)GetExtensionsByName, NULL},
+ {"_extensions_by_number", (getter)GetExtensionsByNumber, NULL},
{NULL}
};
-
+
static PyMethodDef Methods[] = {
{ "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
"Makes a deep copy of the class." },
@@ -2545,10 +2545,10 @@ static PyMethodDef Methods[] = {
{ "RegisterExtension", (PyCFunction)RegisterExtension, METH_O | METH_CLASS,
"Registers an extension with the current message." },
{ "SerializePartialToString", (PyCFunction)SerializePartialToString,
- METH_VARARGS | METH_KEYWORDS,
+ METH_VARARGS | METH_KEYWORDS,
"Serializes the message to a string, even if it isn't initialized." },
- { "SerializeToString", (PyCFunction)SerializeToString,
- METH_VARARGS | METH_KEYWORDS,
+ { "SerializeToString", (PyCFunction)SerializeToString,
+ METH_VARARGS | METH_KEYWORDS,
"Serializes the message to a string, only for initialized messages." },
{ "SetInParent", (PyCFunction)SetInParent, METH_NOARGS,
"Sets the has bit of the given field in its parent message." },
@@ -2628,8 +2628,8 @@ PyObject* GetFieldValue(CMessage* self,
const Descriptor* entry_type = field_descriptor->message_type();
const FieldDescriptor* value_type = entry_type->FindFieldByName("value");
if (value_type->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- CMessageClass* value_class = message_factory::GetMessageClass(
- GetFactoryForMessage(self), value_type->message_type());
+ CMessageClass* value_class = message_factory::GetMessageClass(
+ GetFactoryForMessage(self), value_type->message_type());
if (value_class == NULL) {
return NULL;
}
@@ -2640,8 +2640,8 @@ PyObject* GetFieldValue(CMessage* self,
}
} else if (field_descriptor->is_repeated()) {
if (field_descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
- CMessageClass* message_class = message_factory::GetMessageClass(
- GetFactoryForMessage(self), field_descriptor->message_type());
+ CMessageClass* message_class = message_factory::GetMessageClass(
+ GetFactoryForMessage(self), field_descriptor->message_type());
if (message_class == NULL) {
return NULL;
}
@@ -2795,7 +2795,7 @@ static CMessageClass _CMessage_Type = { { {
0, // tp_traverse
0, // tp_clear
(richcmpfunc)cmessage::RichCompare, // tp_richcompare
- offsetof(CMessage, weakreflist), // tp_weaklistoffset
+ offsetof(CMessage, weakreflist), // tp_weaklistoffset
0, // tp_iter
0, // tp_iternext
cmessage::Methods, // tp_methods
@@ -2946,11 +2946,11 @@ bool InitProto2MessageModule(PyObject *m) {
return false;
}
- // Initialize types and globals in message_factory.cc
- if (!InitMessageFactory()) {
- return false;
- }
-
+ // Initialize types and globals in message_factory.cc
+ if (!InitMessageFactory()) {
+ return false;
+ }
+
// Initialize constants defined in this file.
InitGlobals();
@@ -3037,15 +3037,15 @@ bool InitProto2MessageModule(PyObject *m) {
reinterpret_cast<PyObject*>(&PyUnknownFieldRef_Type));
// Initialize Map container types.
- if (!InitMapContainers()) {
- return false;
- }
- PyModule_AddObject(m, "ScalarMapContainer",
- reinterpret_cast<PyObject*>(ScalarMapContainer_Type));
- PyModule_AddObject(m, "MessageMapContainer",
- reinterpret_cast<PyObject*>(MessageMapContainer_Type));
- PyModule_AddObject(m, "MapIterator",
- reinterpret_cast<PyObject*>(&MapIterator_Type));
+ if (!InitMapContainers()) {
+ return false;
+ }
+ PyModule_AddObject(m, "ScalarMapContainer",
+ reinterpret_cast<PyObject*>(ScalarMapContainer_Type));
+ PyModule_AddObject(m, "MessageMapContainer",
+ reinterpret_cast<PyObject*>(MessageMapContainer_Type));
+ PyModule_AddObject(m, "MapIterator",
+ reinterpret_cast<PyObject*>(&MapIterator_Type));
if (PyType_Ready(&ExtensionDict_Type) < 0) {
return false;
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/message.h b/contrib/python/protobuf/py2/google/protobuf/pyext/message.h
index f4dcfd2150..ca81a87521 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/message.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/message.h
@@ -56,7 +56,7 @@ class MessageFactory;
namespace python {
struct ExtensionDict;
-struct PyMessageFactory;
+struct PyMessageFactory;
struct CMessageClass;
// Most of the complexity of the Message class comes from the "Release"
@@ -122,12 +122,12 @@ typedef struct CMessage : public ContainerBase {
// This avoid the creation of similar maps in each of those containers.
typedef std::unordered_map<const Message*, CMessage*> SubMessagesMap;
SubMessagesMap* child_submessages;
-
+
// A reference to PyUnknownFields.
PyObject* unknown_field_set;
- // Implements the "weakref" protocol for this object.
- PyObject* weakreflist;
+ // Implements the "weakref" protocol for this object.
+ PyObject* weakreflist;
// Return a *borrowed* reference to the message class.
CMessageClass* GetMessageClass() {
@@ -158,11 +158,11 @@ struct CMessageClass {
// This reference must stay alive until all message pointers are destructed.
PyObject* py_message_descriptor;
- // The Python MessageFactory used to create the class. It is needed to resolve
+ // The Python MessageFactory used to create the class. It is needed to resolve
// fields descriptors, including extensions fields; its C++ MessageFactory is
// used to instantiate submessages.
// This reference must stay alive until all message pointers are destructed.
- PyMessageFactory* py_message_factory;
+ PyMessageFactory* py_message_factory;
PyObject* AsPyObject() {
return reinterpret_cast<PyObject*>(this);
@@ -251,10 +251,10 @@ int InitAttributes(CMessage* self, PyObject* args, PyObject* kwargs);
PyObject* MergeFrom(CMessage* self, PyObject* arg);
-// This method does not do anything beyond checking that no other extension
-// has been registered with the same field number on this class.
-PyObject* RegisterExtension(PyObject* cls, PyObject* extension_handle);
-
+// This method does not do anything beyond checking that no other extension
+// has been registered with the same field number on this class.
+PyObject* RegisterExtension(PyObject* cls, PyObject* extension_handle);
+
// Get a field from a message.
PyObject* GetFieldValue(CMessage* self,
const FieldDescriptor* field_descriptor);
@@ -267,13 +267,13 @@ PyObject* FindInitializationErrors(CMessage* self);
int AssureWritable(CMessage* self);
-// Returns the message factory for the given message.
-// This is equivalent to message.MESSAGE_FACTORY
+// Returns the message factory for the given message.
+// This is equivalent to message.MESSAGE_FACTORY
//
-// The returned factory is suitable for finding fields and building submessages,
+// The returned factory is suitable for finding fields and building submessages,
// even in the case of extensions.
-// Returns a *borrowed* reference, and never fails because we pass a CMessage.
-PyMessageFactory* GetFactoryForMessage(CMessage* message);
+// Returns a *borrowed* reference, and never fails because we pass a CMessage.
+PyMessageFactory* GetFactoryForMessage(CMessage* message);
PyObject* SetAllowOversizeProtos(PyObject* m, PyObject* arg);
@@ -332,7 +332,7 @@ PyObject* SetAllowOversizeProtos(PyObject* m, PyObject* arg);
void FormatTypeError(PyObject* arg, const char* expected_types);
template<class T>
-bool CheckAndGetInteger(PyObject* arg, T* value);
+bool CheckAndGetInteger(PyObject* arg, T* value);
bool CheckAndGetDouble(PyObject* arg, double* value);
bool CheckAndGetFloat(PyObject* arg, float* value);
bool CheckAndGetBool(PyObject* arg, bool* value);
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.cc
index 1e4bb99b7b..7905be0214 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.cc
@@ -1,122 +1,122 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
#include <unordered_map>
-#include <Python.h>
-
+#include <Python.h>
+
#include <google/protobuf/dynamic_message.h>
#include <google/protobuf/pyext/descriptor.h>
#include <google/protobuf/pyext/message.h>
#include <google/protobuf/pyext/message_factory.h>
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-
-#if PY_MAJOR_VERSION >= 3
- #if PY_VERSION_HEX < 0x03030000
- #error "Python 3.0 - 3.2 are not supported."
- #endif
- #define PyString_AsStringAndSize(ob, charpp, sizep) \
+
+#if PY_MAJOR_VERSION >= 3
+ #if PY_VERSION_HEX < 0x03030000
+ #error "Python 3.0 - 3.2 are not supported."
+ #endif
+ #define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) ? ((*(charpp) = const_cast<char*>( \
PyUnicode_AsUTF8AndSize(ob, (sizep)))) == NULL \
? -1 \
: 0) \
: PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
-#endif
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-namespace message_factory {
-
-PyMessageFactory* NewMessageFactory(PyTypeObject* type, PyDescriptorPool* pool) {
- PyMessageFactory* factory = reinterpret_cast<PyMessageFactory*>(
- PyType_GenericAlloc(type, 0));
- if (factory == NULL) {
- return NULL;
- }
-
- DynamicMessageFactory* message_factory = new DynamicMessageFactory();
- // This option might be the default some day.
- message_factory->SetDelegateToGeneratedFactory(true);
- factory->message_factory = message_factory;
-
- factory->pool = pool;
+#endif
+
+namespace google {
+namespace protobuf {
+namespace python {
+
+namespace message_factory {
+
+PyMessageFactory* NewMessageFactory(PyTypeObject* type, PyDescriptorPool* pool) {
+ PyMessageFactory* factory = reinterpret_cast<PyMessageFactory*>(
+ PyType_GenericAlloc(type, 0));
+ if (factory == NULL) {
+ return NULL;
+ }
+
+ DynamicMessageFactory* message_factory = new DynamicMessageFactory();
+ // This option might be the default some day.
+ message_factory->SetDelegateToGeneratedFactory(true);
+ factory->message_factory = message_factory;
+
+ factory->pool = pool;
Py_INCREF(pool);
-
- factory->classes_by_descriptor = new PyMessageFactory::ClassesByMessageMap();
-
- return factory;
-}
-
-PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
+
+ factory->classes_by_descriptor = new PyMessageFactory::ClassesByMessageMap();
+
+ return factory;
+}
+
+PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
static const char* kwlist[] = {"pool", 0};
- PyObject* pool = NULL;
+ PyObject* pool = NULL;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O",
const_cast<char**>(kwlist), &pool)) {
- return NULL;
- }
- ScopedPyObjectPtr owned_pool;
- if (pool == NULL || pool == Py_None) {
- owned_pool.reset(PyObject_CallFunction(
- reinterpret_cast<PyObject*>(&PyDescriptorPool_Type), NULL));
- if (owned_pool == NULL) {
- return NULL;
- }
- pool = owned_pool.get();
- } else {
- if (!PyObject_TypeCheck(pool, &PyDescriptorPool_Type)) {
- PyErr_Format(PyExc_TypeError, "Expected a DescriptorPool, got %s",
- pool->ob_type->tp_name);
- return NULL;
- }
- }
-
- return reinterpret_cast<PyObject*>(
- NewMessageFactory(type, reinterpret_cast<PyDescriptorPool*>(pool)));
-}
-
+ return NULL;
+ }
+ ScopedPyObjectPtr owned_pool;
+ if (pool == NULL || pool == Py_None) {
+ owned_pool.reset(PyObject_CallFunction(
+ reinterpret_cast<PyObject*>(&PyDescriptorPool_Type), NULL));
+ if (owned_pool == NULL) {
+ return NULL;
+ }
+ pool = owned_pool.get();
+ } else {
+ if (!PyObject_TypeCheck(pool, &PyDescriptorPool_Type)) {
+ PyErr_Format(PyExc_TypeError, "Expected a DescriptorPool, got %s",
+ pool->ob_type->tp_name);
+ return NULL;
+ }
+ }
+
+ return reinterpret_cast<PyObject*>(
+ NewMessageFactory(type, reinterpret_cast<PyDescriptorPool*>(pool)));
+}
+
static void Dealloc(PyObject* pself) {
PyMessageFactory* self = reinterpret_cast<PyMessageFactory*>(pself);
- typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
- for (iterator it = self->classes_by_descriptor->begin();
- it != self->classes_by_descriptor->end(); ++it) {
+ typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
+ for (iterator it = self->classes_by_descriptor->begin();
+ it != self->classes_by_descriptor->end(); ++it) {
Py_CLEAR(it->second);
- }
- delete self->classes_by_descriptor;
- delete self->message_factory;
+ }
+ delete self->classes_by_descriptor;
+ delete self->message_factory;
Py_CLEAR(self->pool);
Py_TYPE(self)->tp_free(pself);
-}
-
+}
+
static int GcTraverse(PyObject* pself, visitproc visit, void* arg) {
PyMessageFactory* self = reinterpret_cast<PyMessageFactory*>(pself);
Py_VISIT(self->pool);
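
PyMessageFactory is the C++ backing for google.protobuf.message_factory.MessageFactory: it owns a DynamicMessageFactory, keeps a descriptor-to-class map, and holds a reference to its DescriptorPool. A small sketch of the Python-level entry point (the file and message names added to the pool are illustrative):

    # Sketch only: the proto definitions built here are illustrative.
    from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

    pool = descriptor_pool.DescriptorPool()
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'dynamic/example.proto'
    file_proto.package = 'dynamic'
    msg_proto = file_proto.message_type.add()
    msg_proto.name = 'Pair'
    field = msg_proto.field.add()
    field.name = 'key'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    pool.Add(file_proto)

    factory = message_factory.MessageFactory(pool)
    PairClass = factory.GetPrototype(pool.FindMessageTypeByName('dynamic.Pair'))
    print(PairClass(key='example'))
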
@@ -137,120 +137,120 @@ static int GcClear(PyObject* pself) {
return 0;
}
-// Add a message class to our database.
-int RegisterMessageClass(PyMessageFactory* self,
- const Descriptor* message_descriptor,
- CMessageClass* message_class) {
- Py_INCREF(message_class);
- typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
- std::pair<iterator, bool> ret = self->classes_by_descriptor->insert(
- std::make_pair(message_descriptor, message_class));
- if (!ret.second) {
- // Update case: DECREF the previous value.
- Py_DECREF(ret.first->second);
- ret.first->second = message_class;
- }
- return 0;
-}
-
-CMessageClass* GetOrCreateMessageClass(PyMessageFactory* self,
- const Descriptor* descriptor) {
- // This is the same implementation as MessageFactory.GetPrototype().
-
- // Do not create a MessageClass that already exists.
+// Add a message class to our database.
+int RegisterMessageClass(PyMessageFactory* self,
+ const Descriptor* message_descriptor,
+ CMessageClass* message_class) {
+ Py_INCREF(message_class);
+ typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
+ std::pair<iterator, bool> ret = self->classes_by_descriptor->insert(
+ std::make_pair(message_descriptor, message_class));
+ if (!ret.second) {
+ // Update case: DECREF the previous value.
+ Py_DECREF(ret.first->second);
+ ret.first->second = message_class;
+ }
+ return 0;
+}
+
+CMessageClass* GetOrCreateMessageClass(PyMessageFactory* self,
+ const Descriptor* descriptor) {
+ // This is the same implementation as MessageFactory.GetPrototype().
+
+ // Do not create a MessageClass that already exists.
std::unordered_map<const Descriptor*, CMessageClass*>::iterator it =
- self->classes_by_descriptor->find(descriptor);
- if (it != self->classes_by_descriptor->end()) {
- Py_INCREF(it->second);
- return it->second;
- }
- ScopedPyObjectPtr py_descriptor(
- PyMessageDescriptor_FromDescriptor(descriptor));
- if (py_descriptor == NULL) {
- return NULL;
- }
- // Create a new message class.
- ScopedPyObjectPtr args(Py_BuildValue(
- "s(){sOsOsO}", descriptor->name().c_str(),
- "DESCRIPTOR", py_descriptor.get(),
- "__module__", Py_None,
- "message_factory", self));
- if (args == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr message_class(PyObject_CallObject(
+ self->classes_by_descriptor->find(descriptor);
+ if (it != self->classes_by_descriptor->end()) {
+ Py_INCREF(it->second);
+ return it->second;
+ }
+ ScopedPyObjectPtr py_descriptor(
+ PyMessageDescriptor_FromDescriptor(descriptor));
+ if (py_descriptor == NULL) {
+ return NULL;
+ }
+ // Create a new message class.
+ ScopedPyObjectPtr args(Py_BuildValue(
+ "s(){sOsOsO}", descriptor->name().c_str(),
+ "DESCRIPTOR", py_descriptor.get(),
+ "__module__", Py_None,
+ "message_factory", self));
+ if (args == NULL) {
+ return NULL;
+ }
+ ScopedPyObjectPtr message_class(PyObject_CallObject(
reinterpret_cast<PyObject*>(CMessageClass_Type), args.get()));
- if (message_class == NULL) {
- return NULL;
- }
- // Create messages class for the messages used by the fields, and registers
- // all extensions for these messages during the recursion.
- for (int field_idx = 0; field_idx < descriptor->field_count(); field_idx++) {
- const Descriptor* sub_descriptor =
- descriptor->field(field_idx)->message_type();
- // It is NULL if the field type is not a message.
- if (sub_descriptor != NULL) {
- CMessageClass* result = GetOrCreateMessageClass(self, sub_descriptor);
- if (result == NULL) {
- return NULL;
- }
- Py_DECREF(result);
- }
- }
-
- // Register extensions defined in this message.
- for (int ext_idx = 0 ; ext_idx < descriptor->extension_count() ; ext_idx++) {
- const FieldDescriptor* extension = descriptor->extension(ext_idx);
- ScopedPyObjectPtr py_extended_class(
- GetOrCreateMessageClass(self, extension->containing_type())
- ->AsPyObject());
- if (py_extended_class == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr py_extension(PyFieldDescriptor_FromDescriptor(extension));
- if (py_extension == NULL) {
- return NULL;
- }
- ScopedPyObjectPtr result(cmessage::RegisterExtension(
- py_extended_class.get(), py_extension.get()));
- if (result == NULL) {
- return NULL;
- }
- }
- return reinterpret_cast<CMessageClass*>(message_class.release());
-}
-
-// Retrieve the message class added to our database.
-CMessageClass* GetMessageClass(PyMessageFactory* self,
- const Descriptor* message_descriptor) {
- typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
- iterator ret = self->classes_by_descriptor->find(message_descriptor);
- if (ret == self->classes_by_descriptor->end()) {
- PyErr_Format(PyExc_TypeError, "No message class registered for '%s'",
- message_descriptor->full_name().c_str());
- return NULL;
- } else {
- return ret->second;
- }
-}
-
-static PyMethodDef Methods[] = {
- {NULL}};
-
-static PyObject* GetPool(PyMessageFactory* self, void* closure) {
- Py_INCREF(self->pool);
- return reinterpret_cast<PyObject*>(self->pool);
-}
-
-static PyGetSetDef Getters[] = {
- {"pool", (getter)GetPool, NULL, "DescriptorPool"},
- {NULL}
-};
-
-} // namespace message_factory
-
-PyTypeObject PyMessageFactory_Type = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
+ if (message_class == NULL) {
+ return NULL;
+ }
+  // Create message classes for the messages used by the fields, and register
+ // all extensions for these messages during the recursion.
+ for (int field_idx = 0; field_idx < descriptor->field_count(); field_idx++) {
+ const Descriptor* sub_descriptor =
+ descriptor->field(field_idx)->message_type();
+ // It is NULL if the field type is not a message.
+ if (sub_descriptor != NULL) {
+ CMessageClass* result = GetOrCreateMessageClass(self, sub_descriptor);
+ if (result == NULL) {
+ return NULL;
+ }
+ Py_DECREF(result);
+ }
+ }
+
+ // Register extensions defined in this message.
+ for (int ext_idx = 0 ; ext_idx < descriptor->extension_count() ; ext_idx++) {
+ const FieldDescriptor* extension = descriptor->extension(ext_idx);
+ ScopedPyObjectPtr py_extended_class(
+ GetOrCreateMessageClass(self, extension->containing_type())
+ ->AsPyObject());
+ if (py_extended_class == NULL) {
+ return NULL;
+ }
+ ScopedPyObjectPtr py_extension(PyFieldDescriptor_FromDescriptor(extension));
+ if (py_extension == NULL) {
+ return NULL;
+ }
+ ScopedPyObjectPtr result(cmessage::RegisterExtension(
+ py_extended_class.get(), py_extension.get()));
+ if (result == NULL) {
+ return NULL;
+ }
+ }
+ return reinterpret_cast<CMessageClass*>(message_class.release());
+}
+
+// Retrieve the message class added to our database.
+CMessageClass* GetMessageClass(PyMessageFactory* self,
+ const Descriptor* message_descriptor) {
+ typedef PyMessageFactory::ClassesByMessageMap::iterator iterator;
+ iterator ret = self->classes_by_descriptor->find(message_descriptor);
+ if (ret == self->classes_by_descriptor->end()) {
+ PyErr_Format(PyExc_TypeError, "No message class registered for '%s'",
+ message_descriptor->full_name().c_str());
+ return NULL;
+ } else {
+ return ret->second;
+ }
+}
+
+static PyMethodDef Methods[] = {
+ {NULL}};
+
+static PyObject* GetPool(PyMessageFactory* self, void* closure) {
+ Py_INCREF(self->pool);
+ return reinterpret_cast<PyObject*>(self->pool);
+}
+
+static PyGetSetDef Getters[] = {
+ {"pool", (getter)GetPool, NULL, "DescriptorPool"},
+ {NULL}
+};
+
+} // namespace message_factory
+
+PyTypeObject PyMessageFactory_Type = {
+ PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".MessageFactory", // tp_name
sizeof(PyMessageFactory), // tp_basicsize
0, // tp_itemsize
@@ -289,16 +289,16 @@ PyTypeObject PyMessageFactory_Type = {
0, // tp_alloc
message_factory::New, // tp_new
PyObject_GC_Del, // tp_free
-};
-
-bool InitMessageFactory() {
- if (PyType_Ready(&PyMessageFactory_Type) < 0) {
- return false;
- }
-
- return true;
-}
-
-} // namespace python
-} // namespace protobuf
-} // namespace google
+};
+
+bool InitMessageFactory() {
+ if (PyType_Ready(&PyMessageFactory_Type) < 0) {
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace python
+} // namespace protobuf
+} // namespace google
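
The GetOrCreateMessageClass() shown above mirrors the pure-Python MessageFactory.GetPrototype(): look the class up in classes_by_descriptor, otherwise build it from the descriptor and recurse into the field message types. A minimal Python-level sketch of the same round trip; it builds a throwaway descriptor pool so it runs without any generated _pb2 module, and the 'example.proto'/'Item' names are invented for the illustration.

from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

# Describe a tiny proto2 message by hand instead of relying on protoc output.
pool = descriptor_pool.DescriptorPool()
file_proto = descriptor_pb2.FileDescriptorProto(name='example.proto', package='example')
item_proto = file_proto.message_type.add(name='Item')
field = item_proto.field.add(name='id', number=1)
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
pool.Add(file_proto)

# GetPrototype() returns (and caches) the message class, just like
# GetOrCreateMessageClass() does in the C++ extension.
factory = message_factory.MessageFactory(pool)
Item = factory.GetPrototype(pool.FindMessageTypeByName('example.Item'))
msg = Item(id=42)
print(msg.id)  # 42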
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.h b/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.h
index 54cacfb04f..515c29cdb8 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/message_factory.h
@@ -1,103 +1,103 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
-
-#include <Python.h>
-
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
+#define GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
+
+#include <Python.h>
+
#include <unordered_map>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/pyext/descriptor_pool.h>
-
-namespace google {
-namespace protobuf {
-class MessageFactory;
-
-namespace python {
-
-// The (meta) type of all Messages classes.
-struct CMessageClass;
-
-struct PyMessageFactory {
- PyObject_HEAD
-
- // DynamicMessageFactory used to create C++ instances of messages.
- // This object cache the descriptors that were used, so the DescriptorPool
- // needs to get rid of it before it can delete itself.
- //
- // Note: A C++ MessageFactory is different from the PyMessageFactory.
- // The C++ one creates messages, when the Python one creates classes.
- MessageFactory* message_factory;
-
+
+namespace google {
+namespace protobuf {
+class MessageFactory;
+
+namespace python {
+
+// The (meta) type of all Messages classes.
+struct CMessageClass;
+
+struct PyMessageFactory {
+ PyObject_HEAD
+
+ // DynamicMessageFactory used to create C++ instances of messages.
+  // This object caches the descriptors that were used, so the DescriptorPool
+ // needs to get rid of it before it can delete itself.
+ //
+ // Note: A C++ MessageFactory is different from the PyMessageFactory.
+  // The C++ one creates messages, while the Python one creates classes.
+ MessageFactory* message_factory;
+
// Owned reference to a Python DescriptorPool.
// This reference must stay until the message_factory is destructed.
- PyDescriptorPool* pool;
-
- // Make our own mapping to retrieve Python classes from C++ descriptors.
- //
- // Descriptor pointers stored here are owned by the DescriptorPool above.
- // Python references to classes are owned by this PyDescriptorPool.
+ PyDescriptorPool* pool;
+
+ // Make our own mapping to retrieve Python classes from C++ descriptors.
+ //
+ // Descriptor pointers stored here are owned by the DescriptorPool above.
+ // Python references to classes are owned by this PyDescriptorPool.
typedef std::unordered_map<const Descriptor*, CMessageClass*>
ClassesByMessageMap;
- ClassesByMessageMap* classes_by_descriptor;
-};
-
-extern PyTypeObject PyMessageFactory_Type;
-
-namespace message_factory {
-
-// Creates a new MessageFactory instance.
-PyMessageFactory* NewMessageFactory(PyTypeObject* type, PyDescriptorPool* pool);
-
-// Registers a new Python class for the given message descriptor.
-// On error, returns -1 with a Python exception set.
-int RegisterMessageClass(PyMessageFactory* self,
- const Descriptor* message_descriptor,
- CMessageClass* message_class);
-// Retrieves the Python class registered with the given message descriptor, or
-// fail with a TypeError. Returns a *borrowed* reference.
-CMessageClass* GetMessageClass(PyMessageFactory* self,
- const Descriptor* message_descriptor);
-// Retrieves the Python class registered with the given message descriptor.
-// The class is created if not done yet. Returns a *new* reference.
-CMessageClass* GetOrCreateMessageClass(PyMessageFactory* self,
- const Descriptor* message_descriptor);
-} // namespace message_factory
-
-// Initialize objects used by this module.
-// On error, returns false with a Python exception set.
-bool InitMessageFactory();
-
-} // namespace python
-} // namespace protobuf
+ ClassesByMessageMap* classes_by_descriptor;
+};
+
+extern PyTypeObject PyMessageFactory_Type;
+
+namespace message_factory {
+
+// Creates a new MessageFactory instance.
+PyMessageFactory* NewMessageFactory(PyTypeObject* type, PyDescriptorPool* pool);
+
+// Registers a new Python class for the given message descriptor.
+// On error, returns -1 with a Python exception set.
+int RegisterMessageClass(PyMessageFactory* self,
+ const Descriptor* message_descriptor,
+ CMessageClass* message_class);
+// Retrieves the Python class registered with the given message descriptor, or
+// fails with a TypeError. Returns a *borrowed* reference.
+CMessageClass* GetMessageClass(PyMessageFactory* self,
+ const Descriptor* message_descriptor);
+// Retrieves the Python class registered with the given message descriptor.
+// The class is created if not done yet. Returns a *new* reference.
+CMessageClass* GetOrCreateMessageClass(PyMessageFactory* self,
+ const Descriptor* message_descriptor);
+} // namespace message_factory
+
+// Initialize objects used by this module.
+// On error, returns false with a Python exception set.
+bool InitMessageFactory();
+
+} // namespace python
+} // namespace protobuf
} // namespace google
-
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
+
+#endif // GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_FACTORY_H__
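
A small sketch of what the struct above provides once wrapped for Python: one cached class per descriptor (classes_by_descriptor) and an owned reference to the descriptor pool, surfaced through the pool getter defined in message_factory.cc. google.protobuf.Timestamp is used only because importing timestamp_pb2 guarantees its descriptor is in the default pool.

from google.protobuf import descriptor_pool, message_factory
from google.protobuf import timestamp_pb2  # registers google.protobuf.Timestamp

pool = descriptor_pool.Default()
factory = message_factory.MessageFactory(pool)
desc = pool.FindMessageTypeByName('google.protobuf.Timestamp')

# Repeated lookups hit the per-descriptor cache and return the same class.
cls_a = factory.GetPrototype(desc)
cls_b = factory.GetPrototype(desc)
assert cls_a is cls_b

# The factory keeps a reference to its DescriptorPool for its whole lifetime.
assert factory.pool is pool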
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_composite_container.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_composite_container.cc
index 4029880f88..f3d6fc3092 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_composite_container.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_composite_container.cc
@@ -81,10 +81,10 @@ PyObject* Add(RepeatedCompositeContainer* self, PyObject* args,
Message* message = self->parent->message;
Message* sub_message =
- message->GetReflection()->AddMessage(
- message,
- self->parent_field_descriptor,
- self->child_message_class->py_message_factory->message_factory);
+ message->GetReflection()->AddMessage(
+ message,
+ self->parent_field_descriptor,
+ self->child_message_class->py_message_factory->message_factory);
CMessage* cmsg = self->parent->BuildSubMessageFromPointer(
self->parent_field_descriptor, sub_message, self->child_message_class);
@@ -360,15 +360,15 @@ static PyObject* ToStr(PyObject* pself) {
ScopedPyObjectPtr full_slice(PySlice_New(nullptr, nullptr, nullptr));
if (full_slice == nullptr) {
return nullptr;
- }
+ }
ScopedPyObjectPtr list(Subscript(
reinterpret_cast<RepeatedCompositeContainer*>(pself), full_slice.get()));
if (list == nullptr) {
return nullptr;
- }
- return PyObject_Repr(list.get());
-}
-
+ }
+ return PyObject_Repr(list.get());
+}
+
// ---------------------------------------------------------------------
// sort()
@@ -484,14 +484,14 @@ static PyObject* Pop(PyObject* pself, PyObject* args) {
PyObject* item = GetItem(self, index, length);
if (item == nullptr) {
return nullptr;
- }
+ }
ScopedPyObjectPtr py_index(PyLong_FromSsize_t(index));
if (AssignSubscript(self, py_index.get(), nullptr) < 0) {
return nullptr;
}
return item;
-}
-
+}
+
PyObject* DeepCopy(PyObject* pself, PyObject* arg) {
return reinterpret_cast<RepeatedCompositeContainer*>(pself)->DeepCopy();
}
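
The hunks above touch Add(), ToStr() and Pop() of the repeated composite container, which back add(), repr() and pop() on a repeated message field in Python. A short sketch using FileDescriptorProto.message_type as the repeated message field (any other one would do):

from google.protobuf import descriptor_pb2

f = descriptor_pb2.FileDescriptorProto()
f.message_type.add(name='First')    # Add(): creates and appends a sub-message
f.message_type.add(name='Second')
print(f.message_type)               # ToStr(): repr of a full-slice copy
last = f.message_type.pop()         # Pop(): returns the item, then removes it
assert last.name == 'Second' and len(f.message_type) == 1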
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_scalar_container.cc b/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_scalar_container.cc
index c38c19066b..3a41a58adb 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_scalar_container.cc
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/repeated_scalar_container.cc
@@ -668,14 +668,14 @@ static PyObject* ToStr(PyObject* pself) {
ScopedPyObjectPtr full_slice(PySlice_New(nullptr, nullptr, nullptr));
if (full_slice == nullptr) {
return nullptr;
- }
+ }
ScopedPyObjectPtr list(Subscript(pself, full_slice.get()));
if (list == nullptr) {
return nullptr;
- }
- return PyObject_Repr(list.get());
-}
-
+ }
+ return PyObject_Repr(list.get());
+}
+
static PyObject* MergeFrom(PyObject* pself, PyObject* arg) {
return Extend(reinterpret_cast<RepeatedScalarContainer*>(pself), arg);
}
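
As the context above shows, MergeFrom() on the scalar container simply forwards to Extend(), and ToStr() renders a full-slice copy, so a repeated scalar field behaves much like a Python list. A sketch with FileDescriptorProto.dependency, a repeated string field:

from google.protobuf import descriptor_pb2

a = descriptor_pb2.FileDescriptorProto()
a.dependency.extend(['a.proto', 'b.proto'])   # Extend()
b = descriptor_pb2.FileDescriptorProto()
b.dependency.MergeFrom(a.dependency)          # MergeFrom() forwards to Extend()
print(b.dependency)                           # ToStr(): repr of the full slice
assert list(b.dependency) == ['a.proto', 'b.proto']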
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/safe_numerics.h b/contrib/python/protobuf/py2/google/protobuf/pyext/safe_numerics.h
index b991a73019..93ae640e8b 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/safe_numerics.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/safe_numerics.h
@@ -1,164 +1,164 @@
-// Protocol Buffers - Google's data interchange format
-// Copyright 2008 Google Inc. All rights reserved.
-// https://developers.google.com/protocol-buffers/
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
-#define GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
-// Copied from chromium with only changes to the namespace.
-
-#include <limits>
-
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
+#define GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
+// Copied from chromium with only changes to the namespace.
+
+#include <limits>
+
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
-
-namespace google {
-namespace protobuf {
-namespace python {
-
-template <bool SameSize, bool DestLarger,
- bool DestIsSigned, bool SourceIsSigned>
-struct IsValidNumericCastImpl;
-
-#define BASE_NUMERIC_CAST_CASE_SPECIALIZATION(A, B, C, D, Code) \
-template <> struct IsValidNumericCastImpl<A, B, C, D> { \
- template <class Source, class DestBounds> static inline bool Test( \
- Source source, DestBounds min, DestBounds max) { \
- return Code; \
- } \
-}
-
-#define BASE_NUMERIC_CAST_CASE_SAME_SIZE(DestSigned, SourceSigned, Code) \
- BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
- true, true, DestSigned, SourceSigned, Code); \
- BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
- true, false, DestSigned, SourceSigned, Code)
-
-#define BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(DestSigned, SourceSigned, Code) \
- BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
- false, false, DestSigned, SourceSigned, Code); \
-
-#define BASE_NUMERIC_CAST_CASE_DEST_LARGER(DestSigned, SourceSigned, Code) \
- BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
- false, true, DestSigned, SourceSigned, Code); \
-
-// The three top level cases are:
-// - Same size
-// - Source larger
-// - Dest larger
-// And for each of those three cases, we handle the 4 different possibilities
-// of signed and unsigned. This gives 12 cases to handle, which we enumerate
-// below.
-//
-// The last argument in each of the macros is the actual comparison code. It
-// has three arguments available, source (the value), and min/max which are
-// the ranges of the destination.
-
-
-// These are the cases where both types have the same size.
-
-// Both signed.
-BASE_NUMERIC_CAST_CASE_SAME_SIZE(true, true, true);
-// Both unsigned.
-BASE_NUMERIC_CAST_CASE_SAME_SIZE(false, false, true);
-// Dest unsigned, Source signed.
-BASE_NUMERIC_CAST_CASE_SAME_SIZE(false, true, source >= 0);
-// Dest signed, Source unsigned.
-// This cast is OK because Dest's max must be less than Source's.
-BASE_NUMERIC_CAST_CASE_SAME_SIZE(true, false,
- source <= static_cast<Source>(max));
-
-
-// These are the cases where Source is larger.
-
-// Both unsigned.
-BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(false, false, source <= max);
-// Both signed.
-BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(true, true,
- source >= min && source <= max);
-// Dest is unsigned, Source is signed.
-BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(false, true,
- source >= 0 && source <= max);
-// Dest is signed, Source is unsigned.
-// This cast is OK because Dest's max must be less than Source's.
-BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(true, false,
- source <= static_cast<Source>(max));
-
-
-// These are the cases where Dest is larger.
-
-// Both unsigned.
-BASE_NUMERIC_CAST_CASE_DEST_LARGER(false, false, true);
-// Both signed.
-BASE_NUMERIC_CAST_CASE_DEST_LARGER(true, true, true);
-// Dest is unsigned, Source is signed.
-BASE_NUMERIC_CAST_CASE_DEST_LARGER(false, true, source >= 0);
-// Dest is signed, Source is unsigned.
-BASE_NUMERIC_CAST_CASE_DEST_LARGER(true, false, true);
-
-#undef BASE_NUMERIC_CAST_CASE_SPECIALIZATION
-#undef BASE_NUMERIC_CAST_CASE_SAME_SIZE
-#undef BASE_NUMERIC_CAST_CASE_SOURCE_LARGER
-#undef BASE_NUMERIC_CAST_CASE_DEST_LARGER
-
-
-// The main test for whether the conversion will under or overflow.
-template <class Dest, class Source>
-inline bool IsValidNumericCast(Source source) {
- typedef std::numeric_limits<Source> SourceLimits;
- typedef std::numeric_limits<Dest> DestLimits;
+
+namespace google {
+namespace protobuf {
+namespace python {
+
+template <bool SameSize, bool DestLarger,
+ bool DestIsSigned, bool SourceIsSigned>
+struct IsValidNumericCastImpl;
+
+#define BASE_NUMERIC_CAST_CASE_SPECIALIZATION(A, B, C, D, Code) \
+template <> struct IsValidNumericCastImpl<A, B, C, D> { \
+ template <class Source, class DestBounds> static inline bool Test( \
+ Source source, DestBounds min, DestBounds max) { \
+ return Code; \
+ } \
+}
+
+#define BASE_NUMERIC_CAST_CASE_SAME_SIZE(DestSigned, SourceSigned, Code) \
+ BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
+ true, true, DestSigned, SourceSigned, Code); \
+ BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
+ true, false, DestSigned, SourceSigned, Code)
+
+#define BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(DestSigned, SourceSigned, Code) \
+ BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
+ false, false, DestSigned, SourceSigned, Code); \
+
+#define BASE_NUMERIC_CAST_CASE_DEST_LARGER(DestSigned, SourceSigned, Code) \
+ BASE_NUMERIC_CAST_CASE_SPECIALIZATION( \
+ false, true, DestSigned, SourceSigned, Code); \
+
+// The three top level cases are:
+// - Same size
+// - Source larger
+// - Dest larger
+// And for each of those three cases, we handle the 4 different possibilities
+// of signed and unsigned. This gives 12 cases to handle, which we enumerate
+// below.
+//
+// The last argument in each of the macros is the actual comparison code. It
+// has three arguments available: source (the value), and min/max, which are
+// the bounds of the destination type.
+
+
+// These are the cases where both types have the same size.
+
+// Both signed.
+BASE_NUMERIC_CAST_CASE_SAME_SIZE(true, true, true);
+// Both unsigned.
+BASE_NUMERIC_CAST_CASE_SAME_SIZE(false, false, true);
+// Dest unsigned, Source signed.
+BASE_NUMERIC_CAST_CASE_SAME_SIZE(false, true, source >= 0);
+// Dest signed, Source unsigned.
+// This cast is OK because Dest's max must be less than Source's.
+BASE_NUMERIC_CAST_CASE_SAME_SIZE(true, false,
+ source <= static_cast<Source>(max));
+
+
+// These are the cases where Source is larger.
+
+// Both unsigned.
+BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(false, false, source <= max);
+// Both signed.
+BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(true, true,
+ source >= min && source <= max);
+// Dest is unsigned, Source is signed.
+BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(false, true,
+ source >= 0 && source <= max);
+// Dest is signed, Source is unsigned.
+// This cast is OK because Dest's max must be less than Source's.
+BASE_NUMERIC_CAST_CASE_SOURCE_LARGER(true, false,
+ source <= static_cast<Source>(max));
+
+
+// These are the cases where Dest is larger.
+
+// Both unsigned.
+BASE_NUMERIC_CAST_CASE_DEST_LARGER(false, false, true);
+// Both signed.
+BASE_NUMERIC_CAST_CASE_DEST_LARGER(true, true, true);
+// Dest is unsigned, Source is signed.
+BASE_NUMERIC_CAST_CASE_DEST_LARGER(false, true, source >= 0);
+// Dest is signed, Source is unsigned.
+BASE_NUMERIC_CAST_CASE_DEST_LARGER(true, false, true);
+
+#undef BASE_NUMERIC_CAST_CASE_SPECIALIZATION
+#undef BASE_NUMERIC_CAST_CASE_SAME_SIZE
+#undef BASE_NUMERIC_CAST_CASE_SOURCE_LARGER
+#undef BASE_NUMERIC_CAST_CASE_DEST_LARGER
+
+
+// The main test for whether the conversion will under or overflow.
+template <class Dest, class Source>
+inline bool IsValidNumericCast(Source source) {
+ typedef std::numeric_limits<Source> SourceLimits;
+ typedef std::numeric_limits<Dest> DestLimits;
static_assert(SourceLimits::is_specialized, "argument must be numeric");
static_assert(SourceLimits::is_integer, "argument must be integral");
static_assert(DestLimits::is_specialized, "result must be numeric");
static_assert(DestLimits::is_integer, "result must be integral");
-
- return IsValidNumericCastImpl<
- sizeof(Dest) == sizeof(Source),
- (sizeof(Dest) > sizeof(Source)),
- DestLimits::is_signed,
- SourceLimits::is_signed>::Test(
- source,
- DestLimits::min(),
- DestLimits::max());
-}
-
-// checked_numeric_cast<> is analogous to static_cast<> for numeric types,
-// except that it CHECKs that the specified numeric conversion will not
-// overflow or underflow. Floating point arguments are not currently allowed
+
+ return IsValidNumericCastImpl<
+ sizeof(Dest) == sizeof(Source),
+ (sizeof(Dest) > sizeof(Source)),
+ DestLimits::is_signed,
+ SourceLimits::is_signed>::Test(
+ source,
+ DestLimits::min(),
+ DestLimits::max());
+}
+
+// checked_numeric_cast<> is analogous to static_cast<> for numeric types,
+// except that it CHECKs that the specified numeric conversion will not
+// overflow or underflow. Floating point arguments are not currently allowed
// (this is static_asserted), though this could be supported if necessary.
-template <class Dest, class Source>
-inline Dest checked_numeric_cast(Source source) {
- GOOGLE_CHECK(IsValidNumericCast<Dest>(source));
- return static_cast<Dest>(source);
-}
-
-} // namespace python
-} // namespace protobuf
+template <class Dest, class Source>
+inline Dest checked_numeric_cast(Source source) {
+ GOOGLE_CHECK(IsValidNumericCast<Dest>(source));
+ return static_cast<Dest>(source);
+}
+
+} // namespace python
+} // namespace protobuf
} // namespace google
-
-#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
+
+#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SAFE_NUMERICS_H__
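
IsValidNumericCast<> dispatches on size and signedness at compile time, but the effect at run time is a bounds check against the destination type, and checked_numeric_cast<> CHECK-fails when that test does not pass. A hedged Python rendering of the same idea for an int32 destination; the helper names are invented for this sketch.

INT32_MIN, INT32_MAX = -2 ** 31, 2 ** 31 - 1

def is_valid_int32_cast(source):
    # The test the template machinery reduces to: the value must lie
    # within the destination's numeric limits.
    return INT32_MIN <= source <= INT32_MAX

def checked_int32_cast(source):
    # checked_numeric_cast<> aborts via GOOGLE_CHECK; here we raise instead.
    if not is_valid_int32_cast(source):
        raise OverflowError('value out of range for int32: %d' % source)
    return int(source)

print(checked_int32_cast(123))       # 123
print(is_valid_int32_cast(2 ** 31))  # False: would overflow int32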
diff --git a/contrib/python/protobuf/py2/google/protobuf/pyext/scoped_pyobject_ptr.h b/contrib/python/protobuf/py2/google/protobuf/pyext/scoped_pyobject_ptr.h
index a4f0655b79..6f7fc29813 100644
--- a/contrib/python/protobuf/py2/google/protobuf/pyext/scoped_pyobject_ptr.h
+++ b/contrib/python/protobuf/py2/google/protobuf/pyext/scoped_pyobject_ptr.h
@@ -36,65 +36,65 @@
#include <google/protobuf/stubs/common.h>
#include <Python.h>
-namespace google {
-namespace protobuf {
-namespace python {
-
-// Owns a python object and decrements the reference count on destruction.
-// This class is not threadsafe.
-template <typename PyObjectStruct>
-class ScopedPythonPtr {
+namespace google {
+namespace protobuf {
+namespace python {
+
+// Owns a python object and decrements the reference count on destruction.
+// This class is not threadsafe.
+template <typename PyObjectStruct>
+class ScopedPythonPtr {
public:
- // Takes the ownership of the specified object to ScopedPythonPtr.
- // The reference count of the specified py_object is not incremented.
- explicit ScopedPythonPtr(PyObjectStruct* py_object = NULL)
- : ptr_(py_object) {}
+  // Takes ownership of the specified object.
+ // The reference count of the specified py_object is not incremented.
+ explicit ScopedPythonPtr(PyObjectStruct* py_object = NULL)
+ : ptr_(py_object) {}
- // If a PyObject is owned, decrement its reference count.
- ~ScopedPythonPtr() { Py_XDECREF(ptr_); }
+ // If a PyObject is owned, decrement its reference count.
+ ~ScopedPythonPtr() { Py_XDECREF(ptr_); }
- // Deletes the current owned object, if any.
- // Then takes ownership of a new object without incrementing the reference
- // count.
+ // Deletes the current owned object, if any.
+ // Then takes ownership of a new object without incrementing the reference
+ // count.
// This function must be called with a reference that you own.
// this->reset(this->get()) is wrong!
// this->reset(this->release()) is OK.
- PyObjectStruct* reset(PyObjectStruct* p = NULL) {
+ PyObjectStruct* reset(PyObjectStruct* p = NULL) {
Py_XDECREF(ptr_);
ptr_ = p;
return ptr_;
}
- // Releases ownership of the object without decrementing the reference count.
+ // Releases ownership of the object without decrementing the reference count.
// The caller now owns the returned reference.
- PyObjectStruct* release() {
+ PyObjectStruct* release() {
PyObject* p = ptr_;
ptr_ = NULL;
return p;
}
- PyObjectStruct* get() const { return ptr_; }
+ PyObjectStruct* get() const { return ptr_; }
- PyObject* as_pyobject() const { return reinterpret_cast<PyObject*>(ptr_); }
+ PyObject* as_pyobject() const { return reinterpret_cast<PyObject*>(ptr_); }
// Increments the reference count of the current object.
- // Should not be called when no object is held.
+ // Should not be called when no object is held.
void inc() const { Py_INCREF(ptr_); }
- // True when a ScopedPyObjectPtr and a raw pointer refer to the same object.
- // Comparison operators are non reflexive.
- bool operator==(const PyObjectStruct* p) const { return ptr_ == p; }
- bool operator!=(const PyObjectStruct* p) const { return ptr_ != p; }
+ // True when a ScopedPyObjectPtr and a raw pointer refer to the same object.
+ // Comparison operators are non reflexive.
+ bool operator==(const PyObjectStruct* p) const { return ptr_ == p; }
+ bool operator!=(const PyObjectStruct* p) const { return ptr_ != p; }
private:
- PyObjectStruct* ptr_;
+ PyObjectStruct* ptr_;
- GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPythonPtr);
+ GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPythonPtr);
};
-typedef ScopedPythonPtr<PyObject> ScopedPyObjectPtr;
-
-} // namespace python
-} // namespace protobuf
+typedef ScopedPythonPtr<PyObject> ScopedPyObjectPtr;
+
+} // namespace python
+} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
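
ScopedPythonPtr automates the usual CPython ownership rule: every owned reference is matched by exactly one Py_DECREF, and reset()/release() transfer that obligation rather than duplicate it. The same rule can be observed from Python with sys.getrefcount (the counts below include the temporary reference getrefcount itself takes):

import sys

obj = object()
base = sys.getrefcount(obj)

alias = obj                        # a new owned reference (Py_INCREF)
assert sys.getrefcount(obj) == base + 1

del alias                          # released again (Py_XDECREF in the destructor)
assert sys.getrefcount(obj) == base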
diff --git a/contrib/python/protobuf/py2/google/protobuf/reflection.py b/contrib/python/protobuf/py2/google/protobuf/reflection.py
index 716c97517b..81e18859a8 100644
--- a/contrib/python/protobuf/py2/google/protobuf/reflection.py
+++ b/contrib/python/protobuf/py2/google/protobuf/reflection.py
@@ -55,9 +55,9 @@ from google.protobuf import symbol_database
# Part of the public interface, but normally only used by message factories.
GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
-MESSAGE_CLASS_CACHE = {}
+MESSAGE_CLASS_CACHE = {}
+
-
# Deprecated. Please NEVER use reflection.ParseMessage().
def ParseMessage(descriptor, byte_str):
"""Generate a new Message instance from this Descriptor and a byte string.
diff --git a/contrib/python/protobuf/py2/google/protobuf/symbol_database.py b/contrib/python/protobuf/py2/google/protobuf/symbol_database.py
index 9608c88a43..fdcf8cf06c 100644
--- a/contrib/python/protobuf/py2/google/protobuf/symbol_database.py
+++ b/contrib/python/protobuf/py2/google/protobuf/symbol_database.py
@@ -80,20 +80,20 @@ class SymbolDatabase(message_factory.MessageFactory):
"""
desc = message.DESCRIPTOR
- self._classes[desc] = message
- self.RegisterMessageDescriptor(desc)
+ self._classes[desc] = message
+ self.RegisterMessageDescriptor(desc)
return message
- def RegisterMessageDescriptor(self, message_descriptor):
- """Registers the given message descriptor in the local database.
-
- Args:
+ def RegisterMessageDescriptor(self, message_descriptor):
+ """Registers the given message descriptor in the local database.
+
+ Args:
message_descriptor (Descriptor): the message descriptor to add.
- """
+ """
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._AddDescriptor(message_descriptor)
-
+
def RegisterEnumDescriptor(self, enum_descriptor):
"""Registers the given enum descriptor in the local database.
@@ -108,17 +108,17 @@ class SymbolDatabase(message_factory.MessageFactory):
self.pool._AddEnumDescriptor(enum_descriptor)
return enum_descriptor
- def RegisterServiceDescriptor(self, service_descriptor):
- """Registers the given service descriptor in the local database.
-
- Args:
+ def RegisterServiceDescriptor(self, service_descriptor):
+ """Registers the given service descriptor in the local database.
+
+ Args:
service_descriptor (ServiceDescriptor): the service descriptor to
register.
- """
+ """
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._AddServiceDescriptor(service_descriptor)
-
+
def RegisterFileDescriptor(self, file_descriptor):
"""Registers the given file descriptor in the local database.
@@ -145,7 +145,7 @@ class SymbolDatabase(message_factory.MessageFactory):
KeyError: if the symbol could not be found.
"""
- return self._classes[self.pool.FindMessageTypeByName(symbol)]
+ return self._classes[self.pool.FindMessageTypeByName(symbol)]
def GetMessages(self, files):
# TODO(amauryfa): Fix the differences with MessageFactory.
@@ -153,8 +153,8 @@ class SymbolDatabase(message_factory.MessageFactory):
Only messages already created and registered will be returned; (this is the
case for imported _pb2 modules)
- But unlike MessageFactory, this version also returns already defined nested
- messages, but does not register any message extensions.
+ But unlike MessageFactory, this version also returns already defined nested
+    messages; it does not register any message extensions.
Args:
files (list[str]): The file names to extract messages from.
@@ -166,20 +166,20 @@ class SymbolDatabase(message_factory.MessageFactory):
KeyError: if a file could not be found.
"""
- def _GetAllMessages(desc):
+ def _GetAllMessages(desc):
"""Walk a message Descriptor and recursively yields all message names."""
- yield desc
+ yield desc
for msg_desc in desc.nested_types:
- for nested_desc in _GetAllMessages(msg_desc):
- yield nested_desc
+ for nested_desc in _GetAllMessages(msg_desc):
+ yield nested_desc
result = {}
for file_name in files:
file_desc = self.pool.FindFileByName(file_name)
for msg_desc in file_desc.message_types_by_name.values():
- for desc in _GetAllMessages(msg_desc):
+ for desc in _GetAllMessages(msg_desc):
try:
- result[desc.full_name] = self._classes[desc]
+ result[desc.full_name] = self._classes[desc]
except KeyError:
# This descriptor has no registered class, skip it.
pass
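
The restored docstring above spells out that SymbolDatabase.GetMessages() also returns already-defined nested messages while registering no extensions. A sketch against the default database, assuming only that the generated descriptor_pb2 module registered its classes on import (as py2-era generated code does):

from google.protobuf import descriptor_pb2, symbol_database

db = symbol_database.Default()

# Generated _pb2 modules register their classes, so lookups by name resolve.
cls = db.GetSymbol('google.protobuf.FileDescriptorProto')
assert cls is descriptor_pb2.FileDescriptorProto

# GetMessages() also walks nested types such as DescriptorProto.ExtensionRange.
msgs = db.GetMessages(['google/protobuf/descriptor.proto'])
print('google.protobuf.DescriptorProto.ExtensionRange' in msgs)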
diff --git a/contrib/python/protobuf/py2/google/protobuf/text_format.py b/contrib/python/protobuf/py2/google/protobuf/text_format.py
index 345f5e157f..9c4ca90ee6 100644
--- a/contrib/python/protobuf/py2/google/protobuf/text_format.py
+++ b/contrib/python/protobuf/py2/google/protobuf/text_format.py
@@ -229,7 +229,7 @@ def PrintMessage(message,
float_format=None,
double_format=None,
use_field_number=False,
- descriptor_pool=None,
+ descriptor_pool=None,
message_formatter=None,
print_unknown_fields=False,
force_colon=False):
@@ -258,7 +258,7 @@ def PrintField(field,
use_short_repeated_primitives=False,
pointy_brackets=False,
use_index_order=False,
- float_format=None,
+ float_format=None,
double_format=None,
message_formatter=None,
print_unknown_fields=False,
@@ -282,7 +282,7 @@ def PrintFieldValue(field,
use_short_repeated_primitives=False,
pointy_brackets=False,
use_index_order=False,
- float_format=None,
+ float_format=None,
double_format=None,
message_formatter=None,
print_unknown_fields=False,
@@ -309,16 +309,16 @@ def _BuildMessageFromTypeName(type_name, descriptor_pool):
wasn't found matching type_name.
"""
# pylint: disable=g-import-not-at-top
- if descriptor_pool is None:
- from google.protobuf import descriptor_pool as pool_mod
- descriptor_pool = pool_mod.Default()
- from google.protobuf import symbol_database
- database = symbol_database.Default()
+ if descriptor_pool is None:
+ from google.protobuf import descriptor_pool as pool_mod
+ descriptor_pool = pool_mod.Default()
+ from google.protobuf import symbol_database
+ database = symbol_database.Default()
try:
message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
except KeyError:
return None
- message_type = database.GetPrototype(message_descriptor)
+ message_type = database.GetPrototype(message_descriptor)
return message_type()
@@ -375,9 +375,9 @@ class _Printer(object):
float_format is set, use float_format. Otherwise, str() is used.
use_field_number: If True, print field numbers instead of names.
descriptor_pool: A DescriptorPool used to resolve Any types.
- message_formatter: A function(message, indent, as_one_line): unicode|None
- to custom format selected sub-messages (usually based on message type).
- Use to pretty print parts of the protobuf for easier diffing.
+ message_formatter: A function(message, indent, as_one_line): unicode|None
+ to custom format selected sub-messages (usually based on message type).
+ Use to pretty print parts of the protobuf for easier diffing.
print_unknown_fields: If True, unknown fields will be printed.
force_colon: If set, a colon will be added after the field name even if
the field is a proto message.
@@ -396,7 +396,7 @@ class _Printer(object):
self.double_format = float_format
self.use_field_number = use_field_number
self.descriptor_pool = descriptor_pool
- self.message_formatter = message_formatter
+ self.message_formatter = message_formatter
self.print_unknown_fields = print_unknown_fields
self.force_colon = force_colon
@@ -416,27 +416,27 @@ class _Printer(object):
else:
return False
- def _TryCustomFormatMessage(self, message):
- formatted = self.message_formatter(message, self.indent, self.as_one_line)
- if formatted is None:
- return False
-
- out = self.out
- out.write(' ' * self.indent)
- out.write(formatted)
- out.write(' ' if self.as_one_line else '\n')
- return True
-
+ def _TryCustomFormatMessage(self, message):
+ formatted = self.message_formatter(message, self.indent, self.as_one_line)
+ if formatted is None:
+ return False
+
+ out = self.out
+ out.write(' ' * self.indent)
+ out.write(formatted)
+ out.write(' ' if self.as_one_line else '\n')
+ return True
+
def PrintMessage(self, message):
"""Convert protobuf message to text format.
Args:
message: The protocol buffers message.
"""
- if self.message_formatter and self._TryCustomFormatMessage(message):
- return
+ if self.message_formatter and self._TryCustomFormatMessage(message):
+ return
if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
- self._TryPrintAsAnyMessage(message)):
+ self._TryPrintAsAnyMessage(message)):
return
fields = message.ListFields()
if self.use_index_order:
@@ -450,7 +450,7 @@ class _Printer(object):
# of this file to work around.
#
# TODO(haberman): refactor and optimize if this becomes an issue.
- entry_submsg = value.GetEntryClass()(key=key, value=value[key])
+ entry_submsg = value.GetEntryClass()(key=key, value=value[key])
self.PrintField(field, entry_submsg)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if (self.use_short_repeated_primitives
@@ -642,36 +642,36 @@ class _Printer(object):
def Parse(text,
message,
allow_unknown_extension=False,
- allow_field_number=False,
+ allow_field_number=False,
descriptor_pool=None,
allow_unknown_field=False):
"""Parses a text representation of a protocol message into a message.
- NOTE: for historical reasons this function does not clear the input
- message. This is different from what the binary msg.ParseFrom(...) does.
+ NOTE: for historical reasons this function does not clear the input
+ message. This is different from what the binary msg.ParseFrom(...) does.
If text contains a field already set in message, the value is appended if the
field is repeated. Otherwise, an error is raised.
-
+
Example::
- a = MyProto()
- a.repeated_field.append('test')
- b = MyProto()
-
+ a = MyProto()
+ a.repeated_field.append('test')
+ b = MyProto()
+
# Repeated fields are combined
- text_format.Parse(repr(a), b)
- text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]
-
+ text_format.Parse(repr(a), b)
+ text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]
+
# Non-repeated fields cannot be overwritten
a.singular_field = 1
b.singular_field = 2
text_format.Parse(repr(a), b) # ParseError
- # Binary version:
- b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"
-
- Caller is responsible for clearing the message as needed.
-
+ # Binary version:
+ b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"
+
+ Caller is responsible for clearing the message as needed.
+
Args:
text (str): Message text representation.
message (Message): A protocol buffer message to merge into.
@@ -690,9 +690,9 @@ def Parse(text,
ParseError: On text parsing problems.
"""
return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'),
- message,
- allow_unknown_extension,
- allow_field_number,
+ message,
+ allow_unknown_extension,
+ allow_field_number,
descriptor_pool=descriptor_pool,
allow_unknown_field=allow_unknown_field)
@@ -738,7 +738,7 @@ def Merge(text,
def ParseLines(lines,
message,
allow_unknown_extension=False,
- allow_field_number=False,
+ allow_field_number=False,
descriptor_pool=None,
allow_unknown_field=False):
"""Parses a text representation of a protocol message into a message.
@@ -762,8 +762,8 @@ def ParseLines(lines,
Raises:
ParseError: On text parsing problems.
"""
- parser = _Parser(allow_unknown_extension,
- allow_field_number,
+ parser = _Parser(allow_unknown_extension,
+ allow_field_number,
descriptor_pool=descriptor_pool,
allow_unknown_field=allow_unknown_field)
return parser.ParseLines(lines, message)
@@ -785,7 +785,7 @@ def MergeLines(lines,
allow_unknown_extension: if True, skip over missing extensions and keep
parsing
allow_field_number: if True, both field number and field name are allowed.
- descriptor_pool: A DescriptorPool used to resolve Any types.
+ descriptor_pool: A DescriptorPool used to resolve Any types.
allow_unknown_field: if True, skip over unknown field and keep
parsing. Avoid to use this option if possible. It may hide some
errors (e.g. spelling error on field name)
@@ -907,11 +907,11 @@ class _Parser(object):
field = None
else:
raise tokenizer.ParseErrorPreviousToken(
- 'Extension "%s" not registered. '
- 'Did you import the _pb2 module which defines it? '
- 'If you are trying to place the extension in the MessageSet '
- 'field of another message that is in an Any or MessageSet field, '
- 'that message\'s _pb2 module must be imported as well' % name)
+ 'Extension "%s" not registered. '
+ 'Did you import the _pb2 module which defines it? '
+ 'If you are trying to place the extension in the MessageSet '
+ 'field of another message that is in an Any or MessageSet field, '
+ 'that message\'s _pb2 module must be imported as well' % name)
elif message_descriptor != field.containing_type:
raise tokenizer.ParseErrorPreviousToken(
'Extension "%s" does not extend message type "%s".' %
@@ -992,17 +992,17 @@ class _Parser(object):
def _ConsumeAnyTypeUrl(self, tokenizer):
"""Consumes a google.protobuf.Any type URL and returns the type name."""
# Consume "type.googleapis.com/".
- prefix = [tokenizer.ConsumeIdentifier()]
+ prefix = [tokenizer.ConsumeIdentifier()]
tokenizer.Consume('.')
- prefix.append(tokenizer.ConsumeIdentifier())
+ prefix.append(tokenizer.ConsumeIdentifier())
tokenizer.Consume('.')
- prefix.append(tokenizer.ConsumeIdentifier())
+ prefix.append(tokenizer.ConsumeIdentifier())
tokenizer.Consume('/')
# Consume the fully-qualified type name.
name = [tokenizer.ConsumeIdentifier()]
while tokenizer.TryConsume('.'):
name.append(tokenizer.ConsumeIdentifier())
- return '.'.join(prefix), '.'.join(name)
+ return '.'.join(prefix), '.'.join(name)
def _MergeMessageField(self, tokenizer, message, field):
"""Merges a single scalar field into a message.
@@ -1027,7 +1027,7 @@ class _Parser(object):
if field.is_extension:
sub_message = message.Extensions[field].add()
elif is_map_entry:
- sub_message = getattr(message, field.name).GetEntryClass()()
+ sub_message = getattr(message, field.name).GetEntryClass()()
else:
sub_message = getattr(message, field.name).add()
else:
@@ -1062,12 +1062,12 @@ class _Parser(object):
else:
getattr(message, field.name)[sub_message.key] = sub_message.value
- @staticmethod
- def _IsProto3Syntax(message):
- message_descriptor = message.DESCRIPTOR
- return (hasattr(message_descriptor, 'syntax') and
- message_descriptor.syntax == 'proto3')
-
+ @staticmethod
+ def _IsProto3Syntax(message):
+ message_descriptor = message.DESCRIPTOR
+ return (hasattr(message_descriptor, 'syntax') and
+ message_descriptor.syntax == 'proto3')
+
def _MergeScalarField(self, tokenizer, message, field):
"""Merges a single scalar field into a message.
@@ -1120,7 +1120,7 @@ class _Parser(object):
if field.is_extension:
if (not self._allow_multiple_scalars and
not self._IsProto3Syntax(message) and
- message.HasExtension(field)):
+ message.HasExtension(field)):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" extensions.' %
(message.DESCRIPTOR.full_name, field.full_name))
@@ -1333,22 +1333,22 @@ class Tokenizer(object):
self.NextToken()
return result
- def ConsumeCommentOrTrailingComment(self):
- """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
-
- # Tokenizer initializes _previous_line and _previous_column to 0. As the
- # tokenizer starts, it looks like there is a previous token on the line.
- just_started = self._line == 0 and self._column == 0
-
- before_parsing = self._previous_line
- comment = self.ConsumeComment()
-
- # A trailing comment is a comment on the same line than the previous token.
- trailing = (self._previous_line == before_parsing
- and not just_started)
-
- return trailing, comment
-
+ def ConsumeCommentOrTrailingComment(self):
+ """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
+
+ # Tokenizer initializes _previous_line and _previous_column to 0. As the
+ # tokenizer starts, it looks like there is a previous token on the line.
+ just_started = self._line == 0 and self._column == 0
+
+ before_parsing = self._previous_line
+ comment = self.ConsumeComment()
+
+    # A trailing comment is a comment on the same line as the previous token.
+ trailing = (self._previous_line == before_parsing
+ and not just_started)
+
+ return trailing, comment
+
def TryConsumeIdentifier(self):
try:
self.ConsumeIdentifier()
@@ -1389,7 +1389,7 @@ class Tokenizer(object):
"""
result = self.token
if not self._IDENTIFIER_OR_NUMBER.match(result):
- raise self.ParseError('Expected identifier or number, got %s.' % result)
+ raise self.ParseError('Expected identifier or number, got %s.' % result)
self.NextToken()
return result
@@ -1779,9 +1779,9 @@ def ParseBool(text):
Raises:
ValueError: If text is not a valid boolean.
"""
- if text in ('true', 't', '1', 'True'):
+ if text in ('true', 't', '1', 'True'):
return True
- elif text in ('false', 'f', '0', 'False'):
+ elif text in ('false', 'f', '0', 'False'):
return False
else:
raise ValueError('Expected "true" or "false".')
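
The docstring restored above describes Parse()'s historical behaviour: the target message is not cleared, repeated fields accumulate across calls, and the binary API replaces the contents instead. A runnable version of the docstring's MyProto example, using FileDescriptorProto.dependency as the repeated field:

from google.protobuf import descriptor_pb2, text_format

a = descriptor_pb2.FileDescriptorProto()
a.dependency.append('test.proto')

b = descriptor_pb2.FileDescriptorProto()
# Parse() merges into b without clearing it, so the repeated field grows.
text_format.Parse(text_format.MessageToString(a), b)
text_format.Parse(text_format.MessageToString(a), b)
assert list(b.dependency) == ['test.proto', 'test.proto']

# The binary API clears first, as the docstring points out.
b.ParseFromString(a.SerializeToString())
assert list(b.dependency) == ['test.proto']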