author     heretic <heretic@yandex-team.ru>  2022-06-14 13:29:31 +0300
committer  heretic <heretic@yandex-team.ru>  2022-06-14 13:29:31 +0300
commit     16f8be4f481c275c34795233c18f8d078382fcb3 (patch)
tree       2363f1306ce2e17e72c0a48614256acd046990e6  /contrib/python
parent     647dc68b78e469e5ab416e9b62885c9846fd511d (diff)
download   ydb-16f8be4f481c275c34795233c18f8d078382fcb3.tar.gz
Update protobuf to 3.18.1
ref:4846abb21711ea0dc148d4c5df7b5edd3d1bdc69
Diffstat (limited to 'contrib/python')
-rw-r--r--  contrib/python/protobuf/py3/README.md  4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/__init__.py  2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/descriptor.py  3
-rwxr-xr-x  contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py  4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py  3
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/decoder.py  54
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/encoder.py  11
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/enum_type_wrapper.py  4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/python_message.py  28
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py  34
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py  14
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/json_format.py  27
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/message_factory.py  2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/proto_api.h  21
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/proto_builder.py  5
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc  3
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc  50
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h  20
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message.cc  110
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc  14
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/text_encoding.py  19
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/text_format.py  55
22 files changed, 254 insertions, 233 deletions
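Most of this update removes the six compatibility layer and other Python 2 remnants from the pure-Python runtime; the README hunk below bumps the documented minimum to Python 3.5. A minimal post-upgrade sanity check (illustrative, not part of the patch; it only relies on the standard google.protobuf layout):

    # Confirm the bundled runtime version and which backend is active.
    from google.protobuf import __version__
    from google.protobuf.internal import api_implementation

    print(__version__)                # expected: 3.18.1
    print(api_implementation.Type())  # 'cpp' or 'python', depending on the build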
diff --git a/contrib/python/protobuf/py3/README.md b/contrib/python/protobuf/py3/README.md
index cb8b7e9892..f0c9ce4f44 100644
--- a/contrib/python/protobuf/py3/README.md
+++ b/contrib/python/protobuf/py3/README.md
@@ -1,8 +1,6 @@
Protocol Buffers - Google's data interchange format
===================================================
-[![Build status](https://storage.googleapis.com/protobuf-kokoro-results/status-badge/linux-python.png)](https://fusion.corp.google.com/projectanalysis/current/KOKORO/prod:protobuf%2Fgithub%2Fmaster%2Fubuntu%2Fpython%2Fcontinuous) [![Build status](https://storage.googleapis.com/protobuf-kokoro-results/status-badge/linux-python_compatibility.png)](https://fusion.corp.google.com/projectanalysis/current/KOKORO/prod:protobuf%2Fgithub%2Fmaster%2Fubuntu%2Fpython_compatibility%2Fcontinuous) [![Build status](https://storage.googleapis.com/protobuf-kokoro-results/status-badge/linux-python_cpp.png)](https://fusion.corp.google.com/projectanalysis/current/KOKORO/prod:protobuf%2Fgithub%2Fmaster%2Fubuntu%2Fpython_cpp%2Fcontinuous) [![Build status](https://storage.googleapis.com/protobuf-kokoro-results/status-badge/macos-python.png)](https://fusion.corp.google.com/projectanalysis/current/KOKORO/prod:protobuf%2Fgithub%2Fmaster%2Fmacos%2Fpython%2Fcontinuous) [![Build status](https://storage.googleapis.com/protobuf-kokoro-results/status-badge/macos-python_cpp.png)](https://fusion.corp.google.com/projectanalysis/current/KOKORO/prod:protobuf%2Fgithub%2Fmaster%2Fmacos%2Fpython_cpp%2Fcontinuous) [![Compat check PyPI](https://python-compatibility-tools.appspot.com/one_badge_image?package=protobuf)](https://python-compatibility-tools.appspot.com/one_badge_target?package=protobuf)
-
Copyright 2008 Google Inc.
This directory contains the Python Protocol Buffers runtime library.
@@ -28,7 +26,7 @@ use python c++ implementation.
Installation
============
-1) Make sure you have Python 2.7 or newer. If in doubt, run:
+1) Make sure you have Python 3.5 or newer. If in doubt, run:
$ python -V
diff --git a/contrib/python/protobuf/py3/google/protobuf/__init__.py b/contrib/python/protobuf/py3/google/protobuf/__init__.py
index 496df6adaf..b8122e82ae 100644
--- a/contrib/python/protobuf/py3/google/protobuf/__init__.py
+++ b/contrib/python/protobuf/py3/google/protobuf/__init__.py
@@ -30,4 +30,4 @@
# Copyright 2007 Google Inc. All Rights Reserved.
-__version__ = '3.17.3'
+__version__ = '3.18.1'
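A pattern that repeats across the Python hunks below, starting with descriptor.py, is replacing six.with_metaclass with the native Python 3 metaclass keyword. A minimal sketch of the equivalence, with a toy metaclass standing in for DescriptorMetaclass (names here are illustrative):

    class ToyMeta(type):
        # Stand-in for DescriptorMetaclass; only the spelling matters here.
        pass

    # six spelling (Python 2 and 3):
    #   class DescriptorBase(six.with_metaclass(ToyMeta)): ...
    # native Python 3 spelling used by the patch:
    class DescriptorBase(metaclass=ToyMeta):
        pass

    assert type(DescriptorBase) is ToyMeta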
diff --git a/contrib/python/protobuf/py3/google/protobuf/descriptor.py b/contrib/python/protobuf/py3/google/protobuf/descriptor.py
index 70fdae16ff..0f7bd17443 100644
--- a/contrib/python/protobuf/py3/google/protobuf/descriptor.py
+++ b/contrib/python/protobuf/py3/google/protobuf/descriptor.py
@@ -36,7 +36,6 @@ __author__ = 'robinson@google.com (Will Robinson)'
import threading
import warnings
-import six
from google.protobuf.internal import api_implementation
@@ -111,7 +110,7 @@ _Deprecated.count = 100
_internal_create_key = object()
-class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
+class DescriptorBase(metaclass=DescriptorMetaclass):
"""Descriptors base class.
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py b/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py
index 4cba1d479d..287ba689bf 100755
--- a/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py
@@ -154,8 +154,6 @@ except ImportError:
import unittest
import uuid
-import six
-
try:
# Since python 3
import collections.abc as collections_abc
@@ -181,7 +179,7 @@ def _StrClass(cls):
def _NonStringIterable(obj):
return (isinstance(obj, collections_abc.Iterable) and not
- isinstance(obj, six.string_types))
+ isinstance(obj, str))
def _FormatParameterList(testcase_params):
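The _parameterized.py change narrows the "non-string iterable" test from six.string_types to plain str. A sketch of the predicate after the change (an illustrative copy, not the module itself):

    import collections.abc as collections_abc

    def non_string_iterable(obj):
        # Iterables count, plain text strings do not; bytes still count.
        return (isinstance(obj, collections_abc.Iterable)
                and not isinstance(obj, str))

    assert non_string_iterable([1, 2, 3])
    assert not non_string_iterable('abc')
    assert non_string_iterable(b'abc')

On Python 3, six.string_types is just (str,), so this is a pure simplification with no behaviour change.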
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
index be1af7df6b..a3667318c1 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
@@ -80,8 +80,7 @@ if _api_version < 0: # Still unspecified?
# and Python 3 default to `_api_version = 2` (C++ implementation V2).
pass
-_default_implementation_type = (
- 'python' if _api_version <= 0 else 'cpp')
+_default_implementation_type = ('python' if _api_version <= 0 else 'cpp')
# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py b/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
index 6804986b6e..bc1b7b785c 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
@@ -80,16 +80,8 @@ we repeatedly read a tag, look up the corresponding decoder, and invoke it.
__author__ = 'kenton@google.com (Kenton Varda)'
+import math
import struct
-import sys
-import six
-
-_UCS2_MAXUNICODE = 65535
-if six.PY3:
- long = int
-else:
- import re # pylint: disable=g-import-not-at-top
- _SURROGATE_PATTERN = re.compile(six.u(r'[\ud800-\udfff]'))
from google.protobuf.internal import containers
from google.protobuf.internal import encoder
@@ -97,13 +89,6 @@ from google.protobuf.internal import wire_format
from google.protobuf import message
-# This will overflow and thus become IEEE-754 "infinity". We would use
-# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
-_POS_INF = 1e10000
-_NEG_INF = -_POS_INF
-_NAN = _POS_INF * 0
-
-
# This is not for optimization, but rather to avoid conflicts with local
# variables named "message".
_DecodeError = message.DecodeError
@@ -123,7 +108,7 @@ def _VarintDecoder(mask, result_type):
result = 0
shift = 0
while 1:
- b = six.indexbytes(buffer, pos)
+ b = buffer[pos]
result |= ((b & 0x7f) << shift)
pos += 1
if not (b & 0x80):
@@ -146,7 +131,7 @@ def _SignedVarintDecoder(bits, result_type):
result = 0
shift = 0
while 1:
- b = six.indexbytes(buffer, pos)
+ b = buffer[pos]
result |= ((b & 0x7f) << shift)
pos += 1
if not (b & 0x80):
@@ -159,12 +144,9 @@ def _SignedVarintDecoder(bits, result_type):
raise _DecodeError('Too many bytes when decoding varint.')
return DecodeVarint
-# We force 32-bit values to int and 64-bit values to long to make
-# alternate implementations where the distinction is more significant
-# (e.g. the C++ implementation) simpler.
-
-_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
-_DecodeSignedVarint = _SignedVarintDecoder(64, long)
+# All 32-bit and 64-bit values are represented as int.
+_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
+_DecodeSignedVarint = _SignedVarintDecoder(64, int)
# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
@@ -189,7 +171,7 @@ def ReadTag(buffer, pos):
Tuple[bytes, int] of the tag data and new position.
"""
start = pos
- while six.indexbytes(buffer, pos) & 0x80:
+ while buffer[pos] & 0x80:
pos += 1
pos += 1
@@ -333,11 +315,11 @@ def _FloatDecoder():
if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
# If at least one significand bit is set...
if float_bytes[0:3] != b'\x00\x00\x80':
- return (_NAN, new_pos)
+ return (math.nan, new_pos)
# If sign bit is set...
if float_bytes[3:4] == b'\xFF':
- return (_NEG_INF, new_pos)
- return (_POS_INF, new_pos)
+ return (-math.inf, new_pos)
+ return (math.inf, new_pos)
# Note that we expect someone up-stack to catch struct.error and convert
# it to _DecodeError -- this way we don't have to set up exception-
@@ -377,7 +359,7 @@ def _DoubleDecoder():
if ((double_bytes[7:8] in b'\x7F\xFF')
and (double_bytes[6:7] >= b'\xF0')
and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
- return (_NAN, new_pos)
+ return (math.nan, new_pos)
# Note that we expect someone up-stack to catch struct.error and convert
# it to _DecodeError -- this way we don't have to set up exception-
@@ -559,31 +541,21 @@ BoolDecoder = _ModifiedDecoder(
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
- is_strict_utf8=False, clear_if_default=False):
+ clear_if_default=False):
"""Returns a decoder for a string field."""
local_DecodeVarint = _DecodeVarint
- local_unicode = six.text_type
def _ConvertToUnicode(memview):
"""Convert byte to unicode."""
byte_str = memview.tobytes()
try:
- value = local_unicode(byte_str, 'utf-8')
+ value = str(byte_str, 'utf-8')
except UnicodeDecodeError as e:
# add more information to the error message and re-raise it.
e.reason = '%s in field: %s' % (e, key.full_name)
raise
- if is_strict_utf8 and six.PY2 and sys.maxunicode > _UCS2_MAXUNICODE:
- # Only do the check for python2 ucs4 when is_strict_utf8 enabled
- if _SURROGATE_PATTERN.search(value):
- reason = ('String field %s contains invalid UTF-8 data when parsing'
- 'a protocol buffer: surrogates not allowed. Use'
- 'the bytes type if you intend to send raw bytes.') % (
- key.full_name)
- raise message.DecodeError(reason)
-
return value
assert not is_packed
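Two themes run through the decoder.py hunks: on Python 3, indexing a bytes object already yields an int, so six.indexbytes(buffer, pos) collapses to buffer[pos], and the hand-rolled _POS_INF/_NEG_INF/_NAN constants give way to math.inf and math.nan. A self-contained varint decoder in the same style (a sketch, not the library's exact code):

    import math

    def decode_varint(buffer, pos, mask=(1 << 64) - 1):
        # Decode one unsigned varint from a bytes-like object, starting at pos.
        result = 0
        shift = 0
        while True:
            b = buffer[pos]                  # Python 3: bytes indexing gives an int
            result |= (b & 0x7F) << shift
            pos += 1
            if not b & 0x80:
                return result & mask, pos
            shift += 7
            if shift >= 64:
                raise ValueError('Too many bytes when decoding varint.')

    assert decode_varint(b'\xac\x02', 0) == (300, 2)
    assert math.isinf(math.inf) and math.isnan(math.nan)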
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/encoder.py b/contrib/python/protobuf/py3/google/protobuf/internal/encoder.py
index 0c016f3cfa..4b4f652f25 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/encoder.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/encoder.py
@@ -68,8 +68,6 @@ __author__ = 'kenton@google.com (Kenton Varda)'
import struct
-import six
-
from google.protobuf.internal import wire_format
@@ -372,7 +370,8 @@ def MapSizer(field_descriptor, is_message_map):
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
- local_int2byte = six.int2byte
+ local_int2byte = struct.Struct('>B').pack
+
def EncodeVarint(write, value, unused_deterministic=None):
bits = value & 0x7f
value >>= 7
@@ -389,7 +388,8 @@ def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
- local_int2byte = six.int2byte
+ local_int2byte = struct.Struct('>B').pack
+
def EncodeSignedVarint(write, value, unused_deterministic=None):
if value < 0:
value += (1 << 64)
@@ -420,8 +420,7 @@ def _VarintBytes(value):
def TagBytes(field_number, wire_type):
"""Encode the given tag and return the bytes. Only called at startup."""
- return six.binary_type(
- _VarintBytes(wire_format.PackTag(field_number, wire_type)))
+ return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
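In encoder.py, six.int2byte is replaced by struct.Struct('>B').pack, which packs a single unsigned byte exactly as int2byte did. A sketch of a varint encoder built on it (illustrative, standard library only):

    import struct

    int2byte = struct.Struct('>B').pack      # int2byte(0xAC) -> b'\xac'

    def encode_varint(write, value):
        # Feed the varint encoding of a non-negative value to the write callable.
        bits = value & 0x7F
        value >>= 7
        while value:
            write(int2byte(0x80 | bits))
            bits = value & 0x7F
            value >>= 7
        write(int2byte(bits))

    out = bytearray()
    encode_varint(out.extend, 300)
    assert bytes(out) == b'\xac\x02'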
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/enum_type_wrapper.py b/contrib/python/protobuf/py3/google/protobuf/internal/enum_type_wrapper.py
index 9ae0066584..9a53999a43 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/enum_type_wrapper.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/enum_type_wrapper.py
@@ -37,8 +37,6 @@ on proto classes. For usage, see:
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
-import six
-
class EnumTypeWrapper(object):
"""A utility for finding the names of enum values."""
@@ -57,7 +55,7 @@ class EnumTypeWrapper(object):
except KeyError:
pass # fall out to break exception chaining
- if not isinstance(number, six.integer_types):
+ if not isinstance(number, int):
raise TypeError(
'Enum value for {} must be an int, but got {} {!r}.'.format(
self._enum_type.name, type(number), number))
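The enum_type_wrapper.py change only swaps six.integer_types for int in the error path of Name(). Usage sketch against a generated enum (assumes the bundled descriptor_pb2 module; any generated enum behaves the same way):

    from google.protobuf import descriptor_pb2

    enum_wrapper = descriptor_pb2.FieldDescriptorProto.Type
    assert enum_wrapper.Name(9) == 'TYPE_STRING'
    assert enum_wrapper.Value('TYPE_STRING') == 9

    try:
        enum_wrapper.Name('9')   # a str is rejected with TypeError, not a KeyError
    except TypeError as err:
        print(err)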
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py b/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
index 99d2f078de..2921d5cb6e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
@@ -55,9 +55,6 @@ import struct
import sys
import weakref
-import six
-from six.moves import range
-
# We use "as" to avoid name collisions with variables.
from google.protobuf.internal import api_implementation
from google.protobuf.internal import containers
@@ -284,13 +281,6 @@ def _IsMessageMapField(field):
return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
-def _IsStrictUtf8Check(field):
- if field.containing_type.syntax != 'proto3':
- return False
- enforce_utf8 = True
- return enforce_utf8
-
-
def _AttachFieldHelpers(cls, field_descriptor):
is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
is_packable = (is_repeated and
@@ -348,11 +338,10 @@ def _AttachFieldHelpers(cls, field_descriptor):
field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
is_message_map)
elif decode_type == _FieldDescriptor.TYPE_STRING:
- is_strict_utf8_check = _IsStrictUtf8Check(field_descriptor)
field_decoder = decoder.StringDecoder(
field_descriptor.number, is_repeated, is_packed,
field_descriptor, field_descriptor._default_constructor,
- is_strict_utf8_check, clear_if_default)
+ clear_if_default)
elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
field_descriptor.number, is_repeated, is_packed,
@@ -485,7 +474,7 @@ def _ReraiseTypeErrorWithFieldName(message_name, field_name):
exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
# re-raise possibly-amended exception with original traceback:
- six.reraise(type(exc), exc, sys.exc_info()[2])
+ raise exc.with_traceback(sys.exc_info()[2])
def _AddInitMethod(message_descriptor, cls):
@@ -498,7 +487,7 @@ def _AddInitMethod(message_descriptor, cls):
enum_type with the same name. If the value is not a string, it's
returned as-is. (No conversion or bounds-checking is done.)
"""
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
try:
return enum_type.values_by_name[value].number
except KeyError:
@@ -1305,6 +1294,14 @@ def _AddIsInitializedMethod(message_descriptor, cls):
cls.FindInitializationErrors = FindInitializationErrors
+def _FullyQualifiedClassName(klass):
+ module = klass.__module__
+ name = getattr(klass, '__qualname__', klass.__name__)
+ if module in (None, 'builtins', '__builtin__'):
+ return name
+ return module + '.' + name
+
+
def _AddMergeFromMethod(cls):
LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
@@ -1313,7 +1310,8 @@ def _AddMergeFromMethod(cls):
if not isinstance(msg, cls):
raise TypeError(
'Parameter to MergeFrom() must be instance of same class: '
- 'expected %s got %s.' % (cls.__name__, msg.__class__.__name__))
+ 'expected %s got %s.' % (_FullyQualifiedClassName(cls),
+ _FullyQualifiedClassName(msg.__class__)))
assert msg is not self
self._Modified()
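Besides dropping six, python_message.py now reports fully qualified class names in the MergeFrom() type error, which helps when two generated classes share a short name but live in different _pb2 modules. An illustrative copy of the new helper, exercised on ordinary classes:

    def fully_qualified_class_name(klass):
        # Mirrors the _FullyQualifiedClassName helper added by this patch.
        module = klass.__module__
        name = getattr(klass, '__qualname__', klass.__name__)
        if module in (None, 'builtins', '__builtin__'):
            return name
        return module + '.' + name

    class Example:
        pass

    assert fully_qualified_class_name(int) == 'int'
    assert fully_qualified_class_name(Example).endswith('.Example')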
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py b/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
index eb66f9f6fb..9b9b859e1e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
@@ -45,16 +45,8 @@ TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
__author__ = 'robinson@google.com (Will Robinson)'
-try:
- import ctypes
-except Exception: # pylint: disable=broad-except
- ctypes = None
- import struct
+import ctypes
import numbers
-import six
-
-if six.PY3:
- long = int
from google.protobuf.internal import api_implementation
from google.protobuf.internal import decoder
@@ -66,10 +58,7 @@ _FieldDescriptor = descriptor.FieldDescriptor
def TruncateToFourByteFloat(original):
- if ctypes:
- return ctypes.c_float(original).value
- else:
- return struct.unpack('<f', struct.pack('<f', original))[0]
+ return ctypes.c_float(original).value
def ToShortestFloat(original):
@@ -162,14 +151,13 @@ class IntValueChecker(object):
def CheckValue(self, proposed_value):
if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), six.integer_types))
+ (proposed_value, type(proposed_value), (int,)))
raise TypeError(message)
if not self._MIN <= int(proposed_value) <= self._MAX:
raise ValueError('Value out of range: %d' % proposed_value)
- # We force 32-bit values to int and 64-bit values to long to make
- # alternate implementations where the distinction is more significant
- # (e.g. the C++ implementation) simpler.
- proposed_value = self._TYPE(proposed_value)
+ # We force all values to int to make alternate implementations where the
+ # distinction is more significant (e.g. the C++ implementation) simpler.
+ proposed_value = int(proposed_value)
return proposed_value
def DefaultValue(self):
@@ -186,7 +174,7 @@ class EnumValueChecker(object):
def CheckValue(self, proposed_value):
if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), six.integer_types))
+ (proposed_value, type(proposed_value), (int,)))
raise TypeError(message)
if int(proposed_value) not in self._enum_type.values_by_number:
raise ValueError('Unknown enum value: %d' % proposed_value)
@@ -204,9 +192,9 @@ class UnicodeValueChecker(object):
"""
def CheckValue(self, proposed_value):
- if not isinstance(proposed_value, (bytes, six.text_type)):
+ if not isinstance(proposed_value, (bytes, str)):
message = ('%.1024r has type %s, but expected one of: %s' %
- (proposed_value, type(proposed_value), (bytes, six.text_type)))
+ (proposed_value, type(proposed_value), (bytes, str)))
raise TypeError(message)
# If the value is of type 'bytes' make sure that it is valid UTF-8 data.
@@ -237,25 +225,21 @@ class Int32ValueChecker(IntValueChecker):
# efficient.
_MIN = -2147483648
_MAX = 2147483647
- _TYPE = int
class Uint32ValueChecker(IntValueChecker):
_MIN = 0
_MAX = (1 << 32) - 1
- _TYPE = int
class Int64ValueChecker(IntValueChecker):
_MIN = -(1 << 63)
_MAX = (1 << 63) - 1
- _TYPE = long
class Uint64ValueChecker(IntValueChecker):
_MIN = 0
_MAX = (1 << 64) - 1
- _TYPE = long
# The max 4 bytes float is about 3.4028234663852886e+38
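type_checkers.py now imports ctypes unconditionally, so 32-bit float truncation always goes through ctypes.c_float rather than the struct pack/unpack fallback, and the int/long checker split collapses into plain int. A sketch of what the truncation does:

    import ctypes

    def truncate_to_four_byte_float(value):
        # Round-trip a Python double through a C single-precision float.
        return ctypes.c_float(value).value

    assert truncate_to_four_byte_float(0.5) == 0.5   # exactly representable
    assert truncate_to_four_byte_float(0.1) != 0.1   # loses precision at 32 bits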
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py b/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
index 6f55d6b17b..f14f1527ca 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
@@ -43,7 +43,6 @@ __author__ = 'jieluo@google.com (Jie Luo)'
import calendar
from datetime import datetime
from datetime import timedelta
-import six
try:
# Since python 3
@@ -143,6 +142,8 @@ class Timestamp(object):
Raises:
ValueError: On parsing problems.
"""
+ if not isinstance(value, str):
+ raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
timezone_offset = value.find('Z')
if timezone_offset == -1:
timezone_offset = value.find('+')
@@ -303,6 +304,8 @@ class Duration(object):
Raises:
ValueError: On parsing problems.
"""
+ if not isinstance(value, str):
+ raise ValueError('Duration JSON value not a string: {!r}'.format(value))
if len(value) < 1 or value[-1] != 's':
raise ValueError(
'Duration must end with letter "s": {0}.'.format(value))
@@ -428,6 +431,8 @@ class FieldMask(object):
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
+ if not isinstance(value, str):
+ raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
self.Clear()
if value:
for path in value.split(','):
@@ -712,9 +717,6 @@ def _AddFieldPaths(node, prefix, field_mask):
_AddFieldPaths(node[name], child_path, field_mask)
-_INT_OR_FLOAT = six.integer_types + (float,)
-
-
def _SetStructValue(struct_value, value):
if value is None:
struct_value.null_value = 0
@@ -722,9 +724,9 @@ def _SetStructValue(struct_value, value):
# Note: this check must come before the number check because in Python
# True and False are also considered numbers.
struct_value.bool_value = value
- elif isinstance(value, six.string_types):
+ elif isinstance(value, str):
struct_value.string_value = value
- elif isinstance(value, _INT_OR_FLOAT):
+ elif isinstance(value, (int, float)):
struct_value.number_value = value
elif isinstance(value, (dict, Struct)):
struct_value.struct_value.Clear()
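The well_known_types.py hunks add explicit type checks so that FromJsonString() fails fast with a ValueError on non-string input instead of failing later with a less obvious error. Usage sketch (assumes the generated timestamp_pb2 module that ships with the runtime):

    from google.protobuf import timestamp_pb2

    ts = timestamp_pb2.Timestamp()
    ts.FromJsonString('1970-01-01T00:00:10Z')
    assert ts.seconds == 10

    try:
        ts.FromJsonString(12345)          # not a str: rejected up front
    except ValueError as err:
        print(err)                        # Timestamp JSON value not a string: 12345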
diff --git a/contrib/python/protobuf/py3/google/protobuf/json_format.py b/contrib/python/protobuf/py3/google/protobuf/json_format.py
index 965614d803..b3e85933e9 100644
--- a/contrib/python/protobuf/py3/google/protobuf/json_format.py
+++ b/contrib/python/protobuf/py3/google/protobuf/json_format.py
@@ -42,24 +42,15 @@ Simple usage example:
__author__ = 'jieluo@google.com (Jie Luo)'
-# pylint: disable=g-statement-before-imports,g-import-not-at-top
-try:
- from collections import OrderedDict
-except ImportError:
- from ordereddict import OrderedDict # PY26
-# pylint: enable=g-statement-before-imports,g-import-not-at-top
import base64
+from collections import OrderedDict
import json
import math
-
from operator import methodcaller
-
import re
import sys
-import six
-
from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
from google.protobuf import symbol_database
@@ -78,9 +69,8 @@ _INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'
-_UNPAIRED_SURROGATE_PATTERN = re.compile(six.u(
- r'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
-))
+_UNPAIRED_SURROGATE_PATTERN = re.compile(
+ u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]')
_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
@@ -426,7 +416,8 @@ def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
Raises::
ParseError: On JSON parsing problems.
"""
- if not isinstance(text, six.text_type): text = text.decode('utf-8')
+ if not isinstance(text, str):
+ text = text.decode('utf-8')
try:
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
@@ -455,7 +446,7 @@ def ParseDict(js_dict,
return message
-_INT_OR_FLOAT = six.integer_types + (float,)
+_INT_OR_FLOAT = (int, float)
class _Parser(object):
@@ -646,7 +637,7 @@ class _Parser(object):
message.null_value = 0
elif isinstance(value, bool):
message.bool_value = value
- elif isinstance(value, six.string_types):
+ elif isinstance(value, str):
message.string_value = value
elif isinstance(value, _INT_OR_FLOAT):
message.number_value = value
@@ -729,7 +720,7 @@ def _ConvertScalarFieldValue(value, field, require_str=False):
return _ConvertBool(value, require_str)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
- if isinstance(value, six.text_type):
+ if isinstance(value, str):
encoded = value.encode('utf-8')
else:
encoded = value
@@ -776,7 +767,7 @@ def _ConvertInteger(value):
if isinstance(value, float) and not value.is_integer():
raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
- if isinstance(value, six.text_type) and value.find(' ') != -1:
+ if isinstance(value, str) and value.find(' ') != -1:
raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))
if isinstance(value, bool):
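In json_format.py the OrderedDict fallback for pre-2.7 Pythons and all six usages are gone; Parse() still accepts either str or UTF-8 encoded bytes, only the type check changes. Usage sketch with a well-known type that ships with the runtime (struct_pb2.Struct):

    from google.protobuf import json_format
    from google.protobuf import struct_pb2

    msg = struct_pb2.Struct()
    json_format.Parse('{"name": "ydb", "answer": 42}', msg)    # str input
    json_format.Parse(b'{"flag": true}', struct_pb2.Struct())  # bytes decoded as UTF-8

    assert msg['name'] == 'ydb' and msg['answer'] == 42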
diff --git a/contrib/python/protobuf/py3/google/protobuf/message_factory.py b/contrib/python/protobuf/py3/google/protobuf/message_factory.py
index 7dfaec88e1..3656fa6874 100644
--- a/contrib/python/protobuf/py3/google/protobuf/message_factory.py
+++ b/contrib/python/protobuf/py3/google/protobuf/message_factory.py
@@ -98,8 +98,6 @@ class MessageFactory(object):
A class describing the passed in descriptor.
"""
descriptor_name = descriptor.name
- if str is bytes: # PY2
- descriptor_name = descriptor.name.encode('ascii', 'ignore')
result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
descriptor_name,
(message.Message,),
diff --git a/contrib/python/protobuf/py3/google/protobuf/proto_api.h b/contrib/python/protobuf/py3/google/protobuf/proto_api.h
index c869bce058..2e2156a56e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/proto_api.h
+++ b/contrib/python/protobuf/py3/google/protobuf/proto_api.h
@@ -78,6 +78,18 @@ struct PyProto_API {
// With the current implementation, only empty messages are in this case.
virtual Message* GetMutableMessagePointer(PyObject* msg) const = 0;
+ // If the passed object is a Python Message Descriptor, returns its internal
+ // pointer.
+ // Otherwise, returns NULL with an exception set.
+ virtual const Descriptor* MessageDescriptor_AsDescriptor(
+ PyObject* desc) const = 0;
+
+ // If the passed object is a Python Enum Descriptor, returns its internal
+ // pointer.
+ // Otherwise, returns NULL with an exception set.
+ virtual const EnumDescriptor* EnumDescriptor_AsDescriptor(
+ PyObject* enum_desc) const = 0;
+
// Expose the underlying DescriptorPool and MessageFactory to enable C++ code
// to create Python-compatible message.
virtual const DescriptorPool* GetDefaultDescriptorPool() const = 0;
@@ -108,6 +120,15 @@ struct PyProto_API {
// python objects referencing the same C++ object.
virtual PyObject* NewMessageOwnedExternally(
Message* msg, PyObject* py_message_factory) const = 0;
+
+ // Returns a new reference for the given DescriptorPool.
+ // The returned object does not manage the C++ DescriptorPool: it is the
+ // responsibility of the caller to keep it alive.
+ // As long as the returned Python DescriptorPool object is kept alive,
+ // functions that process C++ descriptors or messages created from this pool
+ // can work and return their Python counterparts.
+ virtual PyObject* DescriptorPool_FromPool(
+ const google::protobuf::DescriptorPool* pool) const = 0;
};
inline const char* PyProtoAPICapsuleName() {
diff --git a/contrib/python/protobuf/py3/google/protobuf/proto_builder.py b/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
index 2b7dddcbd3..a4667ce63e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
+++ b/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
@@ -30,10 +30,7 @@
"""Dynamic Protobuf class creator."""
-try:
- from collections import OrderedDict
-except ImportError:
- from ordereddict import OrderedDict #PY26
+from collections import OrderedDict
import hashlib
import os
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
index de788afa2f..cf18a1cbb4 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
@@ -913,8 +913,9 @@ static int SetContainingType(PyBaseDescriptor *self, PyObject *value,
}
static PyObject* GetExtensionScope(PyBaseDescriptor *self, void *closure) {
+ const auto* desc = _GetDescriptor(self);
const Descriptor* extension_scope =
- _GetDescriptor(self)->extension_scope();
+ desc->is_extension() ? desc->extension_scope() : nullptr;
if (extension_scope) {
return PyMessageDescriptor_FromDescriptor(extension_scope);
} else {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
index a53411e797..741dbfad5b 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
@@ -111,6 +111,8 @@ static PyDescriptorPool* _CreateDescriptorPool() {
cpool->error_collector = nullptr;
cpool->underlay = NULL;
cpool->database = NULL;
+ cpool->is_owned = false;
+ cpool->is_mutable = false;
cpool->descriptor_options = new std::unordered_map<const void*, PyObject*>();
@@ -138,6 +140,8 @@ static PyDescriptorPool* PyDescriptorPool_NewWithUnderlay(
return NULL;
}
cpool->pool = new DescriptorPool(underlay);
+ cpool->is_owned = true;
+ cpool->is_mutable = true;
cpool->underlay = underlay;
if (!descriptor_pool_map->insert(
@@ -159,10 +163,13 @@ static PyDescriptorPool* PyDescriptorPool_NewWithDatabase(
if (database != NULL) {
cpool->error_collector = new BuildFileErrorCollector();
cpool->pool = new DescriptorPool(database, cpool->error_collector);
+ cpool->is_mutable = false;
cpool->database = database;
} else {
cpool->pool = new DescriptorPool();
+ cpool->is_mutable = true;
}
+ cpool->is_owned = true;
if (!descriptor_pool_map->insert(std::make_pair(cpool->pool, cpool)).second) {
// Should never happen -- would indicate an internal error / bug.
@@ -201,7 +208,9 @@ static void Dealloc(PyObject* pself) {
}
delete self->descriptor_options;
delete self->database;
- delete self->pool;
+ if (self->is_owned) {
+ delete self->pool;
+ }
delete self->error_collector;
Py_TYPE(self)->tp_free(pself);
}
@@ -582,6 +591,12 @@ static PyObject* AddSerializedFile(PyObject* pself, PyObject* serialized_pb) {
"Add your file to the underlying database.");
return NULL;
}
+ if (!self->is_mutable) {
+ PyErr_SetString(
+ PyExc_ValueError,
+ "This DescriptorPool is not mutable and cannot add new definitions.");
+ return nullptr;
+ }
if (PyBytes_AsStringAndSize(serialized_pb, &message_type, &message_len) < 0) {
return NULL;
@@ -606,8 +621,9 @@ static PyObject* AddSerializedFile(PyObject* pself, PyObject* serialized_pb) {
BuildFileErrorCollector error_collector;
const FileDescriptor* descriptor =
- self->pool->BuildFileCollectingErrors(file_proto,
- &error_collector);
+ // Pool is mutable, we can remove the "const".
+ const_cast<DescriptorPool*>(self->pool)
+ ->BuildFileCollectingErrors(file_proto, &error_collector);
if (descriptor == NULL) {
PyErr_Format(PyExc_TypeError,
"Couldn't build proto file into descriptor pool!\n%s",
@@ -615,6 +631,7 @@ static PyObject* AddSerializedFile(PyObject* pself, PyObject* serialized_pb) {
return NULL;
}
+
return PyFileDescriptor_FromDescriptorWithSerializedPb(
descriptor, serialized_pb);
}
@@ -768,6 +785,33 @@ PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool) {
return it->second;
}
+PyObject* PyDescriptorPool_FromPool(const DescriptorPool* pool) {
+ PyDescriptorPool* existing_pool = GetDescriptorPool_FromPool(pool);
+ if (existing_pool != nullptr) {
+ Py_INCREF(existing_pool);
+ return reinterpret_cast<PyObject*>(existing_pool);
+ } else {
+ PyErr_Clear();
+ }
+
+ PyDescriptorPool* cpool = cdescriptor_pool::_CreateDescriptorPool();
+ if (cpool == nullptr) {
+ return nullptr;
+ }
+ cpool->pool = const_cast<DescriptorPool*>(pool);
+ cpool->is_owned = false;
+ cpool->is_mutable = false;
+ cpool->underlay = nullptr;
+
+ if (!descriptor_pool_map->insert(std::make_pair(cpool->pool, cpool)).second) {
+ // Should never happen -- We already checked the existence above.
+ PyErr_SetString(PyExc_ValueError, "DescriptorPool already registered");
+ return nullptr;
+ }
+
+ return reinterpret_cast<PyObject*>(cpool);
+}
+
} // namespace python
} // namespace protobuf
} // namespace google
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
index 2d456f9088..48658d3e88 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
@@ -54,10 +54,18 @@ struct CMessageClass;
// "Methods" that interacts with this DescriptorPool are in the cdescriptor_pool
// namespace.
typedef struct PyDescriptorPool {
- PyObject_HEAD
+ PyObject_HEAD;
// The C++ pool containing Descriptors.
- DescriptorPool* pool;
+ const DescriptorPool* pool;
+
+ // True if we should free the pointer above.
+ bool is_owned;
+
+ // True if this pool accepts new proto definitions.
+ // In this case it is allowed to const_cast<DescriptorPool*>(pool).
+ bool is_mutable;
+
// The error collector to store error info. Can be NULL. This pointer is
// owned.
@@ -116,16 +124,20 @@ PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg);
} // namespace cdescriptor_pool
-// Retrieve the global descriptor pool owned by the _message module.
+// Retrieves the global descriptor pool owned by the _message module.
// This is the one used by pb2.py generated modules.
// Returns a *borrowed* reference.
// "Default" pool used to register messages from _pb2.py modules.
PyDescriptorPool* GetDefaultDescriptorPool();
-// Retrieve the python descriptor pool owning a C++ descriptor pool.
+// Retrieves an existing python descriptor pool owning the C++ descriptor pool.
// Returns a *borrowed* reference.
PyDescriptorPool* GetDescriptorPool_FromPool(const DescriptorPool* pool);
+// Wraps a C++ descriptor pool in a Python object, creates it if necessary.
+// Returns a new reference.
+PyObject* PyDescriptorPool_FromPool(const DescriptorPool* pool);
+
// Initialize objects used by this module.
bool InitDescriptorPool();
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
index 8b41ca47dd..a9e464d7f9 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
@@ -110,6 +110,10 @@ class MessageReflectionFriend {
const std::vector<const FieldDescriptor*>& fields) {
lhs->GetReflection()->UnsafeShallowSwapFields(lhs, rhs, fields);
}
+ static bool IsLazyField(const Reflection* reflection,
+ const FieldDescriptor* field) {
+ return reflection->IsLazyField(field);
+ }
};
static PyObject* kDESCRIPTOR;
@@ -478,6 +482,18 @@ static PyObject* GetAttr(CMessageClass* self, PyObject* name) {
} // namespace message_meta
+// Protobuf has a 64MB limit built in, this variable will override this. Please
+// do not enable this unless you fully understand the implications: protobufs
+// must all be kept in memory at the same time, so if they grow too big you may
+// get OOM errors. The protobuf APIs do not provide any tools for processing
+// protobufs in chunks. If you have protos this big you should break them up if
+// it is at all convenient to do so.
+#ifdef PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
+static bool allow_oversize_protos = true;
+#else
+static bool allow_oversize_protos = false;
+#endif
+
static PyTypeObject _CMessageClass_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) FULL_MODULE_NAME
".MessageMeta", // tp_name
@@ -877,6 +893,7 @@ int FixupMessageAfterMerge(CMessage* self) {
if (!self->composite_fields) {
return 0;
}
+ PyMessageFactory* factory = GetFactoryForMessage(self);
for (const auto& item : *self->composite_fields) {
const FieldDescriptor* descriptor = item.first;
if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE &&
@@ -890,8 +907,8 @@ int FixupMessageAfterMerge(CMessage* self) {
if (reflection->HasField(*message, descriptor)) {
// Message used to be read_only, but is no longer. Get the new pointer
// and record it.
- Message* mutable_message =
- reflection->MutableMessage(message, descriptor, nullptr);
+ Message* mutable_message = reflection->MutableMessage(
+ message, descriptor, factory->message_factory);
cmsg->message = mutable_message;
cmsg->read_only = false;
if (FixupMessageAfterMerge(cmsg) < 0) {
@@ -1052,6 +1069,9 @@ int DeleteRepeatedField(
}
}
+ Arena* arena = Arena::InternalHelper<Message>::GetArenaForAllocation(message);
+ GOOGLE_DCHECK_EQ(arena, nullptr)
+ << "python protobuf is expected to be allocated from heap";
// Remove items, starting from the end.
for (; length > to; length--) {
if (field_descriptor->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
@@ -1060,7 +1080,18 @@ int DeleteRepeatedField(
}
// It seems that RemoveLast() is less efficient for sub-messages, and
// the memory is not completely released. Prefer ReleaseLast().
- Message* sub_message = reflection->ReleaseLast(message, field_descriptor);
+ //
+ // To work around a debug hardening (PROTOBUF_FORCE_COPY_IN_RELEASE),
+ // explicitly use UnsafeArenaReleaseLast. To not break rare use cases where
+ // arena is used, we fallback to ReleaseLast (but GOOGLE_DCHECK to find/fix it).
+ //
+ // Note that arena is likely null and GOOGLE_DCHECK and ReleaesLast might be
+ // redundant. The current approach takes extra cautious path not to disrupt
+ // production.
+ Message* sub_message =
+ (arena == nullptr)
+ ? reflection->UnsafeArenaReleaseLast(message, field_descriptor)
+ : reflection->ReleaseLast(message, field_descriptor);
// If there is a live weak reference to an item being removed, we "Release"
// it, and it takes ownership of the message.
if (CMessage* released = self->MaybeReleaseSubMessage(sub_message)) {
@@ -1914,18 +1945,6 @@ static PyObject* CopyFrom(CMessage* self, PyObject* arg) {
Py_RETURN_NONE;
}
-// Protobuf has a 64MB limit built in, this variable will override this. Please
-// do not enable this unless you fully understand the implications: protobufs
-// must all be kept in memory at the same time, so if they grow too big you may
-// get OOM errors. The protobuf APIs do not provide any tools for processing
-// protobufs in chunks. If you have protos this big you should break them up if
-// it is at all convenient to do so.
-#ifdef PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
-static bool allow_oversize_protos = true;
-#else
-static bool allow_oversize_protos = false;
-#endif
-
// Provide a method in the module to set allow_oversize_protos to a boolean
// value. This method returns the newly value of allow_oversize_protos.
PyObject* SetAllowOversizeProtos(PyObject* m, PyObject* arg) {
@@ -2265,8 +2284,6 @@ CMessage* InternalGetSubMessage(
CMessage* self, const FieldDescriptor* field_descriptor) {
const Reflection* reflection = self->message->GetReflection();
PyMessageFactory* factory = GetFactoryForMessage(self);
- const Message& sub_message = reflection->GetMessage(
- *self->message, field_descriptor, factory->message_factory);
CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
factory, field_descriptor->message_type());
@@ -2284,7 +2301,20 @@ CMessage* InternalGetSubMessage(
Py_INCREF(self);
cmsg->parent = self;
cmsg->parent_field_descriptor = field_descriptor;
- cmsg->read_only = !reflection->HasField(*self->message, field_descriptor);
+ if (reflection->HasField(*self->message, field_descriptor)) {
+ // Force triggering MutableMessage to set the lazy message 'Dirty'
+ if (MessageReflectionFriend::IsLazyField(reflection, field_descriptor)) {
+ Message* sub_message = reflection->MutableMessage(
+ self->message, field_descriptor, factory->message_factory);
+ cmsg->read_only = false;
+ cmsg->message = sub_message;
+ return cmsg;
+ }
+ } else {
+ cmsg->read_only = true;
+ }
+ const Message& sub_message = reflection->GetMessage(
+ *self->message, field_descriptor, factory->message_factory);
cmsg->message = const_cast<Message*>(&sub_message);
return cmsg;
}
@@ -2867,20 +2897,36 @@ Message* PyMessage_GetMutableMessagePointer(PyObject* msg) {
return cmsg->message;
}
-PyObject* PyMessage_New(const Descriptor* descriptor,
- PyObject* py_message_factory) {
+// Returns a new reference to the MessageClass to use for message creation.
+// - if a PyMessageFactory is passed, use it.
+// - Otherwise, if a PyDescriptorPool was created, use its factory.
+static CMessageClass* GetMessageClassFromDescriptor(
+ const Descriptor* descriptor, PyObject* py_message_factory) {
PyMessageFactory* factory = nullptr;
if (py_message_factory == nullptr) {
- factory = GetDescriptorPool_FromPool(descriptor->file()->pool())
- ->py_message_factory;
+ PyDescriptorPool* pool =
+ GetDescriptorPool_FromPool(descriptor->file()->pool());
+ if (pool == nullptr) {
+ PyErr_SetString(PyExc_TypeError,
+ "Unknown descriptor pool; C++ users should call "
+ "DescriptorPool_FromPool and keep it alive");
+ return nullptr;
+ }
+ factory = pool->py_message_factory;
} else if (PyObject_TypeCheck(py_message_factory, &PyMessageFactory_Type)) {
factory = reinterpret_cast<PyMessageFactory*>(py_message_factory);
} else {
PyErr_SetString(PyExc_TypeError, "Expected a MessageFactory");
return nullptr;
}
- auto* message_class =
- message_factory::GetOrCreateMessageClass(factory, descriptor);
+
+ return message_factory::GetOrCreateMessageClass(factory, descriptor);
+}
+
+PyObject* PyMessage_New(const Descriptor* descriptor,
+ PyObject* py_message_factory) {
+ CMessageClass* message_class =
+ GetMessageClassFromDescriptor(descriptor, py_message_factory);
if (message_class == nullptr) {
return nullptr;
}
@@ -2895,20 +2941,8 @@ PyObject* PyMessage_New(const Descriptor* descriptor,
PyObject* PyMessage_NewMessageOwnedExternally(Message* message,
PyObject* py_message_factory) {
- if (py_message_factory) {
- PyErr_SetString(PyExc_NotImplementedError,
- "Default message_factory=NULL is the only supported value");
- return nullptr;
- }
- if (message->GetReflection()->GetMessageFactory() !=
- MessageFactory::generated_factory()) {
- PyErr_SetString(PyExc_TypeError,
- "Message pointer was not created from the default factory");
- return nullptr;
- }
-
- CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
- GetDefaultDescriptorPool()->py_message_factory, message->GetDescriptor());
+ CMessageClass* message_class = GetMessageClassFromDescriptor(
+ message->GetDescriptor(), py_message_factory);
if (message_class == nullptr) {
return nullptr;
}
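message.cc moves the allow_oversize_protos flag above its first use without changing its behaviour: the default 64MB parse limit stays in place unless the module-level toggle is flipped. From Python, the switch is only reachable when the C++ backend is active; a hedged usage sketch (the pyext._message module and its SetAllowOversizeProtos entry point exist only in C++-backed builds):

    from google.protobuf.internal import api_implementation

    if api_implementation.Type() == 'cpp':
        from google.protobuf.pyext import _message
        _message.SetAllowOversizeProtos(True)   # lift the 64MB parse limit (use with care)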
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
index b5975f76c5..4125dd73a1 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
@@ -31,6 +31,7 @@
#include <Python.h>
#include <google/protobuf/message_lite.h>
+#include <google/protobuf/pyext/descriptor.h>
#include <google/protobuf/pyext/descriptor_pool.h>
#include <google/protobuf/pyext/message.h>
#include <google/protobuf/pyext/message_factory.h>
@@ -46,6 +47,15 @@ struct ApiImplementation : google::protobuf::python::PyProto_API {
google::protobuf::Message* GetMutableMessagePointer(PyObject* msg) const override {
return google::protobuf::python::PyMessage_GetMutableMessagePointer(msg);
}
+ const google::protobuf::Descriptor* MessageDescriptor_AsDescriptor(
+ PyObject* desc) const override {
+ return google::protobuf::python::PyMessageDescriptor_AsDescriptor(desc);
+ }
+ const google::protobuf::EnumDescriptor* EnumDescriptor_AsDescriptor(
+ PyObject* enum_desc) const override {
+ return google::protobuf::python::PyEnumDescriptor_AsDescriptor(enum_desc);
+ }
+
const google::protobuf::DescriptorPool* GetDefaultDescriptorPool() const override {
return google::protobuf::python::GetDefaultDescriptorPool()->pool;
}
@@ -63,6 +73,10 @@ struct ApiImplementation : google::protobuf::python::PyProto_API {
return google::protobuf::python::PyMessage_NewMessageOwnedExternally(
msg, py_message_factory);
}
+ PyObject* DescriptorPool_FromPool(
+ const google::protobuf::DescriptorPool* pool) const override {
+ return google::protobuf::python::PyDescriptorPool_FromPool(pool);
+ }
};
} // namespace
diff --git a/contrib/python/protobuf/py3/google/protobuf/text_encoding.py b/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
index 39898765f2..759cf11f62 100644
--- a/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
+++ b/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
@@ -31,8 +31,6 @@
"""Encoding related utilities."""
import re
-import six
-
_cescape_chr_to_symbol_map = {}
_cescape_chr_to_symbol_map[9] = r'\t' # optional escape
_cescape_chr_to_symbol_map[10] = r'\n' # optional escape
@@ -72,14 +70,11 @@ def CEscape(text, as_utf8):
# escapes whereas our C++ unescaping function allows hex escapes to be any
# length. So, "\0011".encode('string_escape') ends up being "\\x011", which
# will be decoded in C++ as a single-character string with char code 0x11.
- if six.PY3:
- text_is_unicode = isinstance(text, str)
- if as_utf8 and text_is_unicode:
- # We're already unicode, no processing beyond control char escapes.
- return text.translate(_cescape_chr_to_symbol_map)
- ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
- else:
- ord_ = ord # PY2
+ text_is_unicode = isinstance(text, str)
+ if as_utf8 and text_is_unicode:
+ # We're already unicode, no processing beyond control char escapes.
+ return text.translate(_cescape_chr_to_symbol_map)
+ ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
if as_utf8:
return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
@@ -109,9 +104,7 @@ def CUnescape(text):
# allow single-digit hex escapes (like '\xf').
result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
- if six.PY2:
- return result.decode('string_escape')
- return (result.encode('utf-8') # PY3: Make it bytes to allow decode.
+ return (result.encode('utf-8') # Make it bytes to allow decode.
.decode('unicode_escape')
# Make it bytes again to return the proper type.
.encode('raw_unicode_escape'))
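text_encoding.py loses its PY2 branches: CEscape() now distinguishes str from bytes directly, and CUnescape() always returns bytes. A round-trip sketch (the exact escaped form shown is what the current escape tables produce, assuming they are unchanged by this patch):

    from google.protobuf.text_encoding import CEscape, CUnescape

    escaped = CEscape(b'\x00\x01abc\n', as_utf8=False)
    assert escaped == r'\000\001abc\n'
    assert CUnescape(escaped) == b'\x00\x01abc\n'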
diff --git a/contrib/python/protobuf/py3/google/protobuf/text_format.py b/contrib/python/protobuf/py3/google/protobuf/text_format.py
index 9c4ca90ee6..412385c26f 100644
--- a/contrib/python/protobuf/py3/google/protobuf/text_format.py
+++ b/contrib/python/protobuf/py3/google/protobuf/text_format.py
@@ -48,16 +48,12 @@ import encodings.unicode_escape # pylint: disable=unused-import
import io
import math
import re
-import six
from google.protobuf.internal import decoder
from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
from google.protobuf import text_encoding
-if six.PY3:
- long = int # pylint: disable=redefined-builtin,invalid-name
-
# pylint: disable=g-import-not-at-top
__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
'PrintFieldValue', 'Merge', 'MessageToBytes']
@@ -102,15 +98,9 @@ class ParseError(Error):
class TextWriter(object):
def __init__(self, as_utf8):
- if six.PY2:
- self._writer = io.BytesIO()
- else:
- self._writer = io.StringIO()
+ self._writer = io.StringIO()
def write(self, val):
- if six.PY2:
- if isinstance(val, six.text_type):
- val = val.encode('utf-8')
return self._writer.write(val)
def close(self):
@@ -562,7 +552,7 @@ class _Printer(object):
# Note: this is called only when value has at least one element.
self._PrintFieldName(field)
self.out.write(' [')
- for i in six.moves.range(len(value) - 1):
+ for i in range(len(value) - 1):
self.PrintFieldValue(field, value[i])
self.out.write(', ')
self.PrintFieldValue(field, value[-1])
@@ -608,7 +598,7 @@ class _Printer(object):
out.write(str(value))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
out.write('\"')
- if isinstance(value, six.text_type) and (six.PY2 or not self.as_utf8):
+ if isinstance(value, str) and not self.as_utf8:
out_value = value.encode('utf-8')
else:
out_value = value
@@ -839,12 +829,9 @@ class _Parser(object):
ParseError: On text parsing problems.
"""
# Tokenize expects native str lines.
- if six.PY2:
- str_lines = (line if isinstance(line, str) else line.encode('utf-8')
- for line in lines)
- else:
- str_lines = (line if isinstance(line, str) else line.decode('utf-8')
- for line in lines)
+ str_lines = (
+ line if isinstance(line, str) else line.decode('utf-8')
+ for line in lines)
tokenizer = Tokenizer(str_lines)
while not tokenizer.AtEnd():
self._MergeField(tokenizer, message)
@@ -1395,17 +1382,14 @@ class Tokenizer(object):
def TryConsumeInteger(self):
try:
- # Note: is_long only affects value type, not whether an error is raised.
self.ConsumeInteger()
return True
except ParseError:
return False
- def ConsumeInteger(self, is_long=False):
+ def ConsumeInteger(self):
"""Consumes an integer number.
- Args:
- is_long: True if the value should be returned as a long integer.
Returns:
The integer parsed.
@@ -1413,7 +1397,7 @@ class Tokenizer(object):
ParseError: If an integer couldn't be consumed.
"""
try:
- result = _ParseAbstractInteger(self.token, is_long=is_long)
+ result = _ParseAbstractInteger(self.token)
except ValueError as e:
raise self.ParseError(str(e))
self.NextToken()
@@ -1476,7 +1460,7 @@ class Tokenizer(object):
"""
the_bytes = self.ConsumeByteString()
try:
- return six.text_type(the_bytes, 'utf-8')
+ return str(the_bytes, 'utf-8')
except UnicodeDecodeError as e:
raise self._StringParseError(e)
@@ -1650,14 +1634,6 @@ def _ConsumeUint64(tokenizer):
return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
-def _TryConsumeInteger(tokenizer, is_signed=False, is_long=False):
- try:
- _ConsumeInteger(tokenizer, is_signed=is_signed, is_long=is_long)
- return True
- except ParseError:
- return False
-
-
def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
"""Consumes an integer number from tokenizer.
@@ -1695,7 +1671,7 @@ def ParseInteger(text, is_signed=False, is_long=False):
ValueError: Thrown Iff the text is not a valid integer.
"""
# Do the actual parsing. Exception handling is propagated to caller.
- result = _ParseAbstractInteger(text, is_long=is_long)
+ result = _ParseAbstractInteger(text)
# Check if the integer is sane. Exceptions handled by callers.
checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
@@ -1703,12 +1679,11 @@ def ParseInteger(text, is_signed=False, is_long=False):
return result
-def _ParseAbstractInteger(text, is_long=False):
+def _ParseAbstractInteger(text):
"""Parses an integer without checking size/signedness.
Args:
text: The text to parse.
- is_long: True if the value should be returned as a long integer.
Returns:
The integer value.
@@ -1724,13 +1699,7 @@ def _ParseAbstractInteger(text, is_long=False):
# we always use the '0o' prefix for multi-digit numbers starting with 0.
text = c_octal_match.group(1) + '0o' + c_octal_match.group(2)
try:
- # We force 32-bit values to int and 64-bit values to long to make
- # alternate implementations where the distinction is more significant
- # (e.g. the C++ implementation) simpler.
- if is_long:
- return long(text, 0)
- else:
- return int(text, 0)
+ return int(text, 0)
except ValueError:
raise ValueError('Couldn\'t parse integer: %s' % orig_text)
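With the int/long distinction gone, text_format integer parsing ends in a single int(text, 0) call; the only wrinkle kept is rewriting C-style octal literals ('052') into the '0o' prefix that int() accepts. An illustrative re-implementation of that behaviour (not the module's exact code):

    import re

    _C_OCTAL = re.compile(r'(-?)0(\d+)$')

    def parse_abstract_integer(text):
        # Parse decimal, hex (0x...) and C-style octal (0...) literals.
        match = _C_OCTAL.match(text)
        if match:
            text = match.group(1) + '0o' + match.group(2)   # '052' -> '0o52'
        return int(text, 0)

    assert parse_abstract_integer('42') == 42
    assert parse_abstract_integer('0x2A') == 42
    assert parse_abstract_integer('052') == 42
    assert parse_abstract_integer('-052') == -42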