path: root/contrib/python/protobuf/py3
author    nechda <nechda@yandex-team.com>  2024-08-29 23:50:27 +0300
committer nechda <nechda@yandex-team.com>  2024-08-30 00:05:25 +0300
commit    e10d6638f07a82edae3ea8197b9f5c0affcc07ea (patch)
tree      571c38cec05813766a1ad290c9d51ce7ace52919 /contrib/python/protobuf/py3
parent    e79b38f2bbbf78d295d1901d2a79f898022d5224 (diff)
download  ydb-e10d6638f07a82edae3ea8197b9f5c0affcc07ea.tar.gz
Update cpp-protobuf to 22.5
Hi! This PR switches the cpp & python protobuf libraries to version 22.5. If you run into problems after this PR is merged:
1. If canonical tests started failing, re-canonize them.
2. Read the <https://wiki.yandex-team.ru/users/nechda/obnovlenie-cpp-protobuf-22.5/> page describing the main changes.
3. If the wiki page did not help, contact [DEVTOOLSSUPPORT](https://st.yandex-team.ru/DEVTOOLSSUPPORT).

7fecade616c20a841b9e9af7b7998bdfc8d2807d
Diffstat (limited to 'contrib/python/protobuf/py3')
-rw-r--r--  contrib/python/protobuf/py3/.dist-info/METADATA | 21
-rw-r--r--  contrib/python/protobuf/py3/.dist-info/top_level.txt | 1
-rw-r--r--  contrib/python/protobuf/py3/CODE_OF_CONDUCT.md | 3
-rw-r--r--  contrib/python/protobuf/py3/CONTRIBUTING.md | 120
-rw-r--r--  contrib/python/protobuf/py3/CONTRIBUTORS.txt | 107
-rw-r--r--  contrib/python/protobuf/py3/README.md | 160
-rw-r--r--  contrib/python/protobuf/py3/SECURITY.md | 4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/__init__.py | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/descriptor.py | 72
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/descriptor_pool.py | 35
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/__init__.py | 30
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py | 443
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.cc | 78
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py | 7
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/decoder.py | 3
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/extension_dict.py | 5
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/field_mask.py | 333
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/python_message.py | 75
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/python_protobuf.cc | 63
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/testing_refleaks.py | 142
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py | 10
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py | 304
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/internal/wire_format.py | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/json_format.py | 73
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/message.py | 15
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/message_factory.py | 184
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/proto_api.h | 7
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/proto_builder.py | 20
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/__init__.py | 4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc | 39
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.h | 5
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_containers.cc | 53
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.cc | 26
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.h | 5
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc | 75
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.cc | 35
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.h | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/field.cc | 8
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/map_container.cc | 110
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/map_container.h | 6
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message.cc | 172
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message.h | 58
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.cc | 11
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.h | 4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc | 12
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.cc | 23
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.h | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.cc | 52
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.h | 4
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/safe_numerics.h | 5
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/scoped_pyobject_ptr.h | 6
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.cc | 12
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.h | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.cc | 15
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.h | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/python_protobuf.h | 58
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/reflection.py | 2
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/symbol_database.py | 28
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/text_encoding.py | 6
-rw-r--r--  contrib/python/protobuf/py3/google/protobuf/text_format.py | 254
-rw-r--r--  contrib/python/protobuf/py3/ya.make | 77
-rw-r--r--  contrib/python/protobuf/py3/ya.make.inc | 57
63 files changed, 2339 insertions, 1212 deletions
diff --git a/contrib/python/protobuf/py3/.dist-info/METADATA b/contrib/python/protobuf/py3/.dist-info/METADATA
deleted file mode 100644
index 11099f6d54..0000000000
--- a/contrib/python/protobuf/py3/.dist-info/METADATA
+++ /dev/null
@@ -1,21 +0,0 @@
-Metadata-Version: 2.1
-Name: protobuf
-Version: 4.21.7
-Summary: Protocol Buffers
-Home-page: https://developers.google.com/protocol-buffers/
-Download-URL: https://github.com/protocolbuffers/protobuf/releases
-Maintainer: protobuf@googlegroups.com
-Maintainer-email: protobuf@googlegroups.com
-License: BSD-3-Clause
-Platform: UNKNOWN
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Requires-Python: >=3.7
-License-File: LICENSE
-
-Protocol Buffers are Google's data interchange format
-
diff --git a/contrib/python/protobuf/py3/.dist-info/top_level.txt b/contrib/python/protobuf/py3/.dist-info/top_level.txt
deleted file mode 100644
index cb429113e0..0000000000
--- a/contrib/python/protobuf/py3/.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-google
diff --git a/contrib/python/protobuf/py3/CODE_OF_CONDUCT.md b/contrib/python/protobuf/py3/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000000..ba19063083
--- /dev/null
+++ b/contrib/python/protobuf/py3/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+This project is governed by
+[Protobuf's Code of Conduct](https://github.com/protocolbuffers/.github/blob/main/profile/CODE_OF_CONDUCT.md).
+
diff --git a/contrib/python/protobuf/py3/CONTRIBUTING.md b/contrib/python/protobuf/py3/CONTRIBUTING.md
new file mode 100644
index 0000000000..a16d63c65f
--- /dev/null
+++ b/contrib/python/protobuf/py3/CONTRIBUTING.md
@@ -0,0 +1,120 @@
+# Contributing to Protocol Buffers
+
+We welcome some types of contributions to protocol buffers. This doc describes the
+process to contribute patches to protobuf and the general guidelines we
+expect contributors to follow.
+
+## What We Accept
+
+* Bug fixes with unit tests demonstrating the problem are very welcome.
+ We also appreciate bug reports, even when they don't come with a patch.
+ Bug fixes without tests are usually not accepted.
+* New APIs and features with adequate test coverage and documentation
+ may be accepted if they do not compromise backwards
+ compatibility. However there's a fairly high bar of usefulness a new public
+ method must clear before it will be accepted. Features that are fine in
+ isolation are often rejected because they don't have enough impact to justify the
+ conceptual burden and ongoing maintenance cost. It's best to file an issue
+ and get agreement from maintainers on the value of a new feature before
+ working on a PR.
+* Performance optimizations may be accepted if they have convincing benchmarks that demonstrate
+ an improvement and they do not significantly increase complexity.
+* Changes to existing APIs are almost never accepted. Stability and
+ backwards compatibility are paramount. In the unlikely event a breaking change
+ is required, it must usually be implemented in google3 first.
+* Changes to the wire and text formats are never accepted. Any breaking change
+ to these formats would have to be implemented as a completely new format.
+ We cannot begin generating protos that cannot be parsed by existing code.
+
+## Before You Start
+
+We accept patches in the form of github pull requests. If you are new to
+github, please read [How to create github pull requests](https://help.github.com/articles/about-pull-requests/)
+first.
+
+### Contributor License Agreements
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution,
+this simply gives us permission to use and redistribute your contributions
+as part of the project.
+
+* If you are an individual writing original source code and you're sure you
+ own the intellectual property, then you'll need to sign an [individual CLA](https://cla.developers.google.com/about/google-individual?csw=1).
+* If you work for a company that wants to allow you to contribute your work,
+ then you'll need to sign a [corporate CLA](https://cla.developers.google.com/about/google-corporate?csw=1).
+
+### Coding Style
+
+This project follows [Google’s Coding Style Guides](https://github.com/google/styleguide).
+Before sending out your pull request, please familiarize yourself with the
+corresponding style guides and make sure the proposed code change is style
+conforming.
+
+## Contributing Process
+
+Most pull requests should go to the main branch and the change will be
+included in the next major/minor version release (e.g., 3.6.0 release). If you
+need to include a bug fix in a patch release (e.g., 3.5.2), make sure it’s
+already merged to main, and then create a pull request cherry-picking the
+commits from main branch to the release branch (e.g., branch 3.5.x).
+
+For each pull request, a protobuf team member will be assigned to review the
+pull request. For minor cleanups, the pull request may be merged right away
+after an initial review. For larger changes, you will likely receive multiple
+rounds of comments and it may take some time to complete. We will try to keep
+our response time within 7-days but if you don’t get any response in a few
+days, feel free to comment on the threads to get our attention. We also expect
+you to respond to our comments within a reasonable amount of time. If we don’t
+hear from you for 2 weeks or longer, we may close the pull request. You can
+still send the pull request again once you have time to work on it.
+
+Once a pull request is merged, we will take care of the rest and get it into
+the final release.
+
+## Pull Request Guidelines
+
+* If you are a Googler, it is preferable to first create an internal CL and
+ have it reviewed and submitted. The code propagation process will deliver the
+ change to GitHub.
+* Create small PRs that are narrowly focused on addressing a single concern.
+ We often receive PRs that are trying to fix several things at a time, but if
+ only one fix is considered acceptable, nothing gets merged and both author's
+ & reviewer's time is wasted. Create more PRs to address different concerns and
+ everyone will be happy.
+* For speculative changes, consider opening an issue and discussing it first.
+ If you are suggesting a behavioral or API change, make sure you get explicit
+ support from a protobuf team member before sending us the pull request.
+* Provide a good PR description as a record of what change is being made and
+ why it was made. Link to a GitHub issue if it exists.
+* Don't fix code style and formatting unless you are already changing that
+ line to address an issue. PRs with irrelevant changes won't be merged. If
+ you do want to fix formatting or style, do that in a separate PR.
+* Unless your PR is trivial, you should expect there will be reviewer comments
+ that you'll need to address before merging. We expect you to be reasonably
+ responsive to those comments, otherwise the PR will be closed after 2-3 weeks
+ of inactivity.
+* Maintain clean commit history and use meaningful commit messages. PRs with
+ messy commit history are difficult to review and won't be merged. Use rebase
+ -i upstream/main to curate your commit history and/or to bring in latest
+ changes from main (but avoid rebasing in the middle of a code review).
+* Keep your PR up to date with upstream/main (if there are merge conflicts,
+ we can't really merge your change).
+* All tests need to be passing before your change can be merged. We recommend
+ you run tests locally before creating your PR to catch breakages early on.
+ Ultimately, the green signal will be provided by our testing infrastructure.
+ The reviewer will help you if there are test failures that seem not related
+ to the change you are making.
+
+## Reviewer Guidelines
+
+* Make sure that all tests are passing before approval.
+* Apply the "release notes: yes" label if the pull request's description should
+ be included in the next release (e.g., any new feature / bug fix).
+ Apply the "release notes: no" label if the pull request's description should
+ not be included in the next release (e.g., refactoring changes that does not
+ change behavior, integration from Google internal, updating tests, etc.).
+* Apply the appropriate language label (e.g., C++, Java, Python, etc.) to the
+ pull request. This will make it easier to identify which languages the pull
+ request affects, allowing us to better identify appropriate reviewer, create
+ a better release note, and make it easier to identify issues in the future.
diff --git a/contrib/python/protobuf/py3/CONTRIBUTORS.txt b/contrib/python/protobuf/py3/CONTRIBUTORS.txt
new file mode 100644
index 0000000000..c2da98f2c6
--- /dev/null
+++ b/contrib/python/protobuf/py3/CONTRIBUTORS.txt
@@ -0,0 +1,107 @@
+This file contains a list of people who have made large contributions
+to the public version of Protocol Buffers.
+
+Original Protocol Buffers design and implementation:
+ Sanjay Ghemawat <sanjay@google.com>
+ Jeff Dean <jeff@google.com>
+ Daniel Dulitz <daniel@google.com>
+ Craig Silverstein
+ Paul Haahr <haahr@google.com>
+ Corey Anderson <corin@google.com>
+ (and many others)
+
+Proto2 C++ and Java primary author:
+ Kenton Varda <kenton@google.com>
+
+Proto2 Python primary authors:
+ Will Robinson <robinson@google.com>
+ Petar Petrov <petar@google.com>
+
+Java Nano primary authors:
+ Brian Duff <bduff@google.com>
+ Tom Chao <chaot@google.com>
+ Max Cai <maxtroy@google.com>
+ Ulas Kirazci <ulas@google.com>
+
+Large code contributions:
+ Jason Hsueh <jasonh@google.com>
+ Joseph Schorr <jschorr@google.com>
+ Wenbo Zhu <wenboz@google.com>
+
+Large quantity of code reviews:
+ Scott Bruce <sbruce@google.com>
+ Frank Yellin
+ Neal Norwitz <nnorwitz@google.com>
+ Jeffrey Yasskin <jyasskin@google.com>
+ Ambrose Feinstein <ambrose@google.com>
+
+Documentation:
+ Lisa Carey <lcarey@google.com>
+
+Maven packaging:
+ Gregory Kick <gak@google.com>
+
+Patch contributors:
+ Kevin Ko <kevin.s.ko@gmail.com>
+ * Small patch to handle trailing slashes in --proto_path flag.
+ Johan Euphrosine <proppy@aminche.com>
+ * Small patch to fix Python CallMethod().
+ Ulrich Kunitz <kune@deine-taler.de>
+ * Small optimizations to Python serialization.
+ Leandro Lucarella <llucax@gmail.com>
+ * VI syntax highlighting tweaks.
+ * Fix compiler to not make output executable.
+ Dilip Joseph <dilip.antony.joseph@gmail.com>
+ * Heuristic detection of sub-messages when printing unknown fields in
+ text format.
+ Brian Atkinson <nairb774@gmail.com>
+ * Added @Override annotation to generated Java code where appropriate.
+ Vincent Choinière <Choiniere.Vincent@hydro.qc.ca>
+ * Tru64 support.
+ Monty Taylor <monty.taylor@gmail.com>
+ * Solaris 10 + Sun Studio fixes.
+ Alek Storm <alek.storm@gmail.com>
+ * Slicing support for repeated scalar fields for the Python API.
+ Oleg Smolsky <oleg.smolsky@gmail.com>
+ * MS Visual Studio error format option.
+ * Detect unordered_map in stl_hash.m4.
+ Brian Olson <brianolson@google.com>
+ * gzip/zlib I/O support.
+ Michael Poole <mdpoole@troilus.org>
+ * Fixed warnings about generated constructors not explicitly initializing
+ all fields (only present with certain compiler settings).
+ * Added generation of field number constants.
+ Wink Saville <wink@google.com>
+ * Fixed initialization ordering problem in logging code.
+ Will Pierce <willp@nuclei.com>
+ * Small patch improving performance of in Python serialization.
+ Alexandre Vassalotti <alexandre@peadrop.com>
+ * Emacs mode for Protocol Buffers (editors/protobuf-mode.el).
+ Scott Stafford <scott.stafford@gmail.com>
+ * Added Swap(), SwapElements(), and RemoveLast() to Reflection interface.
+ Alexander Melnikov <alm@sibmail.ru>
+ * HPUX support.
+ Oliver Jowett <oliver.jowett@gmail.com>
+ * Detect whether zlib is new enough in configure script.
+ * Fixes for Solaris 10 32/64-bit confusion.
+ Evan Jones <evanj@mit.edu>
+ * Optimize Java serialization code when writing a small message to a stream.
+ * Optimize Java serialization of strings so that UTF-8 encoding happens only
+ once per string per serialization call.
+ * Clean up some Java warnings.
+ * Fix bug with permanent callbacks that delete themselves when run.
+ Michael Kucharski <m.kucharski@gmail.com>
+ * Added CodedInputStream.getTotalBytesRead().
+ Kacper Kowalik <xarthisius.kk@gmail.com>
+ * Fixed m4/acx_pthread.m4 problem for some Linux distributions.
+ William Orr <will@worrbase.com>
+ * Fixed detection of sched_yield on Solaris.
+ * Added atomicops for Solaris
+ Andrew Paprocki <andrew@ishiboo.com>
+ * Fixed minor IBM xlC compiler build issues
+ * Added atomicops for AIX (POWER)
+ Nipunn Koorapati <nipunn1313@gmail.com>
+ * Provide a type alias field ValueType on EnumTypeWrapper
+ * Match service argument names to abstract interface
+
+
diff --git a/contrib/python/protobuf/py3/README.md b/contrib/python/protobuf/py3/README.md
index 27f22c82c0..3bfeb0f169 100644
--- a/contrib/python/protobuf/py3/README.md
+++ b/contrib/python/protobuf/py3/README.md
@@ -3,128 +3,80 @@ Protocol Buffers - Google's data interchange format
Copyright 2008 Google Inc.
-This directory contains the Python Protocol Buffers runtime library.
+[Protocol Buffers documentation](https://developers.google.com/protocol-buffers/)
-Normally, this directory comes as part of the protobuf package, available
-from:
+Overview
+--------
- https://developers.google.com/protocol-buffers/
+Protocol Buffers (a.k.a., protobuf) are Google's language-neutral,
+platform-neutral, extensible mechanism for serializing structured data. You
+can find [protobuf's documentation on the Google Developers site](https://developers.google.com/protocol-buffers/).
-The complete package includes the C++ source code, which includes the
-Protocol Compiler (protoc). If you downloaded this package from PyPI
-or some other Python-specific source, you may have received only the
-Python part of the code. In this case, you will need to obtain the
-Protocol Compiler from some other source before you can use this
-package.
+This README file contains protobuf installation instructions. To install
+protobuf, you need to install the protocol compiler (used to compile .proto
+files) and the protobuf runtime for your chosen programming language.
-Development Warning
-===================
+Protocol Compiler Installation
+------------------------------
-The pure python performance is slow. For better performance please
-use python c++ implementation.
+The protocol compiler is written in C++. If you are using C++, please follow
+the [C++ Installation Instructions](src/README.md) to install protoc along
+with the C++ runtime.
-Installation
-============
+For non-C++ users, the simplest way to install the protocol compiler is to
+download a pre-built binary from our [GitHub release page](https://github.com/protocolbuffers/protobuf/releases).
-1) Make sure you have Python 3.7 or newer. If in doubt, run:
+In the downloads section of each release, you can find pre-built binaries in
+zip packages: `protoc-$VERSION-$PLATFORM.zip`. It contains the protoc binary
+as well as a set of standard `.proto` files distributed along with protobuf.
- $ python -V
+If you are looking for an old version that is not available in the release
+page, check out the [Maven repository](https://repo1.maven.org/maven2/com/google/protobuf/protoc/).
-2) If you do not have setuptools installed, note that it will be
- downloaded and installed automatically as soon as you run `setup.py`.
- If you would rather install it manually, you may do so by following
- the instructions on [this page](https://packaging.python.org/en/latest/installing.html#setup-for-installing-packages).
+These pre-built binaries are only provided for released versions. If you want
+to use the github main version at HEAD, or you need to modify protobuf code,
+or you are using C++, it's recommended to build your own protoc binary from
+source.
-3) Build the C++ code, or install a binary distribution of `protoc`. If
- you install a binary distribution, make sure that it is the same
- version as this package. If in doubt, run:
+If you would like to build protoc binary from source, see the [C++ Installation Instructions](src/README.md).
- $ protoc --version
+Protobuf Runtime Installation
+-----------------------------
-4) Build and run the tests:
+Protobuf supports several different programming languages. For each programming
+language, you can find instructions in the corresponding source directory about
+how to install protobuf runtime for that specific language:
- $ python setup.py build
- $ python setup.py test
+| Language | Source |
+|--------------------------------------|-------------------------------------------------------------|
+| C++ (include C++ runtime and protoc) | [src](src) |
+| Java | [java](java) |
+| Python | [python](python) |
+| Objective-C | [objectivec](objectivec) |
+| C# | [csharp](csharp) |
+| Ruby | [ruby](ruby) |
+| Go | [protocolbuffers/protobuf-go](https://github.com/protocolbuffers/protobuf-go)|
+| PHP | [php](php) |
+| Dart | [dart-lang/protobuf](https://github.com/dart-lang/protobuf) |
+| Javascript | [protocolbuffers/protobuf-javascript](https://github.com/protocolbuffers/protobuf-javascript)|
- To build, test, and use the C++ implementation, you must first compile
- `libprotobuf.so`:
+Quick Start
+-----------
- $ (cd .. && make)
+The best way to learn how to use protobuf is to follow the [tutorials in our
+developer guide](https://developers.google.com/protocol-buffers/docs/tutorials).
- On OS X:
+If you want to learn from code examples, take a look at the examples in the
+[examples](examples) directory.
- If you are running a Homebrew-provided Python, you must make sure another
- version of protobuf is not already installed, as Homebrew's Python will
- search `/usr/local/lib` for `libprotobuf.so` before it searches
- `../src/.libs`.
+Documentation
+-------------
- You can either unlink Homebrew's protobuf or install the `libprotobuf` you
- built earlier:
+The complete documentation is available via the [Protocol Buffers documentation](https://developers.google.com/protocol-buffers/).
- $ brew unlink protobuf
+Developer Community
+-------------------
- or
+To be alerted to upcoming changes in Protocol Buffers and connect with protobuf developers and users,
+[join the Google Group](https://groups.google.com/g/protobuf).
- $ (cd .. && make install)
-
- On other *nix:
-
- You must make `libprotobuf.so` dynamically available. You can either
- install libprotobuf you built earlier, or set `LD_LIBRARY_PATH`:
-
- $ export LD_LIBRARY_PATH=../src/.libs
-
- or
-
- $ (cd .. && make install)
-
- To build the C++ implementation run:
-
- $ python setup.py build --cpp_implementation
-
- Then run the tests like so:
-
- $ python setup.py test --cpp_implementation
-
- If some tests fail, this library may not work correctly on your
- system. Continue at your own risk.
-
- Please note that there is a known problem with some versions of
- Python on Cygwin which causes the tests to fail after printing the
- error: `sem_init: Resource temporarily unavailable`. This appears
- to be a [bug either in Cygwin or in
- Python](http://www.cygwin.com/ml/cygwin/2005-07/msg01378.html).
-
- We do not know if or when it might be fixed. We also do not know
- how likely it is that this bug will affect users in practice.
-
-5) Install:
-
- $ python setup.py install
-
- or:
-
- $ (cd .. && make install)
- $ python setup.py install --cpp_implementation
-
- This step may require superuser privileges.
- NOTE: To use C++ implementation, you need to export an environment
- variable before running your program. See the "C++ Implementation"
- section below for more details.
-
-Usage
-=====
-
-The complete documentation for Protocol Buffers is available via the
-web at:
-
- https://developers.google.com/protocol-buffers/
-
-C++ Implementation
-==================
-
-The C++ implementation for Python messages is built as a Python extension to
-improve the overall protobuf Python performance.
-
-To use the C++ implementation, you need to install the C++ protobuf runtime
-library, please see instructions in the parent directory.
diff --git a/contrib/python/protobuf/py3/SECURITY.md b/contrib/python/protobuf/py3/SECURITY.md
new file mode 100644
index 0000000000..76a40ee066
--- /dev/null
+++ b/contrib/python/protobuf/py3/SECURITY.md
@@ -0,0 +1,4 @@
+To report security concerns or vulnerabilities within protobuf, please use
+Google's official channel for reporting these.
+
+https://www.google.com/appserve/security-bugs/m2/new
diff --git a/contrib/python/protobuf/py3/google/protobuf/__init__.py b/contrib/python/protobuf/py3/google/protobuf/__init__.py
index 70d564a90a..43ce1a25eb 100644
--- a/contrib/python/protobuf/py3/google/protobuf/__init__.py
+++ b/contrib/python/protobuf/py3/google/protobuf/__init__.py
@@ -30,4 +30,4 @@
# Copyright 2007 Google Inc. All Rights Reserved.
-__version__ = '4.21.7'
+__version__ = '4.22.5'
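
The version bump above is the quickest way to confirm that a build picked up the new runtime rather than a stale 4.21.7 copy. A minimal sanity check (not part of the commit, purely illustrative):

```python
# Verify which protobuf runtime the build actually picked up after this update.
from google.protobuf import __version__

assert __version__ == '4.22.5', f'unexpected protobuf runtime: {__version__}'
print(__version__)  # expected: 4.22.5
```
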
diff --git a/contrib/python/protobuf/py3/google/protobuf/descriptor.py b/contrib/python/protobuf/py3/google/protobuf/descriptor.py
index f5a0caa6bd..fcb87cab55 100644
--- a/contrib/python/protobuf/py3/google/protobuf/descriptor.py
+++ b/contrib/python/protobuf/py3/google/protobuf/descriptor.py
@@ -66,6 +66,7 @@ if _USE_C_DESCRIPTORS:
# and make it return True when the descriptor is an instance of the extension
# type written in C++.
class DescriptorMetaclass(type):
+
def __instancecheck__(cls, obj):
if super(DescriptorMetaclass, cls).__instancecheck__(obj):
return True
@@ -633,13 +634,29 @@ class FieldDescriptor(DescriptorBase):
if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or
self.containing_oneof):
return True
- if hasattr(self.file, 'syntax'):
- return self.file.syntax == 'proto2'
- if hasattr(self.message_type, 'syntax'):
- return self.message_type.syntax == 'proto2'
- raise RuntimeError(
- 'has_presence is not ready to use because field %s is not'
- ' linked with message type nor file' % self.full_name)
+ # self.containing_type is used here instead of self.file for legacy
+ # compatibility. FieldDescriptor.file was added in cl/153110619
+ # Some old/generated code didn't link file to FieldDescriptor.
+ # TODO(jieluo): remove syntax usage b/240619313
+ return self.containing_type.syntax == 'proto2'
+
+ @property
+ def is_packed(self):
+ """Returns if the field is packed."""
+ if self.label != FieldDescriptor.LABEL_REPEATED:
+ return False
+ field_type = self.type
+ if (field_type == FieldDescriptor.TYPE_STRING or
+ field_type == FieldDescriptor.TYPE_GROUP or
+ field_type == FieldDescriptor.TYPE_MESSAGE or
+ field_type == FieldDescriptor.TYPE_BYTES):
+ return False
+ if self.containing_type.syntax == 'proto2':
+ return self.has_options and self.GetOptions().packed
+ else:
+ return (not self.has_options or
+ not self.GetOptions().HasField('packed') or
+ self.GetOptions().packed)
@staticmethod
def ProtoTypeToCppProtoType(proto_type):
@@ -720,6 +737,30 @@ class EnumDescriptor(_NestedDescriptorBase):
# Values are reversed to ensure that the first alias is retained.
self.values_by_number = dict((v.number, v) for v in reversed(values))
+ @property
+ def is_closed(self):
+ """Returns true whether this is a "closed" enum.
+
+ This means that it:
+ - Has a fixed set of values, rather than being equivalent to an int32.
+ - Encountering values not in this set causes them to be treated as unknown
+ fields.
+ - The first value (i.e., the default) may be nonzero.
+
+ WARNING: Some runtimes currently have a quirk where non-closed enums are
+ treated as closed when used as the type of fields defined in a
+ `syntax = proto2;` file. This quirk is not present in all runtimes; as of
+ writing, we know that:
+
+ - C++, Java, and C++-based Python share this quirk.
+ - UPB and UPB-based Python do not.
+ - PHP and Ruby treat all enums as open regardless of declaration.
+
+ Care should be taken when using this function to respect the target
+ runtime's enum handling quirks.
+ """
+ return self.file.syntax == 'proto2'
+
def CopyToProto(self, proto):
"""Copies this to a descriptor_pb2.EnumDescriptorProto.
@@ -873,11 +914,14 @@ class ServiceDescriptor(_NestedDescriptorBase):
Args:
name (str): Name of the method.
+
Returns:
- MethodDescriptor or None: the descriptor for the requested method, if
- found.
+ MethodDescriptor: The descriptor for the requested method.
+
+ Raises:
+ KeyError: if the method cannot be found in the service.
"""
- return self.methods_by_name.get(name, None)
+ return self.methods_by_name[name]
def CopyToProto(self, proto):
"""Copies this to a descriptor_pb2.ServiceDescriptorProto.
@@ -1018,13 +1062,7 @@ class FileDescriptor(DescriptorBase):
# FileDescriptor() is called from various places, not only from generated
# files, to register dynamic proto files and messages.
# pylint: disable=g-explicit-bool-comparison
- if serialized_pb == b'':
- # Cpp generated code must be linked in if serialized_pb is ''
- try:
- return _message.default_pool.FindFileByName(name)
- except KeyError:
- raise RuntimeError('Please link in cpp generated lib for %s' % (name))
- elif serialized_pb:
+ if serialized_pb:
return _message.default_pool.AddSerializedFile(serialized_pb)
else:
return super(FileDescriptor, cls).__new__(cls)
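
Note the behavioral change documented above: `ServiceDescriptor.FindMethodByName()` now raises `KeyError` for unknown methods instead of returning `None`. A sketch of the caller-side adjustment, using a hypothetical service name (not from this diff):

```python
# Callers that relied on FindMethodByName() returning None must now handle KeyError.
from google.protobuf import descriptor_pool

# 'my.package.MyService' is a hypothetical, already-registered service.
service = descriptor_pool.Default().FindServiceByName('my.package.MyService')
try:
    method = service.FindMethodByName('NoSuchMethod')
except KeyError:
    method = None  # pre-22.x code received None here implicitly
```
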
diff --git a/contrib/python/protobuf/py3/google/protobuf/descriptor_pool.py b/contrib/python/protobuf/py3/google/protobuf/descriptor_pool.py
index 911372a8b0..1ebf11834a 100644
--- a/contrib/python/protobuf/py3/google/protobuf/descriptor_pool.py
+++ b/contrib/python/protobuf/py3/google/protobuf/descriptor_pool.py
@@ -120,11 +120,13 @@ class DescriptorPool(object):
if _USE_C_DESCRIPTORS:
- def __new__(cls, descriptor_db=None):
- # pylint: disable=protected-access
- return descriptor._message.DescriptorPool(descriptor_db)
+ def __new__(cls, descriptor_db=None):
+ # pylint: disable=protected-access
+ return descriptor._message.DescriptorPool(descriptor_db)
- def __init__(self, descriptor_db=None):
+ def __init__(
+ self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts=False
+ ):
"""Initializes a Pool of proto buffs.
The descriptor_db argument to the constructor is provided to allow
@@ -135,6 +137,8 @@ class DescriptorPool(object):
Args:
descriptor_db: A secondary source of file descriptors.
+ use_deprecated_legacy_json_field_conflicts: Unused, for compatibility with
+ C++.
"""
self._internal_db = descriptor_database.DescriptorDatabase()
@@ -144,9 +148,6 @@ class DescriptorPool(object):
self._service_descriptors = {}
self._file_descriptors = {}
self._toplevel_extensions = {}
- # TODO(jieluo): Remove _file_desc_by_toplevel_extension after
- # maybe year 2020 for compatibility issue (with 3.4.1 only).
- self._file_desc_by_toplevel_extension = {}
self._top_enum_values = {}
# We store extensions in two two-level mappings: The first key is the
# descriptor of the message being extended, the second key is the extension
@@ -220,7 +221,7 @@ class DescriptorPool(object):
file_desc.serialized_pb = serialized_file_desc_proto
return file_desc
- # Add Descriptor to descriptor pool is dreprecated. Please use Add()
+ # Add Descriptor to descriptor pool is deprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
def AddDescriptor(self, desc):
@@ -245,7 +246,7 @@ class DescriptorPool(object):
self._descriptors[desc.full_name] = desc
self._AddFileDescriptor(desc.file)
- # Add EnumDescriptor to descriptor pool is dreprecated. Please use Add()
+ # Add EnumDescriptor to descriptor pool is deprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
def AddEnumDescriptor(self, enum_desc):
@@ -286,7 +287,7 @@ class DescriptorPool(object):
self._top_enum_values[full_name] = enum_value
self._AddFileDescriptor(enum_desc.file)
- # Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add()
+ # Add ServiceDescriptor to descriptor pool is deprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
def AddServiceDescriptor(self, service_desc):
@@ -307,7 +308,7 @@ class DescriptorPool(object):
service_desc.file.name)
self._service_descriptors[service_desc.full_name] = service_desc
- # Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add()
+ # Add ExtensionDescriptor to descriptor pool is deprecated. Please use Add()
# or AddSerializedFile() to add a FileDescriptorProto instead.
@_Deprecated
def AddExtensionDescriptor(self, extension):
@@ -331,6 +332,8 @@ class DescriptorPool(object):
raise TypeError('Expected an extension descriptor.')
if extension.extension_scope is None:
+ self._CheckConflictRegister(
+ extension, extension.full_name, extension.file.name)
self._toplevel_extensions[extension.full_name] = extension
try:
@@ -372,12 +375,6 @@ class DescriptorPool(object):
"""
self._AddFileDescriptor(file_desc)
- # TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
- # FieldDescriptor.file is added in code gen. Remove this solution after
- # maybe 2020 for compatibility reason (with 3.4.1 only).
- for extension in file_desc.extensions_by_name.values():
- self._file_desc_by_toplevel_extension[
- extension.full_name] = file_desc
def _AddFileDescriptor(self, file_desc):
"""Adds a FileDescriptor to the pool, non-recursively.
@@ -483,7 +480,7 @@ class DescriptorPool(object):
pass
try:
- return self._file_desc_by_toplevel_extension[symbol]
+ return self._toplevel_extensions[symbol].file
except KeyError:
pass
@@ -792,8 +789,6 @@ class DescriptorPool(object):
file_descriptor.package, scope)
file_descriptor.extensions_by_name[extension_desc.name] = (
extension_desc)
- self._file_desc_by_toplevel_extension[extension_desc.full_name] = (
- file_descriptor)
for desc_proto in file_proto.message_type:
self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
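
The deprecation comments in this hunk point callers at `Add()` / `AddSerializedFile()` instead of the per-descriptor `Add*Descriptor()` entry points. A minimal sketch of the recommended path, assuming a `FileDescriptorProto` built in place for illustration:

```python
# Register a whole file with a pool instead of individual descriptors.
from google.protobuf import descriptor_pb2, descriptor_pool

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example.proto'      # illustrative file, not part of the diff
file_proto.package = 'example'
file_proto.message_type.add().name = 'Empty'

pool = descriptor_pool.DescriptorPool()
pool.Add(file_proto)                                      # structured form
# pool.AddSerializedFile(file_proto.SerializeToString())  # serialized form
desc = pool.FindMessageTypeByName('example.Empty')
```
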
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/__init__.py b/contrib/python/protobuf/py3/google/protobuf/internal/__init__.py
index e69de29bb2..7d2e571a14 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/__init__.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/__init__.py
@@ -0,0 +1,30 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py b/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py
new file mode 100644
index 0000000000..2f4a3b6b73
--- /dev/null
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/_parameterized.py
@@ -0,0 +1,443 @@
+#! /usr/bin/env python
+#
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Adds support for parameterized tests to Python's unittest TestCase class.
+
+A parameterized test is a method in a test case that is invoked with different
+argument tuples.
+
+A simple example:
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ (1, 2, 3),
+ (4, 5, 9),
+ (1, 1, 3))
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+
+Each invocation is a separate test case and properly isolated just
+like a normal test method, with its own setUp/tearDown cycle. In the
+example above, there are three separate testcases, one of which will
+fail due to an assertion error (1 + 1 != 3).
+
+Parameters for individual test cases can be tuples (with positional parameters)
+or dictionaries (with named parameters):
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ {'op1': 1, 'op2': 2, 'result': 3},
+ {'op1': 4, 'op2': 5, 'result': 9},
+ )
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+If a parameterized test fails, the error message will show the
+original test name (which is modified internally) and the arguments
+for the specific invocation, which are part of the string returned by
+the shortDescription() method on test cases.
+
+The id method of the test, used internally by the unittest framework,
+is also modified to show the arguments. To make sure that test names
+stay the same across several invocations, object representations like
+
+ >>> class Foo(object):
+ ... pass
+ >>> repr(Foo())
+ '<__main__.Foo object at 0x23d8610>'
+
+are turned into '<__main__.Foo>'. For even more descriptive names,
+especially in test logs, you can use the named_parameters decorator. In
+this case, only tuples are supported, and the first parameters has to
+be a string (or an object that returns an apt name when converted via
+str()):
+
+ class NamedExample(_parameterized.TestCase):
+ @_parameterized.named_parameters(
+ ('Normal', 'aa', 'aaa', True),
+ ('EmptyPrefix', '', 'abc', True),
+ ('BothEmpty', '', '', True))
+ def testStartsWith(self, prefix, string, result):
+ self.assertEqual(result, strings.startswith(prefix))
+
+Named tests also have the benefit that they can be run individually
+from the command line:
+
+ $ testmodule.py NamedExample.testStartsWithNormal
+ .
+ --------------------------------------------------------------------
+ Ran 1 test in 0.000s
+
+ OK
+
+Parameterized Classes
+=====================
+If invocation arguments are shared across test methods in a single
+TestCase class, instead of decorating all test methods
+individually, the class itself can be decorated:
+
+ @_parameterized.parameters(
+ (1, 2, 3)
+ (4, 5, 9))
+ class ArithmeticTest(_parameterized.TestCase):
+ def testAdd(self, arg1, arg2, result):
+ self.assertEqual(arg1 + arg2, result)
+
+ def testSubtract(self, arg2, arg2, result):
+ self.assertEqual(result - arg1, arg2)
+
+Inputs from Iterables
+=====================
+If parameters should be shared across several test cases, or are dynamically
+created from other sources, a single non-tuple iterable can be passed into
+the decorator. This iterable will be used to obtain the test cases:
+
+ class AdditionExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ c.op1, c.op2, c.result for c in testcases
+ )
+ def testAddition(self, op1, op2, result):
+ self.assertEqual(result, op1 + op2)
+
+
+Single-Argument Test Methods
+============================
+If a test method takes only one argument, the single argument does not need to
+be wrapped into a tuple:
+
+ class NegativeNumberExample(_parameterized.TestCase):
+ @_parameterized.parameters(
+ -1, -3, -4, -5
+ )
+ def testIsNegative(self, arg):
+ self.assertTrue(IsNegative(arg))
+"""
+
+__author__ = 'tmarek@google.com (Torsten Marek)'
+
+import functools
+import re
+import types
+import unittest
+import uuid
+
+try:
+ # Since python 3
+ import collections.abc as collections_abc
+except ImportError:
+ # Won't work after python 3.8
+ import collections as collections_abc
+
+ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
+_SEPARATOR = uuid.uuid1().hex
+_FIRST_ARG = object()
+_ARGUMENT_REPR = object()
+
+
+def _CleanRepr(obj):
+ return ADDR_RE.sub(r'<\1>', repr(obj))
+
+
+# Helper function formerly from the unittest module, removed from it in
+# Python 2.7.
+def _StrClass(cls):
+ return '%s.%s' % (cls.__module__, cls.__name__)
+
+
+def _NonStringIterable(obj):
+ return (isinstance(obj, collections_abc.Iterable) and
+ not isinstance(obj, str))
+
+
+def _FormatParameterList(testcase_params):
+ if isinstance(testcase_params, collections_abc.Mapping):
+ return ', '.join('%s=%s' % (argname, _CleanRepr(value))
+ for argname, value in testcase_params.items())
+ elif _NonStringIterable(testcase_params):
+ return ', '.join(map(_CleanRepr, testcase_params))
+ else:
+ return _FormatParameterList((testcase_params,))
+
+
+class _ParameterizedTestIter(object):
+ """Callable and iterable class for producing new test cases."""
+
+ def __init__(self, test_method, testcases, naming_type):
+ """Returns concrete test functions for a test and a list of parameters.
+
+ The naming_type is used to determine the name of the concrete
+ functions as reported by the unittest framework. If naming_type is
+ _FIRST_ARG, the testcases must be tuples, and the first element must
+ have a string representation that is a valid Python identifier.
+
+ Args:
+ test_method: The decorated test method.
+ testcases: (list of tuple/dict) A list of parameter
+ tuples/dicts for individual test invocations.
+ naming_type: The test naming type, either _NAMED or _ARGUMENT_REPR.
+ """
+ self._test_method = test_method
+ self.testcases = testcases
+ self._naming_type = naming_type
+
+ def __call__(self, *args, **kwargs):
+ raise RuntimeError('You appear to be running a parameterized test case '
+ 'without having inherited from parameterized.'
+ 'TestCase. This is bad because none of '
+ 'your test cases are actually being run.')
+
+ def __iter__(self):
+ test_method = self._test_method
+ naming_type = self._naming_type
+
+ def MakeBoundParamTest(testcase_params):
+ @functools.wraps(test_method)
+ def BoundParamTest(self):
+ if isinstance(testcase_params, collections_abc.Mapping):
+ test_method(self, **testcase_params)
+ elif _NonStringIterable(testcase_params):
+ test_method(self, *testcase_params)
+ else:
+ test_method(self, testcase_params)
+
+ if naming_type is _FIRST_ARG:
+ # Signal the metaclass that the name of the test function is unique
+ # and descriptive.
+ BoundParamTest.__x_use_name__ = True
+ BoundParamTest.__name__ += str(testcase_params[0])
+ testcase_params = testcase_params[1:]
+ elif naming_type is _ARGUMENT_REPR:
+ # __x_extra_id__ is used to pass naming information to the __new__
+ # method of TestGeneratorMetaclass.
+ # The metaclass will make sure to create a unique, but nondescriptive
+ # name for this test.
+ BoundParamTest.__x_extra_id__ = '(%s)' % (
+ _FormatParameterList(testcase_params),)
+ else:
+ raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
+
+ BoundParamTest.__doc__ = '%s(%s)' % (
+ BoundParamTest.__name__, _FormatParameterList(testcase_params))
+ if test_method.__doc__:
+ BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
+ return BoundParamTest
+ return (MakeBoundParamTest(c) for c in self.testcases)
+
+
+def _IsSingletonList(testcases):
+ """True iff testcases contains only a single non-tuple element."""
+ return len(testcases) == 1 and not isinstance(testcases[0], tuple)
+
+
+def _ModifyClass(class_object, testcases, naming_type):
+ assert not getattr(class_object, '_id_suffix', None), (
+ 'Cannot add parameters to %s,'
+ ' which already has parameterized methods.' % (class_object,))
+ class_object._id_suffix = id_suffix = {}
+ # We change the size of __dict__ while we iterate over it,
+ # which Python 3.x will complain about, so use copy().
+ for name, obj in class_object.__dict__.copy().items():
+ if (name.startswith(unittest.TestLoader.testMethodPrefix)
+ and isinstance(obj, types.FunctionType)):
+ delattr(class_object, name)
+ methods = {}
+ _UpdateClassDictForParamTestCase(
+ methods, id_suffix, name,
+ _ParameterizedTestIter(obj, testcases, naming_type))
+ for name, meth in methods.items():
+ setattr(class_object, name, meth)
+
+
+def _ParameterDecorator(naming_type, testcases):
+ """Implementation of the parameterization decorators.
+
+ Args:
+ naming_type: The naming type.
+ testcases: Testcase parameters.
+
+ Returns:
+ A function for modifying the decorated object.
+ """
+ def _Apply(obj):
+ if isinstance(obj, type):
+ _ModifyClass(
+ obj,
+ list(testcases) if not isinstance(testcases, collections_abc.Sequence)
+ else testcases,
+ naming_type)
+ return obj
+ else:
+ return _ParameterizedTestIter(obj, testcases, naming_type)
+
+ if _IsSingletonList(testcases):
+ assert _NonStringIterable(testcases[0]), (
+ 'Single parameter argument must be a non-string iterable')
+ testcases = testcases[0]
+
+ return _Apply
+
+
+def parameters(*testcases): # pylint: disable=invalid-name
+ """A decorator for creating parameterized tests.
+
+ See the module docstring for a usage example.
+ Args:
+ *testcases: Parameters for the decorated method, either a single
+ iterable, or a list of tuples/dicts/objects (for tests
+ with only one argument).
+
+ Returns:
+ A test generator to be handled by TestGeneratorMetaclass.
+ """
+ return _ParameterDecorator(_ARGUMENT_REPR, testcases)
+
+
+def named_parameters(*testcases): # pylint: disable=invalid-name
+ """A decorator for creating parameterized tests.
+
+ See the module docstring for a usage example. The first element of
+ each parameter tuple should be a string and will be appended to the
+ name of the test method.
+
+ Args:
+ *testcases: Parameters for the decorated method, either a single
+ iterable, or a list of tuples.
+
+ Returns:
+ A test generator to be handled by TestGeneratorMetaclass.
+ """
+ return _ParameterDecorator(_FIRST_ARG, testcases)
+
+
+class TestGeneratorMetaclass(type):
+ """Metaclass for test cases with test generators.
+
+ A test generator is an iterable in a testcase that produces callables. These
+ callables must be single-argument methods. These methods are injected into
+ the class namespace and the original iterable is removed. If the name of the
+ iterable conforms to the test pattern, the injected methods will be picked
+ up as tests by the unittest framework.
+
+ In general, it is supposed to be used in conjunction with the
+ parameters decorator.
+ """
+
+ def __new__(mcs, class_name, bases, dct):
+ dct['_id_suffix'] = id_suffix = {}
+ for name, obj in dct.copy().items():
+ if (name.startswith(unittest.TestLoader.testMethodPrefix) and
+ _NonStringIterable(obj)):
+ iterator = iter(obj)
+ dct.pop(name)
+ _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
+
+ return type.__new__(mcs, class_name, bases, dct)
+
+
+def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
+ """Adds individual test cases to a dictionary.
+
+ Args:
+ dct: The target dictionary.
+ id_suffix: The dictionary for mapping names to test IDs.
+ name: The original name of the test case.
+ iterator: The iterator generating the individual test cases.
+ """
+ for idx, func in enumerate(iterator):
+ assert callable(func), 'Test generators must yield callables, got %r' % (
+ func,)
+ if getattr(func, '__x_use_name__', False):
+ new_name = func.__name__
+ else:
+ new_name = '%s%s%d' % (name, _SEPARATOR, idx)
+ assert new_name not in dct, (
+ 'Name of parameterized test case "%s" not unique' % (new_name,))
+ dct[new_name] = func
+ id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
+
+
+class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
+ """Base class for test cases using the parameters decorator."""
+
+ def _OriginalName(self):
+ return self._testMethodName.split(_SEPARATOR)[0]
+
+ def __str__(self):
+ return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
+
+ def id(self): # pylint: disable=invalid-name
+ """Returns the descriptive ID of the test.
+
+ This is used internally by the unittesting framework to get a name
+ for the test to be used in reports.
+
+ Returns:
+ The test id.
+ """
+ return '%s.%s%s' % (_StrClass(self.__class__),
+ self._OriginalName(),
+ self._id_suffix.get(self._testMethodName, ''))
+
+
+def CoopTestCase(other_base_class):
+ """Returns a new base class with a cooperative metaclass base.
+
+ This enables the TestCase to be used in combination
+ with other base classes that have custom metaclasses, such as
+ mox.MoxTestBase.
+
+ Only works with metaclasses that do not override type.__new__.
+
+ Example:
+
+ import google3
+ import mox
+
+ from google.protobuf.internal import _parameterized
+
+ class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
+ ...
+
+ Args:
+ other_base_class: (class) A test case base class.
+
+ Returns:
+ A new class object.
+ """
+ metaclass = type(
+ 'CoopMetaclass',
+ (other_base_class.__metaclass__,
+ TestGeneratorMetaclass), {})
+ return metaclass(
+ 'CoopTestCase',
+ (other_base_class, TestCase), {})
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.cc b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.cc
index 33f5b04f49..6db12e8dc6 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.cc
@@ -28,7 +28,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#define PY_SSIZE_T_CLEAN
#include <Python.h>
namespace google {
@@ -70,40 +69,59 @@ static const char* kImplVersionName = "api_version";
static const char* kModuleName = "_api_implementation";
static const char kModuleDocstring[] =
- "_api_implementation is a module that exposes compile-time constants that\n"
- "determine the default API implementation to use for Python proto2.\n"
- "\n"
- "It complements api_implementation.py by setting defaults using "
- "compile-time\n"
- "constants defined in C, such that one can set defaults at compilation\n"
- "(e.g. with blaze flag --copt=-DPYTHON_PROTO2_CPP_IMPL_V2).";
+"_api_implementation is a module that exposes compile-time constants that\n"
+"determine the default API implementation to use for Python proto2.\n"
+"\n"
+"It complements api_implementation.py by setting defaults using compile-time\n"
+"constants defined in C, such that one can set defaults at compilation\n"
+"(e.g. with blaze flag --copt=-DPYTHON_PROTO2_CPP_IMPL_V2).";
-static struct PyModuleDef _module = {PyModuleDef_HEAD_INIT,
- kModuleName,
- kModuleDocstring,
- -1,
- nullptr,
- nullptr,
- nullptr,
- nullptr,
- nullptr};
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef _module = {
+ PyModuleDef_HEAD_INIT,
+ kModuleName,
+ kModuleDocstring,
+ -1,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ NULL
+};
+#define INITFUNC PyInit__api_implementation
+#define INITFUNC_ERRORVAL NULL
+#else
+#define INITFUNC init_api_implementation
+#define INITFUNC_ERRORVAL
+#endif
extern "C" {
-PyMODINIT_FUNC PyInit__api_implementation() {
- PyObject* module = PyModule_Create(&_module);
- if (module == nullptr) {
- return nullptr;
- }
+ PyMODINIT_FUNC INITFUNC() {
+#if PY_MAJOR_VERSION >= 3
+ PyObject *module = PyModule_Create(&_module);
+#else
+ PyObject *module = Py_InitModule3(
+ const_cast<char*>(kModuleName),
+ NULL,
+ const_cast<char*>(kModuleDocstring));
+#endif
+ if (module == NULL) {
+ return INITFUNC_ERRORVAL;
+ }
- // Adds the module variable "api_version".
- if (PyModule_AddIntConstant(module, const_cast<char*>(kImplVersionName),
- kImplVersion)) {
- Py_DECREF(module);
- return nullptr;
- }
+ // Adds the module variable "api_version".
+ if (PyModule_AddIntConstant(
+ module,
+ const_cast<char*>(kImplVersionName),
+ kImplVersion))
+#if PY_MAJOR_VERSION < 3
+ return;
+#else
+ { Py_DECREF(module); return NULL; }
- return module;
-}
+ return module;
+#endif
+ }
}
} // namespace python
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
index 74586487a8..7d20bd2212 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/api_implementation.py
@@ -102,6 +102,7 @@ if _implementation_type == 'cpp':
try:
# pylint: disable=g-import-not-at-top
from google.protobuf.pyext import _message
+ sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message
_c_module = _message
del _message
except ImportError:
@@ -151,12 +152,6 @@ def Type():
return _implementation_type
-def _SetType(implementation_type):
- """Never use! Only for protobuf benchmark."""
- global _implementation_type
- _implementation_type = implementation_type
-
-
# See comment on 'Type' above.
# TODO(jieluo): Remove the API, it returns a constant. b/228102101
def Version():
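
For reference, the backend selected by this module can be inspected at runtime; a minimal sketch, not part of this patch (Type() and Version() are the public helpers kept by the hunk above):

    from google.protobuf.internal import api_implementation

    # 'cpp' when the pyext._message extension imported successfully above,
    # otherwise 'python' (the pure-Python fallback).
    print(api_implementation.Type())
    # Version() still returns a constant (see the TODO about removing it).
    print(api_implementation.Version())
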
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py b/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
index a916276319..8ff549381e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/decoder.py
@@ -806,8 +806,7 @@ def MessageSetItemDecoder(descriptor):
if value is None:
message_type = extension.message_type
if not hasattr(message_type, '_concrete_class'):
- # pylint: disable=protected-access
- message._FACTORY.GetPrototype(message_type)
+ message_factory.GetMessageClass(message_type)
value = field_dict.setdefault(
extension, message_type._concrete_class())
if value._InternalParse(buffer, message_start,message_end) != message_end:
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/extension_dict.py b/contrib/python/protobuf/py3/google/protobuf/internal/extension_dict.py
index b346cf283e..83c4cb5dc6 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/extension_dict.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/extension_dict.py
@@ -89,8 +89,9 @@ class _ExtensionDict(object):
elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
message_type = extension_handle.message_type
if not hasattr(message_type, '_concrete_class'):
- # pylint: disable=protected-access
- self._extended_message._FACTORY.GetPrototype(message_type)
+ # pylint: disable=g-import-not-at-top
+ from google.protobuf import message_factory
+ message_factory.GetMessageClass(message_type)
assert getattr(extension_handle.message_type, '_concrete_class', None), (
'Uninitialized concrete class found for field %r (message type %r)'
% (extension_handle.full_name,
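
Both this hunk and the decoder hunk above replace the removed _FACTORY.GetPrototype() call with message_factory.GetMessageClass() when an extension's message class has not been built yet; the public Extensions API is unchanged. A minimal sketch under assumed definitions (base_pb2 and ext_pb2 are hypothetical: Base declares an extension range, and ext_pb2.weight extends it with an optional int32):

    msg = base_pb2.Base()
    msg.Extensions[ext_pb2.weight] = 7          # set a scalar extension
    print(msg.HasExtension(ext_pb2.weight))     # True
    msg.ClearExtension(ext_pb2.weight)
    print(msg.HasExtension(ext_pb2.weight))     # False
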
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/field_mask.py b/contrib/python/protobuf/py3/google/protobuf/internal/field_mask.py
new file mode 100644
index 0000000000..489769901e
--- /dev/null
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/field_mask.py
@@ -0,0 +1,333 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Contains FieldMask class."""
+
+from google.protobuf.descriptor import FieldDescriptor
+
+
+class FieldMask(object):
+ """Class for FieldMask message type."""
+
+ __slots__ = ()
+
+ def ToJsonString(self):
+ """Converts FieldMask to string according to proto3 JSON spec."""
+ camelcase_paths = []
+ for path in self.paths:
+ camelcase_paths.append(_SnakeCaseToCamelCase(path))
+ return ','.join(camelcase_paths)
+
+ def FromJsonString(self, value):
+ """Converts string to FieldMask according to proto3 JSON spec."""
+ if not isinstance(value, str):
+ raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
+ self.Clear()
+ if value:
+ for path in value.split(','):
+ self.paths.append(_CamelCaseToSnakeCase(path))
+
+ def IsValidForDescriptor(self, message_descriptor):
+ """Checks whether the FieldMask is valid for Message Descriptor."""
+ for path in self.paths:
+ if not _IsValidPath(message_descriptor, path):
+ return False
+ return True
+
+ def AllFieldsFromDescriptor(self, message_descriptor):
+ """Gets all direct fields of Message Descriptor to FieldMask."""
+ self.Clear()
+ for field in message_descriptor.fields:
+ self.paths.append(field.name)
+
+ def CanonicalFormFromMask(self, mask):
+ """Converts a FieldMask to the canonical form.
+
+ Removes paths that are covered by another path. For example,
+ "foo.bar" is covered by "foo" and will be removed if "foo"
+ is also in the FieldMask. Then sorts all paths in alphabetical order.
+
+ Args:
+ mask: The original FieldMask to be converted.
+ """
+ tree = _FieldMaskTree(mask)
+ tree.ToFieldMask(self)
+
+ def Union(self, mask1, mask2):
+ """Merges mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ tree.MergeFromFieldMask(mask2)
+ tree.ToFieldMask(self)
+
+ def Intersect(self, mask1, mask2):
+ """Intersects mask1 and mask2 into this FieldMask."""
+ _CheckFieldMaskMessage(mask1)
+ _CheckFieldMaskMessage(mask2)
+ tree = _FieldMaskTree(mask1)
+ intersection = _FieldMaskTree()
+ for path in mask2.paths:
+ tree.IntersectPath(path, intersection)
+ intersection.ToFieldMask(self)
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message_field=False, replace_repeated_field=False):
+ """Merges fields specified in FieldMask from source to destination.
+
+ Args:
+ source: Source message.
+ destination: The destination message to be merged into.
+ replace_message_field: Replace message field if True. Merge message
+ field if False.
+ replace_repeated_field: Replace repeated field if True. Append
+ elements of repeated field if False.
+ """
+ tree = _FieldMaskTree(self)
+ tree.MergeMessage(
+ source, destination, replace_message_field, replace_repeated_field)
+
+
+def _IsValidPath(message_descriptor, path):
+ """Checks whether the path is valid for Message Descriptor."""
+ parts = path.split('.')
+ last = parts.pop()
+ for name in parts:
+ field = message_descriptor.fields_by_name.get(name)
+ if (field is None or
+ field.label == FieldDescriptor.LABEL_REPEATED or
+ field.type != FieldDescriptor.TYPE_MESSAGE):
+ return False
+ message_descriptor = field.message_type
+ return last in message_descriptor.fields_by_name
+
+
+def _CheckFieldMaskMessage(message):
+ """Raises ValueError if message is not a FieldMask."""
+ message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.name != 'FieldMask' or
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
+ raise ValueError('Message {0} is not a FieldMask.'.format(
+ message_descriptor.full_name))
+
+
+def _SnakeCaseToCamelCase(path_name):
+ """Converts a path name from snake_case to camelCase."""
+ result = []
+ after_underscore = False
+ for c in path_name:
+ if c.isupper():
+ raise ValueError(
+ 'Fail to print FieldMask to Json string: Path name '
+ '{0} must not contain uppercase letters.'.format(path_name))
+ if after_underscore:
+ if c.islower():
+ result.append(c.upper())
+ after_underscore = False
+ else:
+ raise ValueError(
+ 'Fail to print FieldMask to Json string: The '
+ 'character after a "_" must be a lowercase letter '
+ 'in path name {0}.'.format(path_name))
+ elif c == '_':
+ after_underscore = True
+ else:
+ result += c
+
+ if after_underscore:
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
+ 'in path name {0}.'.format(path_name))
+ return ''.join(result)
+
+
+def _CamelCaseToSnakeCase(path_name):
+ """Converts a field name from camelCase to snake_case."""
+ result = []
+ for c in path_name:
+ if c == '_':
+ raise ValueError('Fail to parse FieldMask: Path name '
+ '{0} must not contain "_"s.'.format(path_name))
+ if c.isupper():
+ result += '_'
+ result += c.lower()
+ else:
+ result += c
+ return ''.join(result)
+
+
+class _FieldMaskTree(object):
+ """Represents a FieldMask in a tree structure.
+
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
+ the FieldMaskTree will be:
+ [_root] -+- foo -+- bar
+ | |
+ | +- baz
+ |
+ +- bar --- baz
+ In the tree, each leaf node represents a field path.
+ """
+
+ __slots__ = ('_root',)
+
+ def __init__(self, field_mask=None):
+ """Initializes the tree by FieldMask."""
+ self._root = {}
+ if field_mask:
+ self.MergeFromFieldMask(field_mask)
+
+ def MergeFromFieldMask(self, field_mask):
+ """Merges a FieldMask to the tree."""
+ for path in field_mask.paths:
+ self.AddPath(path)
+
+ def AddPath(self, path):
+ """Adds a field path into the tree.
+
+ If the field path to add is a sub-path of an existing field path
+ in the tree (i.e., a leaf node), it means the tree already matches
+ the given path so nothing will be added to the tree. If the path
+ matches an existing non-leaf node in the tree, that non-leaf node
+ will be turned into a leaf node with all its children removed because
+ the path matches all the node's children. Otherwise, a new path will
+ be added.
+
+ Args:
+ path: The field path to add.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ node[name] = {}
+ elif not node[name]:
+ # Pre-existing empty node implies we already have this entire tree.
+ return
+ node = node[name]
+ # Remove any sub-trees we might have had.
+ node.clear()
+
+ def ToFieldMask(self, field_mask):
+ """Converts the tree to a FieldMask."""
+ field_mask.Clear()
+ _AddFieldPaths(self._root, '', field_mask)
+
+ def IntersectPath(self, path, intersection):
+ """Calculates the intersection part of a field path with this tree.
+
+ Args:
+      path: The field path to calculate the intersection for.
+ intersection: The out tree to record the intersection part.
+ """
+ node = self._root
+ for name in path.split('.'):
+ if name not in node:
+ return
+ elif not node[name]:
+ intersection.AddPath(path)
+ return
+ node = node[name]
+ intersection.AddLeafNodes(path, node)
+
+ def AddLeafNodes(self, prefix, node):
+ """Adds leaf nodes begin with prefix to this tree."""
+ if not node:
+ self.AddPath(prefix)
+ for name in node:
+ child_path = prefix + '.' + name
+ self.AddLeafNodes(child_path, node[name])
+
+ def MergeMessage(
+ self, source, destination,
+ replace_message, replace_repeated):
+ """Merge all fields specified by this tree from source to destination."""
+ _MergeMessage(
+ self._root, source, destination, replace_message, replace_repeated)
+
+
+def _StrConvert(value):
+ """Converts value to str if it is not."""
+  # This file is imported by the C extension, and some methods like ClearField
+  # require a string for the field name. py2/py3 have different text
+  # types and may use unicode.
+ if not isinstance(value, str):
+ return value.encode('utf-8')
+ return value
+
+
+def _MergeMessage(
+ node, source, destination, replace_message, replace_repeated):
+ """Merge all fields specified by a sub-tree from source to destination."""
+ source_descriptor = source.DESCRIPTOR
+ for name in node:
+ child = node[name]
+ field = source_descriptor.fields_by_name[name]
+ if field is None:
+ raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
+ name, source_descriptor.full_name))
+ if child:
+ # Sub-paths are only allowed for singular message fields.
+ if (field.label == FieldDescriptor.LABEL_REPEATED or
+ field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
+ raise ValueError('Error: Field {0} in message {1} is not a singular '
+ 'message field and cannot have sub-fields.'.format(
+ name, source_descriptor.full_name))
+ if source.HasField(name):
+ _MergeMessage(
+ child, getattr(source, name), getattr(destination, name),
+ replace_message, replace_repeated)
+ continue
+ if field.label == FieldDescriptor.LABEL_REPEATED:
+ if replace_repeated:
+ destination.ClearField(_StrConvert(name))
+ repeated_source = getattr(source, name)
+ repeated_destination = getattr(destination, name)
+ repeated_destination.MergeFrom(repeated_source)
+ else:
+ if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
+ if replace_message:
+ destination.ClearField(_StrConvert(name))
+ if source.HasField(name):
+ getattr(destination, name).MergeFrom(getattr(source, name))
+ else:
+ setattr(destination, name, getattr(source, name))
+
+
+def _AddFieldPaths(node, prefix, field_mask):
+ """Adds the field paths descended from node to field_mask."""
+ if not node and prefix:
+ field_mask.paths.append(prefix)
+ return
+ for name in sorted(node):
+ if prefix:
+ child_path = prefix + '.' + name
+ else:
+ child_path = name
+ _AddFieldPaths(node[name], child_path, field_mask)
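
The new module carries the FieldMask helpers out of well_known_types.py verbatim (see the matching removal further down); they are still injected into the generated well-known type. A minimal usage sketch, not part of this patch, assuming the generated field_mask_pb2 module:

    from google.protobuf import field_mask_pb2

    mask = field_mask_pb2.FieldMask(paths=['address.street', 'display_name'])
    print(mask.ToJsonString())            # 'address.street,displayName'

    other = field_mask_pb2.FieldMask()
    other.FromJsonString('address')       # parsed back to snake_case paths

    union = field_mask_pb2.FieldMask()
    union.Union(mask, other)              # 'address' covers 'address.street'
    print(union.paths)                    # ['address', 'display_name']
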
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py b/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
index 5550b425c4..bf9acefd2a 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
@@ -283,20 +283,8 @@ def _IsMessageMapField(field):
def _AttachFieldHelpers(cls, field_descriptor):
is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
- is_packable = (is_repeated and
- wire_format.IsTypePackable(field_descriptor.type))
- is_proto3 = field_descriptor.containing_type.syntax == 'proto3'
- if not is_packable:
- is_packed = False
- elif field_descriptor.containing_type.syntax == 'proto2':
- is_packed = (field_descriptor.has_options and
- field_descriptor.GetOptions().packed)
- else:
- has_packed_false = (field_descriptor.has_options and
- field_descriptor.GetOptions().HasField('packed') and
- field_descriptor.GetOptions().packed == False)
- is_packed = not has_packed_false
is_map_entry = _IsMapField(field_descriptor)
+ is_packed = field_descriptor.is_packed
if is_map_entry:
field_encoder = encoder.MapEncoder(field_descriptor)
@@ -320,16 +308,12 @@ def _AttachFieldHelpers(cls, field_descriptor):
tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
decode_type = field_descriptor.type
if (decode_type == _FieldDescriptor.TYPE_ENUM and
- type_checkers.SupportsOpenEnums(field_descriptor)):
+ not field_descriptor.enum_type.is_closed):
decode_type = _FieldDescriptor.TYPE_INT32
oneof_descriptor = None
- clear_if_default = False
if field_descriptor.containing_oneof is not None:
oneof_descriptor = field_descriptor
- elif (is_proto3 and not is_repeated and
- field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE):
- clear_if_default = True
if is_map_entry:
is_message_map = _IsMessageMapField(field_descriptor)
@@ -341,7 +325,7 @@ def _AttachFieldHelpers(cls, field_descriptor):
field_decoder = decoder.StringDecoder(
field_descriptor.number, is_repeated, is_packed,
field_descriptor, field_descriptor._default_constructor,
- clear_if_default)
+ not field_descriptor.has_presence)
elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
field_descriptor.number, is_repeated, is_packed,
@@ -351,7 +335,7 @@ def _AttachFieldHelpers(cls, field_descriptor):
field_descriptor.number, is_repeated, is_packed,
# pylint: disable=protected-access
field_descriptor, field_descriptor._default_constructor,
- clear_if_default)
+ not field_descriptor.has_presence)
cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor)
@@ -683,7 +667,6 @@ def _AddPropertiesForNonRepeatedScalarField(field, cls):
property_name = _PropertyName(proto_field_name)
type_checker = type_checkers.GetTypeChecker(field)
default_value = field.default_value
- is_proto3 = field.containing_type.syntax == 'proto3'
def getter(self):
# TODO(protobuf-team): This may be broken since there may not be
@@ -692,8 +675,6 @@ def _AddPropertiesForNonRepeatedScalarField(field, cls):
getter.__module__ = None
getter.__doc__ = 'Getter for %s.' % proto_field_name
- clear_when_set_to_default = is_proto3 and not field.containing_oneof
-
def field_setter(self, new_value):
# pylint: disable=protected-access
# Testing the value for truthiness captures all of the proto3 defaults
@@ -703,7 +684,7 @@ def _AddPropertiesForNonRepeatedScalarField(field, cls):
except TypeError as e:
raise TypeError(
'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
- if clear_when_set_to_default and not new_value:
+ if not field.has_presence and not new_value:
self._fields.pop(field, None)
else:
self._fields[field] = new_value
@@ -788,12 +769,12 @@ def _AddPropertiesForExtensions(descriptor, cls):
def _AddStaticMethods(cls):
# TODO(robinson): This probably needs to be thread-safe(?)
- def RegisterExtension(extension_handle):
- extension_handle.containing_type = cls.DESCRIPTOR
+ def RegisterExtension(field_descriptor):
+ field_descriptor.containing_type = cls.DESCRIPTOR
# TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
# pylint: disable=protected-access
- cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle)
- _AttachFieldHelpers(cls, extension_handle)
+ cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(field_descriptor)
+ _AttachFieldHelpers(cls, field_descriptor)
cls.RegisterExtension = staticmethod(RegisterExtension)
def FromString(s):
@@ -825,24 +806,16 @@ def _AddListFieldsMethod(message_descriptor, cls):
cls.ListFields = ListFields
-_PROTO3_ERROR_TEMPLATE = \
- ('Protocol message %s has no non-repeated submessage field "%s" '
- 'nor marked as optional')
-_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"'
def _AddHasFieldMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
- is_proto3 = (message_descriptor.syntax == "proto3")
- error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE
-
hassable_fields = {}
for field in message_descriptor.fields:
if field.label == _FieldDescriptor.LABEL_REPEATED:
continue
# For proto3, only submessages and fields inside a oneof have presence.
- if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and
- not field.containing_oneof):
+ if not field.has_presence:
continue
hassable_fields[field.name] = field
@@ -853,8 +826,10 @@ def _AddHasFieldMethod(message_descriptor, cls):
def HasField(self, field_name):
try:
field = hassable_fields[field_name]
- except KeyError:
- raise ValueError(error_msg % (message_descriptor.full_name, field_name))
+ except KeyError as exc:
+        raise ValueError('Protocol message %s has no non-repeated field "%s", '
+                         'or presence is not available for this field.' % (
+                             message_descriptor.full_name, field_name)) from exc
if isinstance(field, descriptor_mod.OneofDescriptor):
try:
@@ -911,28 +886,28 @@ def _AddClearFieldMethod(message_descriptor, cls):
def _AddClearExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
- def ClearExtension(self, extension_handle):
- extension_dict._VerifyExtensionHandle(self, extension_handle)
+ def ClearExtension(self, field_descriptor):
+ extension_dict._VerifyExtensionHandle(self, field_descriptor)
# Similar to ClearField(), above.
- if extension_handle in self._fields:
- del self._fields[extension_handle]
+ if field_descriptor in self._fields:
+ del self._fields[field_descriptor]
self._Modified()
cls.ClearExtension = ClearExtension
def _AddHasExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
- def HasExtension(self, extension_handle):
- extension_dict._VerifyExtensionHandle(self, extension_handle)
- if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
- raise KeyError('"%s" is repeated.' % extension_handle.full_name)
+ def HasExtension(self, field_descriptor):
+ extension_dict._VerifyExtensionHandle(self, field_descriptor)
+ if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
+ raise KeyError('"%s" is repeated.' % field_descriptor.full_name)
- if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
- value = self._fields.get(extension_handle)
+ if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
+ value = self._fields.get(field_descriptor)
return value is not None and value._is_present_in_parent
else:
- return extension_handle in self._fields
+ return field_descriptor in self._fields
cls.HasExtension = HasExtension
def _InternalUnpackAny(msg):
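
The hunks above replace the per-syntax special cases (proto2 vs. proto3 packing, clear-if-default, HasField eligibility) with the descriptor's own is_packed and has_presence properties; behavior for generated messages is unchanged. A rough illustration of what presence means for HasField, assuming a hypothetical proto3 message Item with `int32 count = 1;` and `optional int32 rating = 2;`:

    item = item_pb2.Item()          # item_pb2 is hypothetical
    item.count = 0                  # implicit presence: the default value clears the field
    # item.HasField('count')        # would raise ValueError: field has no presence
    item.rating = 0                 # explicit presence via 'optional'
    print(item.HasField('rating'))  # True, even though the value is the default
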
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/python_protobuf.cc b/contrib/python/protobuf/py3/google/protobuf/internal/python_protobuf.cc
new file mode 100644
index 0000000000..bbef7159aa
--- /dev/null
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/python_protobuf.cc
@@ -0,0 +1,63 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: qrczak@google.com (Marcin Kowalczyk)
+
+#include "google/protobuf/python_protobuf.h"
+
+namespace google {
+namespace protobuf {
+namespace python {
+
+static const Message* GetCProtoInsidePyProtoStub(PyObject* msg) {
+ return nullptr;
+}
+static Message* MutableCProtoInsidePyProtoStub(PyObject* msg) {
+ return nullptr;
+}
+
+// This is initialized with a default, stub implementation.
+// If python-google.protobuf.cc is loaded, the function pointer is overridden
+// with a full implementation.
+const Message* (*GetCProtoInsidePyProtoPtr)(PyObject* msg) =
+ GetCProtoInsidePyProtoStub;
+Message* (*MutableCProtoInsidePyProtoPtr)(PyObject* msg) =
+ MutableCProtoInsidePyProtoStub;
+
+const Message* GetCProtoInsidePyProto(PyObject* msg) {
+ return GetCProtoInsidePyProtoPtr(msg);
+}
+Message* MutableCProtoInsidePyProto(PyObject* msg) {
+ return MutableCProtoInsidePyProtoPtr(msg);
+}
+
+} // namespace python
+} // namespace protobuf
+} // namespace google
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/testing_refleaks.py b/contrib/python/protobuf/py3/google/protobuf/internal/testing_refleaks.py
new file mode 100644
index 0000000000..5f19c46fd5
--- /dev/null
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/testing_refleaks.py
@@ -0,0 +1,142 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc. All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A subclass of unittest.TestCase which checks for reference leaks.
+
+To use:
+- Use testing_refleak.BaseTestCase instead of unittest.TestCase
+- Configure and compile Python with --with-pydebug
+
+If sys.gettotalrefcount() is not available (because Python was built without
+the Py_DEBUG option), then this module is a no-op and tests will run normally.
+"""
+
+import copyreg
+import gc
+import sys
+import unittest
+
+
+class LocalTestResult(unittest.TestResult):
+ """A TestResult which forwards events to a parent object, except for Skips."""
+
+ def __init__(self, parent_result):
+ unittest.TestResult.__init__(self)
+ self.parent_result = parent_result
+
+ def addError(self, test, error):
+ self.parent_result.addError(test, error)
+
+ def addFailure(self, test, error):
+ self.parent_result.addFailure(test, error)
+
+ def addSkip(self, test, reason):
+ pass
+
+
+class ReferenceLeakCheckerMixin(object):
+ """A mixin class for TestCase, which checks reference counts."""
+
+ NB_RUNS = 3
+
+ def run(self, result=None):
+ testMethod = getattr(self, self._testMethodName)
+ expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
+ expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
+ if expecting_failure_class or expecting_failure_method:
+ return
+
+ # python_message.py registers all Message classes to some pickle global
+ # registry, which makes the classes immortal.
+    # We save a copy of this registry, and restore it before counting references.
+ self._saved_pickle_registry = copyreg.dispatch_table.copy()
+
+ # Run the test twice, to warm up the instance attributes.
+ super(ReferenceLeakCheckerMixin, self).run(result=result)
+ super(ReferenceLeakCheckerMixin, self).run(result=result)
+
+ oldrefcount = 0
+ local_result = LocalTestResult(result)
+ num_flakes = 0
+
+ refcount_deltas = []
+ while len(refcount_deltas) < self.NB_RUNS:
+ oldrefcount = self._getRefcounts()
+ super(ReferenceLeakCheckerMixin, self).run(result=local_result)
+ newrefcount = self._getRefcounts()
+ # If the GC was able to collect some objects after the call to run() that
+ # it could not collect before the call, then the counts won't match.
+ if newrefcount < oldrefcount and num_flakes < 2:
+ # This result is (probably) a flake -- garbage collectors aren't very
+ # predictable, but a lower ending refcount is the opposite of the
+ # failure we are testing for. If the result is repeatable, then we will
+ # eventually report it, but not after trying to eliminate it.
+ num_flakes += 1
+ continue
+ num_flakes = 0
+ refcount_deltas.append(newrefcount - oldrefcount)
+ print(refcount_deltas, self)
+
+ try:
+ self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
+ except Exception: # pylint: disable=broad-except
+ result.addError(self, sys.exc_info())
+
+ def _getRefcounts(self):
+ copyreg.dispatch_table.clear()
+ copyreg.dispatch_table.update(self._saved_pickle_registry)
+ # It is sometimes necessary to gc.collect() multiple times, to ensure
+ # that all objects can be collected.
+ gc.collect()
+ gc.collect()
+ gc.collect()
+ return sys.gettotalrefcount()
+
+
+if hasattr(sys, 'gettotalrefcount'):
+
+ def TestCase(test_class):
+ new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
+ new_class = type(test_class)(
+ test_class.__name__, new_bases, dict(test_class.__dict__))
+ return new_class
+ SkipReferenceLeakChecker = unittest.skip
+
+else:
+ # When PyDEBUG is not enabled, run the tests normally.
+
+ def TestCase(test_class):
+ return test_class
+
+ def SkipReferenceLeakChecker(reason):
+ del reason # Don't skip, so don't need a reason.
+ def Same(func):
+ return func
+ return Same
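
The module is only effective on a --with-pydebug interpreter, where sys.gettotalrefcount() exists; otherwise its decorators are no-ops. A minimal sketch of how a test file might opt in (not part of this patch):

    import unittest
    from google.protobuf.internal import testing_refleaks

    @testing_refleaks.TestCase
    class MyMessageTest(unittest.TestCase):

      def testRoundTrip(self):
        # Each test is re-run several times and refcount deltas are asserted to be zero.
        self.assertEqual(1 + 1, 2)

    if __name__ == '__main__':
      unittest.main()
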
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py b/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
index a53e71fe8e..165dcd8c2e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/type_checkers.py
@@ -75,10 +75,6 @@ def ToShortestFloat(original):
return rounded
-def SupportsOpenEnums(field_descriptor):
- return field_descriptor.containing_type.syntax == 'proto3'
-
-
def GetTypeChecker(field):
"""Returns a type checker for a message field of the specified types.
@@ -93,11 +89,11 @@ def GetTypeChecker(field):
field.type == _FieldDescriptor.TYPE_STRING):
return UnicodeValueChecker()
if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
- if SupportsOpenEnums(field):
+ if field.enum_type.is_closed:
+ return EnumValueChecker(field.enum_type)
+ else:
# When open enums are supported, any int32 can be assigned.
return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
- else:
- return EnumValueChecker(field.enum_type)
return _VALUE_CHECKERS[field.cpp_type]
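
Replacing SupportsOpenEnums() with field.enum_type.is_closed turns a per-file syntax check into a per-enum property, which matters when a proto2 (closed) enum is referenced from a proto3 file. The observable effect on assignment, sketched with hypothetical generated messages:

    m3 = proto3_pb2.Msg()       # hypothetical: enum field whose type is open
    m3.color = 12345            # accepted and preserved as an unknown value

    m2 = proto2_pb2.Msg()       # hypothetical: enum field whose type is closed
    try:
      m2.color = 12345          # unknown values are rejected for closed enums
    except ValueError as err:
      print('rejected:', err)
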
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py b/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
index 3cc97f6a55..ff6929effa 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/well_known_types.py
@@ -44,7 +44,9 @@ import calendar
import collections.abc
import datetime
-from google.protobuf.descriptor import FieldDescriptor
+from google.protobuf.internal import field_mask
+
+FieldMask = field_mask.FieldMask
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
@@ -430,306 +432,6 @@ def _RoundTowardZero(value, divider):
return result
-class FieldMask(object):
- """Class for FieldMask message type."""
-
- __slots__ = ()
-
- def ToJsonString(self):
- """Converts FieldMask to string according to proto3 JSON spec."""
- camelcase_paths = []
- for path in self.paths:
- camelcase_paths.append(_SnakeCaseToCamelCase(path))
- return ','.join(camelcase_paths)
-
- def FromJsonString(self, value):
- """Converts string to FieldMask according to proto3 JSON spec."""
- if not isinstance(value, str):
- raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
- self.Clear()
- if value:
- for path in value.split(','):
- self.paths.append(_CamelCaseToSnakeCase(path))
-
- def IsValidForDescriptor(self, message_descriptor):
- """Checks whether the FieldMask is valid for Message Descriptor."""
- for path in self.paths:
- if not _IsValidPath(message_descriptor, path):
- return False
- return True
-
- def AllFieldsFromDescriptor(self, message_descriptor):
- """Gets all direct fields of Message Descriptor to FieldMask."""
- self.Clear()
- for field in message_descriptor.fields:
- self.paths.append(field.name)
-
- def CanonicalFormFromMask(self, mask):
- """Converts a FieldMask to the canonical form.
-
- Removes paths that are covered by another path. For example,
- "foo.bar" is covered by "foo" and will be removed if "foo"
- is also in the FieldMask. Then sorts all paths in alphabetical order.
-
- Args:
- mask: The original FieldMask to be converted.
- """
- tree = _FieldMaskTree(mask)
- tree.ToFieldMask(self)
-
- def Union(self, mask1, mask2):
- """Merges mask1 and mask2 into this FieldMask."""
- _CheckFieldMaskMessage(mask1)
- _CheckFieldMaskMessage(mask2)
- tree = _FieldMaskTree(mask1)
- tree.MergeFromFieldMask(mask2)
- tree.ToFieldMask(self)
-
- def Intersect(self, mask1, mask2):
- """Intersects mask1 and mask2 into this FieldMask."""
- _CheckFieldMaskMessage(mask1)
- _CheckFieldMaskMessage(mask2)
- tree = _FieldMaskTree(mask1)
- intersection = _FieldMaskTree()
- for path in mask2.paths:
- tree.IntersectPath(path, intersection)
- intersection.ToFieldMask(self)
-
- def MergeMessage(
- self, source, destination,
- replace_message_field=False, replace_repeated_field=False):
- """Merges fields specified in FieldMask from source to destination.
-
- Args:
- source: Source message.
- destination: The destination message to be merged into.
- replace_message_field: Replace message field if True. Merge message
- field if False.
- replace_repeated_field: Replace repeated field if True. Append
- elements of repeated field if False.
- """
- tree = _FieldMaskTree(self)
- tree.MergeMessage(
- source, destination, replace_message_field, replace_repeated_field)
-
-
-def _IsValidPath(message_descriptor, path):
- """Checks whether the path is valid for Message Descriptor."""
- parts = path.split('.')
- last = parts.pop()
- for name in parts:
- field = message_descriptor.fields_by_name.get(name)
- if (field is None or
- field.label == FieldDescriptor.LABEL_REPEATED or
- field.type != FieldDescriptor.TYPE_MESSAGE):
- return False
- message_descriptor = field.message_type
- return last in message_descriptor.fields_by_name
-
-
-def _CheckFieldMaskMessage(message):
- """Raises ValueError if message is not a FieldMask."""
- message_descriptor = message.DESCRIPTOR
- if (message_descriptor.name != 'FieldMask' or
- message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
- raise ValueError('Message {0} is not a FieldMask.'.format(
- message_descriptor.full_name))
-
-
-def _SnakeCaseToCamelCase(path_name):
- """Converts a path name from snake_case to camelCase."""
- result = []
- after_underscore = False
- for c in path_name:
- if c.isupper():
- raise ValueError(
- 'Fail to print FieldMask to Json string: Path name '
- '{0} must not contain uppercase letters.'.format(path_name))
- if after_underscore:
- if c.islower():
- result.append(c.upper())
- after_underscore = False
- else:
- raise ValueError(
- 'Fail to print FieldMask to Json string: The '
- 'character after a "_" must be a lowercase letter '
- 'in path name {0}.'.format(path_name))
- elif c == '_':
- after_underscore = True
- else:
- result += c
-
- if after_underscore:
- raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
- 'in path name {0}.'.format(path_name))
- return ''.join(result)
-
-
-def _CamelCaseToSnakeCase(path_name):
- """Converts a field name from camelCase to snake_case."""
- result = []
- for c in path_name:
- if c == '_':
- raise ValueError('Fail to parse FieldMask: Path name '
- '{0} must not contain "_"s.'.format(path_name))
- if c.isupper():
- result += '_'
- result += c.lower()
- else:
- result += c
- return ''.join(result)
-
-
-class _FieldMaskTree(object):
- """Represents a FieldMask in a tree structure.
-
- For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
- the FieldMaskTree will be:
- [_root] -+- foo -+- bar
- | |
- | +- baz
- |
- +- bar --- baz
- In the tree, each leaf node represents a field path.
- """
-
- __slots__ = ('_root',)
-
- def __init__(self, field_mask=None):
- """Initializes the tree by FieldMask."""
- self._root = {}
- if field_mask:
- self.MergeFromFieldMask(field_mask)
-
- def MergeFromFieldMask(self, field_mask):
- """Merges a FieldMask to the tree."""
- for path in field_mask.paths:
- self.AddPath(path)
-
- def AddPath(self, path):
- """Adds a field path into the tree.
-
- If the field path to add is a sub-path of an existing field path
- in the tree (i.e., a leaf node), it means the tree already matches
- the given path so nothing will be added to the tree. If the path
- matches an existing non-leaf node in the tree, that non-leaf node
- will be turned into a leaf node with all its children removed because
- the path matches all the node's children. Otherwise, a new path will
- be added.
-
- Args:
- path: The field path to add.
- """
- node = self._root
- for name in path.split('.'):
- if name not in node:
- node[name] = {}
- elif not node[name]:
- # Pre-existing empty node implies we already have this entire tree.
- return
- node = node[name]
- # Remove any sub-trees we might have had.
- node.clear()
-
- def ToFieldMask(self, field_mask):
- """Converts the tree to a FieldMask."""
- field_mask.Clear()
- _AddFieldPaths(self._root, '', field_mask)
-
- def IntersectPath(self, path, intersection):
- """Calculates the intersection part of a field path with this tree.
-
- Args:
- path: The field path to calculates.
- intersection: The out tree to record the intersection part.
- """
- node = self._root
- for name in path.split('.'):
- if name not in node:
- return
- elif not node[name]:
- intersection.AddPath(path)
- return
- node = node[name]
- intersection.AddLeafNodes(path, node)
-
- def AddLeafNodes(self, prefix, node):
- """Adds leaf nodes begin with prefix to this tree."""
- if not node:
- self.AddPath(prefix)
- for name in node:
- child_path = prefix + '.' + name
- self.AddLeafNodes(child_path, node[name])
-
- def MergeMessage(
- self, source, destination,
- replace_message, replace_repeated):
- """Merge all fields specified by this tree from source to destination."""
- _MergeMessage(
- self._root, source, destination, replace_message, replace_repeated)
-
-
-def _StrConvert(value):
- """Converts value to str if it is not."""
- # This file is imported by c extension and some methods like ClearField
- # requires string for the field name. py2/py3 has different text
- # type and may use unicode.
- if not isinstance(value, str):
- return value.encode('utf-8')
- return value
-
-
-def _MergeMessage(
- node, source, destination, replace_message, replace_repeated):
- """Merge all fields specified by a sub-tree from source to destination."""
- source_descriptor = source.DESCRIPTOR
- for name in node:
- child = node[name]
- field = source_descriptor.fields_by_name[name]
- if field is None:
- raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
- name, source_descriptor.full_name))
- if child:
- # Sub-paths are only allowed for singular message fields.
- if (field.label == FieldDescriptor.LABEL_REPEATED or
- field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
- raise ValueError('Error: Field {0} in message {1} is not a singular '
- 'message field and cannot have sub-fields.'.format(
- name, source_descriptor.full_name))
- if source.HasField(name):
- _MergeMessage(
- child, getattr(source, name), getattr(destination, name),
- replace_message, replace_repeated)
- continue
- if field.label == FieldDescriptor.LABEL_REPEATED:
- if replace_repeated:
- destination.ClearField(_StrConvert(name))
- repeated_source = getattr(source, name)
- repeated_destination = getattr(destination, name)
- repeated_destination.MergeFrom(repeated_source)
- else:
- if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
- if replace_message:
- destination.ClearField(_StrConvert(name))
- if source.HasField(name):
- getattr(destination, name).MergeFrom(getattr(source, name))
- else:
- setattr(destination, name, getattr(source, name))
-
-
-def _AddFieldPaths(node, prefix, field_mask):
- """Adds the field paths descended from node to field_mask."""
- if not node and prefix:
- field_mask.paths.append(prefix)
- return
- for name in sorted(node):
- if prefix:
- child_path = prefix + '.' + name
- else:
- child_path = name
- _AddFieldPaths(node[name], child_path, field_mask)
-
-
def _SetStructValue(struct_value, value):
if value is None:
struct_value.null_value = 0
diff --git a/contrib/python/protobuf/py3/google/protobuf/internal/wire_format.py b/contrib/python/protobuf/py3/google/protobuf/internal/wire_format.py
index 883f525585..1f54414b1a 100644
--- a/contrib/python/protobuf/py3/google/protobuf/internal/wire_format.py
+++ b/contrib/python/protobuf/py3/google/protobuf/internal/wire_format.py
@@ -43,7 +43,7 @@ TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
-# These values must match WireType enum in google/protobuf/wire_format.h.
+# These values must match WireType enum in //google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
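
For reference, these constants combine with a field number into the varint tag as (field_number << TAG_TYPE_BITS) | wire_type; the module's existing PackTag/UnpackTag helpers do exactly this:

    from google.protobuf.internal import wire_format

    tag = wire_format.PackTag(1, wire_format.WIRETYPE_VARINT)
    print(tag)                         # 8 == (1 << 3) | 0
    print(wire_format.UnpackTag(tag))  # (1, 0)
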
diff --git a/contrib/python/protobuf/py3/google/protobuf/json_format.py b/contrib/python/protobuf/py3/google/protobuf/json_format.py
index 5024ed89d7..a04e8aef13 100644
--- a/contrib/python/protobuf/py3/google/protobuf/json_format.py
+++ b/contrib/python/protobuf/py3/google/protobuf/json_format.py
@@ -53,6 +53,7 @@ import sys
from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
+from google.protobuf import message_factory
from google.protobuf import symbol_database
@@ -109,7 +110,8 @@ def MessageToJson(
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
indent: The JSON object will be pretty-printed with this indent level.
- An indent level of 0 or negative will only insert newlines.
+ An indent level of 0 or negative will only insert newlines. If the
+ indent level is None, no newlines will be inserted.
sort_keys: If True, then the output will be sorted by field names.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
@@ -269,7 +271,7 @@ class _Printer(object):
except ValueError as e:
raise SerializeToJsonError(
- 'Failed to serialize {0} field: {1}.'.format(field.name, e))
+ 'Failed to serialize {0} field: {1}.'.format(field.name, e)) from e
return js
@@ -286,10 +288,11 @@ class _Printer(object):
if enum_value is not None:
return enum_value.name
else:
- if field.file.syntax == 'proto3':
+ if field.enum_type.is_closed:
+        raise SerializeToJsonError('Enum field contains an integer value '
+                                   'which cannot be mapped to an enum value.')
+ else:
return value
- raise SerializeToJsonError('Enum field contains an integer value '
- 'which can not mapped to an enum value.')
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
# Use base64 Data encoding for bytes
@@ -352,8 +355,14 @@ class _Printer(object):
return None
if which == 'list_value':
return self._ListValueMessageToJsonObject(message.list_value)
- if which == 'struct_value':
- value = message.struct_value
+ if which == 'number_value':
+ value = message.number_value
+ if math.isinf(value):
+ raise ValueError('Fail to serialize Infinity for Value.number_value, '
+ 'which would parse as string_value')
+ if math.isnan(value):
+ raise ValueError('Fail to serialize NaN for Value.number_value, '
+ 'which would parse as string_value')
else:
value = getattr(message, which)
oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
@@ -397,10 +406,11 @@ def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
type_name = type_url.split('/')[-1]
try:
message_descriptor = pool.FindMessageTypeByName(type_name)
- except KeyError:
+ except KeyError as e:
raise TypeError(
- 'Can not find message descriptor by type_url: {0}'.format(type_url))
- message_class = db.GetPrototype(message_descriptor)
+ 'Can not find message descriptor by type_url: {0}'.format(type_url)
+ ) from e
+ message_class = message_factory.GetMessageClass(message_descriptor)
return message_class()
@@ -432,7 +442,7 @@ def Parse(text,
try:
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
- raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
+ raise ParseError('Failed to load JSON: {0}.'.format(str(e))) from e
return ParseDict(js, message, ignore_unknown_fields, descriptor_pool,
max_recursion_depth)
@@ -624,13 +634,19 @@ class _Parser(object):
'{0}.{1}'.format(path, name)))
except ParseError as e:
if field and field.containing_oneof is None:
- raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
else:
- raise ParseError(str(e))
+ raise ParseError(str(e)) from e
except ValueError as e:
- raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
except TypeError as e:
- raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
+ raise ParseError(
+ 'Failed to parse {0} field: {1}.'.format(name, e)
+ ) from e
def _ConvertAnyMessage(self, value, message, path):
"""Convert a JSON representation into Any message."""
@@ -638,14 +654,15 @@ class _Parser(object):
return
try:
type_url = value['@type']
- except KeyError:
+ except KeyError as e:
raise ParseError(
- '@type is missing when parsing any message at {0}'.format(path))
+ '@type is missing when parsing any message at {0}'.format(path)
+ ) from e
try:
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
except TypeError as e:
- raise ParseError('{0} at {1}'.format(e, path))
+ raise ParseError('{0} at {1}'.format(e, path)) from e
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
@@ -670,7 +687,7 @@ class _Parser(object):
try:
message.FromJsonString(value)
except ValueError as e:
- raise ParseError('{0} at {1}'.format(e, path))
+ raise ParseError('{0} at {1}'.format(e, path)) from e
def _ConvertValueMessage(self, value, message, path):
"""Convert a JSON representation into Value message."""
@@ -794,18 +811,18 @@ def _ConvertScalarFieldValue(value, field, path, require_str=False):
try:
number = int(value)
enum_value = field.enum_type.values_by_number.get(number, None)
- except ValueError:
+ except ValueError as e:
raise ParseError('Invalid enum value {0} for enum type {1}'.format(
- value, field.enum_type.full_name))
+ value, field.enum_type.full_name)) from e
if enum_value is None:
- if field.file.syntax == 'proto3':
- # Proto3 accepts unknown enums.
+ if field.enum_type.is_closed:
+ raise ParseError('Invalid enum value {0} for enum type {1}'.format(
+ value, field.enum_type.full_name))
+ else:
return number
- raise ParseError('Invalid enum value {0} for enum type {1}'.format(
- value, field.enum_type.full_name))
return enum_value.number
except ParseError as e:
- raise ParseError('{0} at {1}'.format(e, path))
+ raise ParseError('{0} at {1}'.format(e, path)) from e
def _ConvertInteger(value):
@@ -857,7 +874,7 @@ def _ConvertFloat(value, field):
try:
# Assume Python compatible syntax.
return float(value)
- except ValueError:
+ except ValueError as e:
# Check alternative spellings.
if value == _NEG_INFINITY:
return float('-inf')
@@ -866,7 +883,7 @@ def _ConvertFloat(value, field):
elif value == _NAN:
return float('nan')
else:
- raise ParseError('Couldn\'t parse float: {0}'.format(value))
+ raise ParseError('Couldn\'t parse float: {0}'.format(value)) from e
def _ConvertBool(value, require_str):
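
The json_format changes above are mostly exception chaining ("from e"), closed-enum handling, and refusing to serialize Infinity/NaN in Value.number_value; ordinary round trips are unaffected. A minimal sketch using the Struct well-known type (not part of this patch):

    from google.protobuf import json_format
    from google.protobuf import struct_pb2

    msg = struct_pb2.Struct()
    json_format.Parse('{"name": "ydb", "replicas": 3}', msg)
    print(json_format.MessageToJson(msg, indent=None))  # indent=None: no newlines
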
diff --git a/contrib/python/protobuf/py3/google/protobuf/message.py b/contrib/python/protobuf/py3/google/protobuf/message.py
index 76c6802f70..37b9c4054e 100644
--- a/contrib/python/protobuf/py3/google/protobuf/message.py
+++ b/contrib/python/protobuf/py3/google/protobuf/message.py
@@ -74,7 +74,8 @@ class Message(object):
__slots__ = []
- #: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
+ #: The :class:`google.protobuf.Descriptor`
+ # for this message type.
DESCRIPTOR = None
def __deepcopy__(self, memo=None):
@@ -191,7 +192,7 @@ class Message(object):
raise NotImplementedError
def ParseFromString(self, serialized):
- """Parse serialized protocol buffer data into this message.
+ """Parse serialized protocol buffer data in binary form into this message.
Like :func:`MergeFromString()`, except we clear the object first.
@@ -311,13 +312,13 @@ class Message(object):
"""
raise NotImplementedError
- def HasExtension(self, extension_handle):
+ def HasExtension(self, field_descriptor):
"""Checks if a certain extension is present for this message.
Extensions are retrieved using the :attr:`Extensions` mapping (if present).
Args:
- extension_handle: The handle for the extension to check.
+ field_descriptor: The field descriptor for the extension to check.
Returns:
bool: Whether the extension is present for this message.
@@ -329,11 +330,11 @@ class Message(object):
"""
raise NotImplementedError
- def ClearExtension(self, extension_handle):
+ def ClearExtension(self, field_descriptor):
"""Clears the contents of a given extension.
Args:
- extension_handle: The handle for the extension to clear.
+ field_descriptor: The field descriptor for the extension to clear.
"""
raise NotImplementedError
@@ -367,7 +368,7 @@ class Message(object):
raise NotImplementedError
@staticmethod
- def RegisterExtension(extension_handle):
+ def RegisterExtension(field_descriptor):
raise NotImplementedError
def _SetListener(self, message_listener):
diff --git a/contrib/python/protobuf/py3/google/protobuf/message_factory.py b/contrib/python/protobuf/py3/google/protobuf/message_factory.py
index 8d65204581..fac1165c51 100644
--- a/contrib/python/protobuf/py3/google/protobuf/message_factory.py
+++ b/contrib/python/protobuf/py3/google/protobuf/message_factory.py
@@ -39,6 +39,8 @@ my_proto_instance = message_classes['some.proto.package.MessageName']()
__author__ = 'matthewtoia@google.com (Matt Toia)'
+import warnings
+
from google.protobuf.internal import api_implementation
from google.protobuf import descriptor_pool
from google.protobuf import message
@@ -53,6 +55,95 @@ else:
_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
+def GetMessageClass(descriptor):
+ """Obtains a proto2 message class based on the passed in descriptor.
+
+ Passing a descriptor with a fully qualified name matching a previous
+ invocation will cause the same class to be returned.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ concrete_class = getattr(descriptor, '_concrete_class', None)
+ if concrete_class:
+ return concrete_class
+ return _InternalCreateMessageClass(descriptor)
+
+
+def GetMessageClassesForFiles(files, pool):
+ """Gets all the messages from specified files.
+
+ This will find and resolve dependencies, failing if the descriptor
+ pool cannot satisfy them.
+
+ Args:
+ files: The file names to extract messages from.
+ pool: The descriptor pool to find the files including the dependent
+ files.
+
+ Returns:
+ A dictionary mapping proto names to the message classes.
+ """
+ result = {}
+ for file_name in files:
+ file_desc = pool.FindFileByName(file_name)
+ for desc in file_desc.message_types_by_name.values():
+ result[desc.full_name] = GetMessageClass(desc)
+
+ # While the extension FieldDescriptors are created by the descriptor pool,
+ # the python classes created in the factory need them to be registered
+ # explicitly, which is done below.
+ #
+ # The call to RegisterExtension will specifically check if the
+ # extension was already registered on the object and either
+ # ignore the registration if the original was the same, or raise
+ # an error if they were different.
+
+ for extension in file_desc.extensions_by_name.values():
+ extended_class = GetMessageClass(extension.containing_type)
+ extended_class.RegisterExtension(extension)
+ # Recursively load protos for extension field, in order to be able to
+ # fully represent the extension. This matches the behavior for regular
+ # fields too.
+ if extension.message_type:
+ GetMessageClass(extension.message_type)
+ return result
+
+
+def _InternalCreateMessageClass(descriptor):
+ """Builds a proto2 message class based on the passed in descriptor.
+
+ Args:
+ descriptor: The descriptor to build from.
+
+ Returns:
+ A class describing the passed in descriptor.
+ """
+ descriptor_name = descriptor.name
+ result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
+ descriptor_name,
+ (message.Message,),
+ {
+ 'DESCRIPTOR': descriptor,
+ # If module not set, it wrongly points to message_factory module.
+ '__module__': None,
+ })
+ for field in descriptor.fields:
+ if field.message_type:
+ GetMessageClass(field.message_type)
+ for extension in result_class.DESCRIPTOR.extensions:
+ extended_class = GetMessageClass(extension.containing_type)
+ extended_class.RegisterExtension(extension)
+ if extension.message_type:
+ GetMessageClass(extension.message_type)
+ return result_class
+
+
+# Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
+# method above instead.
class MessageFactory(object):
"""Factory for creating Proto2 messages from descriptors in a pool."""
@@ -60,9 +151,6 @@ class MessageFactory(object):
"""Initializes a new factory."""
self.pool = pool or descriptor_pool.DescriptorPool()
- # local cache of all classes built from protobuf descriptors
- self._classes = {}
-
def GetPrototype(self, descriptor):
"""Obtains a proto2 message class based on the passed in descriptor.
@@ -75,21 +163,17 @@ class MessageFactory(object):
Returns:
A class describing the passed in descriptor.
"""
- if descriptor not in self._classes:
- result_class = self.CreatePrototype(descriptor)
- # The assignment to _classes is redundant for the base implementation, but
- # might avoid confusion in cases where CreatePrototype gets overridden and
- # does not call the base implementation.
- self._classes[descriptor] = result_class
- return result_class
- return self._classes[descriptor]
+ # TODO(b/258832141): add this warning
+ # warnings.warn('MessageFactory class is deprecated. Please use '
+ # 'GetMessageClass() instead of MessageFactory.GetPrototype. '
+ # 'MessageFactory class will be removed after 2024.')
+ return GetMessageClass(descriptor)
def CreatePrototype(self, descriptor):
"""Builds a proto2 message class based on the passed in descriptor.
Don't call this function directly, it always creates a new class. Call
- GetPrototype() instead. This method is meant to be overridden in subblasses
- to perform additional operations on the newly constructed class.
+ GetMessageClass() instead.
Args:
descriptor: The descriptor to build from.
@@ -97,30 +181,11 @@ class MessageFactory(object):
Returns:
A class describing the passed in descriptor.
"""
- descriptor_name = descriptor.name
- result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
- descriptor_name,
- (message.Message,),
- {
- 'DESCRIPTOR': descriptor,
- # If module not set, it wrongly points to message_factory module.
- '__module__': None,
- })
- result_class._FACTORY = self # pylint: disable=protected-access
- # Assign in _classes before doing recursive calls to avoid infinite
- # recursion.
- self._classes[descriptor] = result_class
- for field in descriptor.fields:
- if field.message_type:
- self.GetPrototype(field.message_type)
- for extension in result_class.DESCRIPTOR.extensions:
- if extension.containing_type not in self._classes:
- self.GetPrototype(extension.containing_type)
- extended_class = self._classes[extension.containing_type]
- extended_class.RegisterExtension(extension)
- if extension.message_type:
- self.GetPrototype(extension.message_type)
- return result_class
+ # TODO(b/258832141): add this warning
+ # warnings.warn('Directly call CreatePrototype is wrong. Please use '
+ # 'GetMessageClass() method instead. Directly use '
+ # 'CreatePrototype will raise error after July 2023.')
+ return _InternalCreateMessageClass(descriptor)
def GetMessages(self, files):
"""Gets all the messages from a specified file.
@@ -136,39 +201,20 @@ class MessageFactory(object):
any dependent messages as well as any messages defined in the same file as
a specified message.
"""
- result = {}
- for file_name in files:
- file_desc = self.pool.FindFileByName(file_name)
- for desc in file_desc.message_types_by_name.values():
- result[desc.full_name] = self.GetPrototype(desc)
-
- # While the extension FieldDescriptors are created by the descriptor pool,
- # the python classes created in the factory need them to be registered
- # explicitly, which is done below.
- #
- # The call to RegisterExtension will specifically check if the
- # extension was already registered on the object and either
- # ignore the registration if the original was the same, or raise
- # an error if they were different.
-
- for extension in file_desc.extensions_by_name.values():
- if extension.containing_type not in self._classes:
- self.GetPrototype(extension.containing_type)
- extended_class = self._classes[extension.containing_type]
- extended_class.RegisterExtension(extension)
- if extension.message_type:
- self.GetPrototype(extension.message_type)
- return result
-
-
-_FACTORY = MessageFactory()
-
-
-def GetMessages(file_protos):
+ # TODO(b/258832141): add this warning
+ # warnings.warn('MessageFactory class is deprecated. Please use '
+ # 'GetMessageClassesForFiles() instead of '
+ # 'MessageFactory.GetMessages(). MessageFactory class '
+ # 'will be removed after 2024.')
+ return GetMessageClassesForFiles(files, self.pool)
+
+
+def GetMessages(file_protos, pool=None):
"""Builds a dictionary of all the messages available in a set of files.
Args:
file_protos: Iterable of FileDescriptorProto to build messages out of.
+ pool: The descriptor pool to add the file protos to.
Returns:
A dictionary mapping proto names to the message classes. This will include
@@ -177,13 +223,15 @@ def GetMessages(file_protos):
"""
# The cpp implementation of the protocol buffer library requires to add the
# message in topological order of the dependency graph.
+ des_pool = pool or descriptor_pool.DescriptorPool()
file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
def _AddFile(file_proto):
for dependency in file_proto.dependency:
if dependency in file_by_name:
# Remove from elements to be visited, in order to cut cycles.
_AddFile(file_by_name.pop(dependency))
- _FACTORY.pool.Add(file_proto)
+ des_pool.Add(file_proto)
while file_by_name:
_AddFile(file_by_name.popitem()[1])
- return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
+ return GetMessageClassesForFiles(
+ [file_proto.name for file_proto in file_protos], des_pool)
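The message_factory.py hunks above replace the MessageFactory methods with module-level helpers (GetMessageClass, GetMessageClassesForFiles, and a pool-aware GetMessages). A minimal usage sketch of the new entry point; the example.proto contents below are illustrative and not part of this diff:

    from google.protobuf import descriptor_pb2
    from google.protobuf import message_factory

    # Describe a hypothetical example.proto with one message and one field.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example.proto'
    file_proto.package = 'example'
    msg = file_proto.message_type.add()
    msg.name = 'Ping'
    field = msg.field.add()
    field.name = 'payload'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # GetMessages adds the files to a fresh pool in dependency order and
    # returns {full_name: message_class}.
    classes = message_factory.GetMessages([file_proto])
    Ping = classes['example.Ping']
    print(Ping(payload='hello'))

Once the file is in a pool, the same class is also reachable through message_factory.GetMessageClass(pool.FindMessageTypeByName('example.Ping')).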
diff --git a/contrib/python/protobuf/py3/google/protobuf/proto_api.h b/contrib/python/protobuf/py3/google/protobuf/proto_api.h
index 9969a91f44..4e910e07cb 100644
--- a/contrib/python/protobuf/py3/google/protobuf/proto_api.h
+++ b/contrib/python/protobuf/py3/google/protobuf/proto_api.h
@@ -48,8 +48,8 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/descriptor_database.h>
-#include <google/protobuf/message.h>
+#include "google/protobuf/descriptor_database.h"
+#include "google/protobuf/message.h"
namespace google {
namespace protobuf {
@@ -133,8 +133,7 @@ struct PyProto_API {
};
inline const char* PyProtoAPICapsuleName() {
- static const char kCapsuleName[] =
- "google.protobuf.pyext._message.proto_API";
+ static const char kCapsuleName[] = "google.protobuf.pyext._message.proto_API";
return kCapsuleName;
}
diff --git a/contrib/python/protobuf/py3/google/protobuf/proto_builder.py b/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
index a4667ce63e..8dab8b3ee0 100644
--- a/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
+++ b/contrib/python/protobuf/py3/google/protobuf/proto_builder.py
@@ -36,22 +36,23 @@ import os
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor
+from google.protobuf import descriptor_pool
from google.protobuf import message_factory
-def _GetMessageFromFactory(factory, full_name):
+def _GetMessageFromFactory(pool, full_name):
"""Get a proto class from the MessageFactory by name.
Args:
- factory: a MessageFactory instance.
+ pool: a descriptor pool.
full_name: str, the fully qualified name of the proto type.
Returns:
A class, for the type identified by full_name.
Raises:
KeyError, if the proto is not found in the factory's descriptor pool.
"""
- proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
- proto_cls = factory.GetPrototype(proto_descriptor)
+ proto_descriptor = pool.FindMessageTypeByName(full_name)
+ proto_cls = message_factory.GetMessageClass(proto_descriptor)
return proto_cls
@@ -69,11 +70,10 @@ def MakeSimpleProtoClass(fields, full_name=None, pool=None):
Returns:
a class, the new protobuf class with a FileDescriptor.
"""
- factory = message_factory.MessageFactory(pool=pool)
-
+ pool_instance = pool or descriptor_pool.DescriptorPool()
if full_name is not None:
try:
- proto_cls = _GetMessageFromFactory(factory, full_name)
+ proto_cls = _GetMessageFromFactory(pool_instance, full_name)
return proto_cls
except KeyError:
# The factory's DescriptorPool doesn't know about this class yet.
@@ -99,16 +99,16 @@ def MakeSimpleProtoClass(fields, full_name=None, pool=None):
full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
fields_hash.hexdigest())
try:
- proto_cls = _GetMessageFromFactory(factory, full_name)
+ proto_cls = _GetMessageFromFactory(pool_instance, full_name)
return proto_cls
except KeyError:
# The factory's DescriptorPool doesn't know about this class yet.
pass
# This is the first time we see this proto: add a new descriptor to the pool.
- factory.pool.Add(
+ pool_instance.Add(
_MakeFileDescriptorProto(proto_file_name, full_name, field_items))
- return _GetMessageFromFactory(factory, full_name)
+ return _GetMessageFromFactory(pool_instance, full_name)
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
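proto_builder now threads an explicit descriptor pool instead of a MessageFactory. A usage sketch with made-up field and type names; the pool argument may be omitted to let the builder create a private pool:

    import collections

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_pool
    from google.protobuf import proto_builder

    pool = descriptor_pool.DescriptorPool()
    fields = collections.OrderedDict([
        ('id', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
        ('name', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
    ])
    # MakeSimpleProtoClass reuses the class if the pool already knows the
    # full name; otherwise it adds a new file descriptor and builds the class
    # via message_factory.GetMessageClass().
    SimpleRecord = proto_builder.MakeSimpleProtoClass(
        fields, full_name='example.SimpleRecord', pool=pool)
    print(SimpleRecord(id=1, name='demo'))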
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/__init__.py b/contrib/python/protobuf/py3/google/protobuf/pyext/__init__.py
index 0cb9329b2c..e69de29bb2 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/__init__.py
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/__init__.py
@@ -1,4 +0,0 @@
-import warnings
-
-with warnings.catch_warnings(action="ignore", category=DeprecationWarning):
- import google.protobuf.pyext._message
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
index a5254ce97b..acb907a3e9 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.cc
@@ -30,7 +30,9 @@
// Author: petar@google.com (Petar Petrov)
-#include <google/protobuf/pyext/descriptor.h>
+#include "google/protobuf/pyext/descriptor.h"
+
+#include "y_absl/log/absl_check.h"
#define PY_SSIZE_T_CLEAN
#include <Python.h>
@@ -40,15 +42,15 @@
#include <string>
#include <unordered_map>
-#include <google/protobuf/io/coded_stream.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/pyext/descriptor_containers.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/hash.h>
+#include "google/protobuf/descriptor.pb.h"
+#include "google/protobuf/dynamic_message.h"
+#include "google/protobuf/pyext/descriptor_containers.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "y_absl/strings/string_view.h"
+#include "google/protobuf/io/coded_stream.h"
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) \
@@ -404,7 +406,7 @@ PyObject* NewInternedDescriptor(PyTypeObject* type,
std::unordered_map<const void*, PyObject*>::iterator it =
interned_descriptors->find(descriptor);
if (it != interned_descriptors->end()) {
- GOOGLE_DCHECK(Py_TYPE(it->second) == type);
+ Y_ABSL_DCHECK(Py_TYPE(it->second) == type);
Py_INCREF(it->second);
return it->second;
}
@@ -542,6 +544,12 @@ static PyObject* GetConcreteClass(PyBaseDescriptor* self, void *closure) {
GetDescriptorPool_FromPool(
_GetDescriptor(self)->file()->pool())->py_message_factory,
_GetDescriptor(self)));
+
+ if (concrete_class == nullptr) {
+ PyErr_Clear();
+ return nullptr;
+ }
+
Py_XINCREF(concrete_class);
return concrete_class->AsPyObject();
}
@@ -1179,6 +1187,11 @@ static PyObject* GetHasOptions(PyBaseDescriptor *self, void *closure) {
Py_RETURN_FALSE;
}
}
+
+static PyObject* GetIsClosed(PyBaseDescriptor* self, void* closure) {
+ return PyBool_FromLong(_GetDescriptor(self)->is_closed());
+}
+
static int SetHasOptions(PyBaseDescriptor *self, PyObject *value,
void *closure) {
return CheckCalledFromGeneratedFile("has_options");
@@ -1222,6 +1235,7 @@ static PyGetSetDef Getters[] = {
"Containing type"},
{"has_options", (getter)GetHasOptions, (setter)SetHasOptions,
"Has Options"},
+ {"is_closed", (getter)GetIsClosed, nullptr, "If the enum is closed"},
{"_options", (getter) nullptr, (setter)SetOptions, "Options"},
{"_serialized_options", (getter) nullptr, (setter)SetSerializedOptions,
"Serialized Options"},
@@ -1793,7 +1807,8 @@ static PyObject* FindMethodByName(PyBaseDescriptor *self, PyObject* arg) {
}
const MethodDescriptor* method_descriptor =
- _GetDescriptor(self)->FindMethodByName(StringParam(name, name_size));
+ _GetDescriptor(self)->FindMethodByName(
+ y_absl::string_view(name, name_size));
if (method_descriptor == nullptr) {
PyErr_Format(PyExc_KeyError, "Couldn't find method %.200s", name);
return nullptr;
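The descriptor.cc hunk above exposes a new EnumDescriptor.is_closed property (True for proto2-style closed enums). A quick check against descriptor.proto; that this file is proto2 and therefore closed is an assumption about upstream protobuf, not something stated in this diff:

    from google.protobuf import descriptor_pb2

    enum_desc = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR.enum_types_by_name['Type']
    # descriptor.proto uses proto2 syntax, so its enums are expected to be closed.
    print(enum_desc.is_closed)  # expected: True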
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.h b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.h
index a383a7927a..8559c660e6 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor.h
@@ -36,15 +36,12 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/descriptor.h>
+#include "google/protobuf/descriptor.h"
namespace google {
namespace protobuf {
namespace python {
-// Should match the type of ConstStringParam.
-using StringParam = TProtoStringType;
-
extern PyTypeObject PyMessageDescriptor_Type;
extern PyTypeObject PyFieldDescriptor_Type;
extern PyTypeObject PyEnumDescriptor_Type;
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_containers.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_containers.cc
index b17d8348bf..b1b1f15733 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_containers.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_containers.cc
@@ -49,14 +49,21 @@
// because the Python API is based on C, and does not play well with C++
// inheritance.
+// clang-format off
#define PY_SSIZE_T_CLEAN
+// This inclusion must appear before all the others.
#include <Python.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/pyext/descriptor_containers.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include <string>
+
+#include "google/protobuf/pyext/descriptor_containers.h"
+// clang-format on
+
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "y_absl/strings/string_view.h"
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) \
@@ -75,9 +82,9 @@ struct PyContainer;
typedef int (*CountMethod)(PyContainer* self);
typedef const void* (*GetByIndexMethod)(PyContainer* self, int index);
typedef const void* (*GetByNameMethod)(PyContainer* self,
- ConstStringParam name);
+ y_absl::string_view name);
typedef const void* (*GetByCamelcaseNameMethod)(PyContainer* self,
- ConstStringParam name);
+ y_absl::string_view name);
typedef const void* (*GetByNumberMethod)(PyContainer* self, int index);
typedef PyObject* (*NewObjectFromItemMethod)(const void* descriptor);
typedef const TProtoStringType& (*GetItemNameMethod)(const void* descriptor);
@@ -175,8 +182,8 @@ static bool _GetItemByKey(PyContainer* self, PyObject* key, const void** item) {
}
return false;
}
- *item = self->container_def->get_by_name_fn(self,
- StringParam(name, name_size));
+ *item = self->container_def->get_by_name_fn(
+ self, y_absl::string_view(name, name_size));
return true;
}
case PyContainer::KIND_BYCAMELCASENAME: {
@@ -192,7 +199,7 @@ static bool _GetItemByKey(PyContainer* self, PyObject* key, const void** item) {
return false;
}
*item = self->container_def->get_by_camelcase_name_fn(
- self, StringParam(camelcase_name, name_size));
+ self, y_absl::string_view(camelcase_name, name_size));
return true;
}
case PyContainer::KIND_BYNUMBER: {
@@ -958,12 +965,12 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->field_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindFieldByName(name);
}
static const void* GetByCamelcaseName(PyContainer* self,
- ConstStringParam name) {
+ y_absl::string_view name) {
return GetDescriptor(self)->FindFieldByCamelcaseName(name);
}
@@ -1028,7 +1035,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->nested_type_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindNestedTypeByName(name);
}
@@ -1080,7 +1087,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->enum_type_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindEnumTypeByName(name);
}
@@ -1143,7 +1150,7 @@ static int Count(PyContainer* self) {
return count;
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindEnumValueByName(name);
}
@@ -1194,7 +1201,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->extension_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindExtensionByName(name);
}
@@ -1246,7 +1253,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->oneof_decl_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindOneofByName(name);
}
@@ -1304,7 +1311,7 @@ static const void* GetByIndex(PyContainer* self, int index) {
return GetDescriptor(self)->value(index);
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindValueByName(name);
}
@@ -1408,7 +1415,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->method_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindMethodByName(name);
}
@@ -1462,7 +1469,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->message_type_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindMessageTypeByName(name);
}
@@ -1502,7 +1509,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->enum_type_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindEnumTypeByName(name);
}
@@ -1542,7 +1549,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->extension_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindExtensionByName(name);
}
@@ -1582,7 +1589,7 @@ static int Count(PyContainer* self) {
return GetDescriptor(self)->service_count();
}
-static const void* GetByName(PyContainer* self, ConstStringParam name) {
+static const void* GetByName(PyContainer* self, y_absl::string_view name) {
return GetDescriptor(self)->FindServiceByName(name);
}
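The descriptor_containers.cc changes only swap ConstStringParam for y_absl::string_view in the by-name lookup callbacks; the Python-visible behaviour of the descriptor mappings stays the same. A small sketch of the two lookup paths those callbacks serve, using the readily available descriptor_pb2 module:

    from google.protobuf import descriptor_pb2

    desc = descriptor_pb2.FieldDescriptorProto.DESCRIPTOR
    by_name = desc.fields_by_name['json_name']
    by_camel = desc.fields_by_camelcase_name['jsonName']
    # Both containers resolve to the same interned FieldDescriptor.
    print(by_name is by_camel)  # True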
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.cc
index 14f5bf2230..abd1269efc 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.cc
@@ -31,15 +31,16 @@
// This file defines a C++ DescriptorDatabase, which wraps a Python Database
// and delegate all its operations to Python methods.
-#include <google/protobuf/pyext/descriptor_database.h>
+#include "google/protobuf/pyext/descriptor_database.h"
#include <cstdint>
+#include <string>
+#include <vector>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include "google/protobuf/descriptor.pb.h"
+#include "y_absl/log/absl_log.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
namespace google {
namespace protobuf {
@@ -61,7 +62,7 @@ static bool GetFileDescriptorProto(PyObject* py_descriptor,
// Expected error: item was simply not found.
PyErr_Clear();
} else {
- GOOGLE_LOG(ERROR) << "DescriptorDatabase method raised an error";
+ Y_ABSL_LOG(ERROR) << "DescriptorDatabase method raised an error";
PyErr_Print();
}
return false;
@@ -85,7 +86,7 @@ static bool GetFileDescriptorProto(PyObject* py_descriptor,
ScopedPyObjectPtr serialized_pb(
PyObject_CallMethod(py_descriptor, "SerializeToString", nullptr));
if (serialized_pb == nullptr) {
- GOOGLE_LOG(ERROR)
+ Y_ABSL_LOG(ERROR)
<< "DescriptorDatabase method did not return a FileDescriptorProto";
PyErr_Print();
return false;
@@ -93,14 +94,14 @@ static bool GetFileDescriptorProto(PyObject* py_descriptor,
char* str;
Py_ssize_t len;
if (PyBytes_AsStringAndSize(serialized_pb.get(), &str, &len) < 0) {
- GOOGLE_LOG(ERROR)
+ Y_ABSL_LOG(ERROR)
<< "DescriptorDatabase method did not return a FileDescriptorProto";
PyErr_Print();
return false;
}
FileDescriptorProto file_proto;
if (!file_proto.ParseFromArray(str, len)) {
- GOOGLE_LOG(ERROR)
+ Y_ABSL_LOG(ERROR)
<< "DescriptorDatabase method did not return a FileDescriptorProto";
return false;
}
@@ -171,9 +172,8 @@ bool PyDescriptorDatabase::FindAllExtensionNumbers(
ScopedPyObjectPtr item(PySequence_GetItem(py_list.get(), i));
item_value = PyLong_AsLong(item.get());
if (item_value < 0) {
- GOOGLE_LOG(ERROR)
- << "FindAllExtensionNumbers method did not return "
- << "valid extension numbers.";
+ Y_ABSL_LOG(ERROR) << "FindAllExtensionNumbers method did not return "
+ << "valid extension numbers.";
PyErr_Print();
return false;
}
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.h b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.h
index 08318ff98f..81040b2172 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_database.h
@@ -34,7 +34,10 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/descriptor_database.h>
+#include <string>
+#include <vector>
+
+#include "google/protobuf/descriptor_database.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
index 2dd47bdb23..4ab1f9bd19 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.cc
@@ -30,19 +30,27 @@
// Implements the DescriptorPool, which collects all descriptors.
+#include <string>
#include <unordered_map>
+#include <utility>
+#include <vector>
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_database.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/hash.h>
+#include "google/protobuf/descriptor.pb.h"
+#include "y_absl/strings/str_cat.h"
+#include "y_absl/strings/str_replace.h"
+#include "y_absl/strings/string_view.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_database.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+
+// Must be included last.
+#include "google/protobuf/port_def.inc"
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) \
@@ -69,19 +77,19 @@ class BuildFileErrorCollector : public DescriptorPool::ErrorCollector {
public:
BuildFileErrorCollector() : error_message(""), had_errors_(false) {}
- void AddError(const TProtoStringType& filename, const TProtoStringType& element_name,
- const Message* descriptor, ErrorLocation location,
- const TProtoStringType& message) override {
+ void RecordError(y_absl::string_view filename, y_absl::string_view element_name,
+ const Message* descriptor, ErrorLocation location,
+ y_absl::string_view message) override {
// Replicates the logging behavior that happens in the C++ implementation
// when an error collector is not passed in.
if (!had_errors_) {
- error_message +=
- ("Invalid proto descriptor for file \"" + filename + "\":\n");
+ y_absl::StrAppend(&error_message, "Invalid proto descriptor for file \"",
+ filename, "\":\n");
had_errors_ = true;
}
// As this only happens on failure and will result in the program not
// running at all, no effort is made to optimize this string manipulation.
- error_message += (" " + element_name + ": " + message + "\n");
+ y_absl::StrAppend(&error_message, " ", element_name, ": ", message, "\n");
}
void Clear() {
@@ -151,20 +159,28 @@ static PyDescriptorPool* PyDescriptorPool_NewWithUnderlay(
}
static PyDescriptorPool* PyDescriptorPool_NewWithDatabase(
- DescriptorDatabase* database) {
+ DescriptorDatabase* database,
+ bool use_deprecated_legacy_json_field_conflicts) {
PyDescriptorPool* cpool = _CreateDescriptorPool();
if (cpool == nullptr) {
return nullptr;
}
+ DescriptorPool* pool;
if (database != nullptr) {
cpool->error_collector = new BuildFileErrorCollector();
- cpool->pool = new DescriptorPool(database, cpool->error_collector);
+ pool = new DescriptorPool(database, cpool->error_collector);
cpool->is_mutable = false;
cpool->database = database;
} else {
- cpool->pool = new DescriptorPool();
+ pool = new DescriptorPool();
cpool->is_mutable = true;
}
+ if (use_deprecated_legacy_json_field_conflicts) {
+ PROTOBUF_IGNORE_DEPRECATION_START
+ pool->UseDeprecatedLegacyJsonFieldConflicts();
+ PROTOBUF_IGNORE_DEPRECATION_STOP
+ }
+ cpool->pool = pool;
cpool->is_owned = true;
if (!descriptor_pool_map->insert(std::make_pair(cpool->pool, cpool)).second) {
@@ -179,6 +195,7 @@ static PyDescriptorPool* PyDescriptorPool_NewWithDatabase(
// The public DescriptorPool constructor.
static PyObject* New(PyTypeObject* type,
PyObject* args, PyObject* kwargs) {
+ int use_deprecated_legacy_json_field_conflicts = 0;
static const char* kwlist[] = {"descriptor_db", nullptr};
PyObject* py_database = nullptr;
if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O",
@@ -189,8 +206,8 @@ static PyObject* New(PyTypeObject* type,
if (py_database && py_database != Py_None) {
database = new PyDescriptorDatabase(py_database);
}
- return reinterpret_cast<PyObject*>(
- PyDescriptorPool_NewWithDatabase(database));
+ return reinterpret_cast<PyObject*>(PyDescriptorPool_NewWithDatabase(
+ database, use_deprecated_legacy_json_field_conflicts));
}
static void Dealloc(PyObject* pself) {
@@ -249,7 +266,7 @@ static PyObject* FindMessageByName(PyObject* self, PyObject* arg) {
const Descriptor* message_descriptor =
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindMessageTypeByName(
- StringParam(name, name_size));
+ y_absl::string_view(name, name_size));
if (message_descriptor == nullptr) {
return SetErrorFromCollector(
@@ -273,7 +290,7 @@ static PyObject* FindFileByName(PyObject* self, PyObject* arg) {
PyDescriptorPool* py_pool = reinterpret_cast<PyDescriptorPool*>(self);
const FileDescriptor* file_descriptor =
- py_pool->pool->FindFileByName(StringParam(name, name_size));
+ py_pool->pool->FindFileByName(y_absl::string_view(name, name_size));
if (file_descriptor == nullptr) {
return SetErrorFromCollector(py_pool->error_collector, name, "file");
@@ -289,7 +306,7 @@ PyObject* FindFieldByName(PyDescriptorPool* self, PyObject* arg) {
}
const FieldDescriptor* field_descriptor =
- self->pool->FindFieldByName(StringParam(name, name_size));
+ self->pool->FindFieldByName(y_absl::string_view(name, name_size));
if (field_descriptor == nullptr) {
return SetErrorFromCollector(self->error_collector, name, "field");
}
@@ -310,7 +327,7 @@ PyObject* FindExtensionByName(PyDescriptorPool* self, PyObject* arg) {
}
const FieldDescriptor* field_descriptor =
- self->pool->FindExtensionByName(StringParam(name, name_size));
+ self->pool->FindExtensionByName(y_absl::string_view(name, name_size));
if (field_descriptor == nullptr) {
return SetErrorFromCollector(self->error_collector, name,
"extension field");
@@ -332,7 +349,7 @@ PyObject* FindEnumTypeByName(PyDescriptorPool* self, PyObject* arg) {
}
const EnumDescriptor* enum_descriptor =
- self->pool->FindEnumTypeByName(StringParam(name, name_size));
+ self->pool->FindEnumTypeByName(y_absl::string_view(name, name_size));
if (enum_descriptor == nullptr) {
return SetErrorFromCollector(self->error_collector, name, "enum");
}
@@ -353,7 +370,7 @@ PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg) {
}
const OneofDescriptor* oneof_descriptor =
- self->pool->FindOneofByName(StringParam(name, name_size));
+ self->pool->FindOneofByName(y_absl::string_view(name, name_size));
if (oneof_descriptor == nullptr) {
return SetErrorFromCollector(self->error_collector, name, "oneof");
}
@@ -375,7 +392,7 @@ static PyObject* FindServiceByName(PyObject* self, PyObject* arg) {
const ServiceDescriptor* service_descriptor =
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindServiceByName(
- StringParam(name, name_size));
+ y_absl::string_view(name, name_size));
if (service_descriptor == nullptr) {
return SetErrorFromCollector(
reinterpret_cast<PyDescriptorPool*>(self)->error_collector, name,
@@ -395,7 +412,7 @@ static PyObject* FindMethodByName(PyObject* self, PyObject* arg) {
const MethodDescriptor* method_descriptor =
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindMethodByName(
- StringParam(name, name_size));
+ y_absl::string_view(name, name_size));
if (method_descriptor == nullptr) {
return SetErrorFromCollector(
reinterpret_cast<PyDescriptorPool*>(self)->error_collector, name,
@@ -415,7 +432,7 @@ static PyObject* FindFileContainingSymbol(PyObject* self, PyObject* arg) {
const FileDescriptor* file_descriptor =
reinterpret_cast<PyDescriptorPool*>(self)->pool->FindFileContainingSymbol(
- StringParam(name, name_size));
+ y_absl::string_view(name, name_size));
if (file_descriptor == nullptr) {
return SetErrorFromCollector(
reinterpret_cast<PyDescriptorPool*>(self)->error_collector, name,
@@ -818,3 +835,5 @@ PyObject* PyDescriptorPool_FromPool(const DescriptorPool* pool) {
} // namespace python
} // namespace protobuf
} // namespace google
+
+#include "google/protobuf/port_undef.inc"
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
index 5d3c3a95cc..851cf31e87 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/descriptor_pool.h
@@ -35,7 +35,7 @@
#include <Python.h>
#include <unordered_map>
-#include <google/protobuf/descriptor.h>
+#include "google/protobuf/descriptor.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.cc
index 66703da898..4fe6863679 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.cc
@@ -31,23 +31,23 @@
// Author: anuraag@google.com (Anuraag Agrawal)
// Author: tibell@google.com (Johan Tibell)
-#include <google/protobuf/pyext/extension_dict.h>
+#include "google/protobuf/pyext/extension_dict.h"
#include <cstdint>
#include <memory>
-
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/repeated_composite_container.h>
-#include <google/protobuf/pyext/repeated_scalar_container.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include <vector>
+
+#include "google/protobuf/descriptor.pb.h"
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/dynamic_message.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/repeated_composite_container.h"
+#include "google/protobuf/pyext/repeated_scalar_container.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "y_absl/strings/string_view.h"
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) \
@@ -125,8 +125,9 @@ static void DeallocExtensionIterator(PyObject* _self) {
ExtensionIterator* self = reinterpret_cast<ExtensionIterator*>(_self);
self->fields.clear();
Py_XDECREF(self->extension_dict);
+ freefunc tp_free = Py_TYPE(_self)->tp_free;
self->~ExtensionIterator();
- Py_TYPE(_self)->tp_free(_self);
+ (*tp_free)(_self);
}
PyObject* subscript(ExtensionDict* self, PyObject* key) {
@@ -238,11 +239,11 @@ PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* arg) {
PyDescriptorPool* pool = cmessage::GetFactoryForMessage(self->parent)->pool;
const FieldDescriptor* message_extension =
- pool->pool->FindExtensionByName(StringParam(name, name_size));
+ pool->pool->FindExtensionByName(y_absl::string_view(name, name_size));
if (message_extension == nullptr) {
// Is is the name of a message set extension?
const Descriptor* message_descriptor =
- pool->pool->FindMessageTypeByName(StringParam(name, name_size));
+ pool->pool->FindMessageTypeByName(y_absl::string_view(name, name_size));
if (message_descriptor && message_descriptor->extension_count() > 0) {
const FieldDescriptor* extension = message_descriptor->extension(0);
if (extension->is_extension() &&
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.h b/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.h
index 86d2451a00..c5c2875936 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/extension_dict.h
@@ -37,7 +37,7 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/field.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/field.cc
index 0d3b0b9607..daa95d0069 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/field.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/field.cc
@@ -28,11 +28,11 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include <google/protobuf/pyext/field.h>
+#include "google/protobuf/pyext/field.h"
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.cc
index 17bf0988ed..032cfa8577 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.cc
@@ -30,21 +30,19 @@
// Author: haberman@google.com (Josh Haberman)
-#include <google/protobuf/pyext/map_container.h>
+#include "google/protobuf/pyext/map_container.h"
#include <cstdint>
#include <memory>
+#include <string>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/map.h>
-#include <google/protobuf/map_field.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/repeated_composite_container.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/map_util.h>
+#include "google/protobuf/map.h"
+#include "google/protobuf/map_field.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/repeated_composite_container.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
namespace google {
namespace protobuf {
@@ -55,7 +53,7 @@ namespace python {
class MapReflectionFriend {
public:
// Methods that are in common between the map types.
- static PyObject* Contains(PyObject* _self, PyObject* key);
+ static int Contains(PyObject* _self, PyObject* key);
static Py_ssize_t Length(PyObject* _self);
static PyObject* GetIterator(PyObject *_self);
static PyObject* IterNext(PyObject* _self);
@@ -126,27 +124,27 @@ static bool PythonToMapKey(MapContainer* self, PyObject* obj, MapKey* key) {
self->parent_field_descriptor->message_type()->map_key();
switch (field_descriptor->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
+ PROTOBUF_CHECK_GET_INT32(obj, value, false);
key->SetInt32Value(value);
break;
}
case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(obj, value, false);
+ PROTOBUF_CHECK_GET_INT64(obj, value, false);
key->SetInt64Value(value);
break;
}
case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(obj, value, false);
+ PROTOBUF_CHECK_GET_UINT32(obj, value, false);
key->SetUInt32Value(value);
break;
}
case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(obj, value, false);
+ PROTOBUF_CHECK_GET_UINT64(obj, value, false);
key->SetUInt64Value(value);
break;
}
case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(obj, value, false);
+ PROTOBUF_CHECK_GET_BOOL(obj, value, false);
key->SetBoolValue(value);
break;
}
@@ -232,37 +230,37 @@ static bool PythonToMapValueRef(MapContainer* self, PyObject* obj,
self->parent_field_descriptor->message_type()->map_value();
switch (field_descriptor->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
+ PROTOBUF_CHECK_GET_INT32(obj, value, false);
value_ref->SetInt32Value(value);
return true;
}
case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(obj, value, false);
+ PROTOBUF_CHECK_GET_INT64(obj, value, false);
value_ref->SetInt64Value(value);
return true;
}
case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(obj, value, false);
+ PROTOBUF_CHECK_GET_UINT32(obj, value, false);
value_ref->SetUInt32Value(value);
return true;
}
case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(obj, value, false);
+ PROTOBUF_CHECK_GET_UINT64(obj, value, false);
value_ref->SetUInt64Value(value);
return true;
}
case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(obj, value, false);
+ PROTOBUF_CHECK_GET_FLOAT(obj, value, false);
value_ref->SetFloatValue(value);
return true;
}
case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(obj, value, false);
+ PROTOBUF_CHECK_GET_DOUBLE(obj, value, false);
value_ref->SetDoubleValue(value);
return true;
}
case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(obj, value, false);
+ PROTOBUF_CHECK_GET_BOOL(obj, value, false);
value_ref->SetBoolValue(value);
return true;
}
@@ -275,7 +273,7 @@ static bool PythonToMapValueRef(MapContainer* self, PyObject* obj,
return true;
}
case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(obj, value, false);
+ PROTOBUF_CHECK_GET_INT32(obj, value, false);
if (allow_unknown_enum_values) {
value_ref->SetEnumValue(value);
return true;
@@ -354,7 +352,7 @@ PyObject* MapReflectionFriend::MergeFrom(PyObject* _self, PyObject* arg) {
Py_RETURN_NONE;
}
-PyObject* MapReflectionFriend::Contains(PyObject* _self, PyObject* key) {
+int MapReflectionFriend::Contains(PyObject* _self, PyObject* key) {
MapContainer* self = GetMap(_self);
const Message* message = self->parent->message;
@@ -362,14 +360,14 @@ PyObject* MapReflectionFriend::Contains(PyObject* _self, PyObject* key) {
MapKey map_key;
if (!PythonToMapKey(self, key, &map_key)) {
- return nullptr;
+ return -1;
}
if (reflection->ContainsMapKey(*message, self->parent_field_descriptor,
map_key)) {
- Py_RETURN_TRUE;
+ return 1;
} else {
- Py_RETURN_FALSE;
+ return 0;
}
}
@@ -468,12 +466,12 @@ static PyObject* ScalarMapGet(PyObject* self, PyObject* args,
return nullptr;
}
- ScopedPyObjectPtr is_present(MapReflectionFriend::Contains(self, key));
- if (is_present.get() == nullptr) {
+ auto is_present = MapReflectionFriend::Contains(self, key);
+ if (is_present < 0) {
return nullptr;
}
- if (PyObject_IsTrue(is_present.get())) {
+ if (is_present == 1) {
return MapReflectionFriend::ScalarMapGetItem(self, key);
} else {
if (default_value != nullptr) {
@@ -530,8 +528,6 @@ static void ScalarMapDealloc(PyObject* _self) {
}
static PyMethodDef ScalarMapMethods[] = {
- {"__contains__", MapReflectionFriend::Contains, METH_O,
- "Tests whether a key is a member of the map."},
{"clear", (PyCFunction)Clear, METH_NOARGS,
"Removes all elements from the map."},
{"get", (PyCFunction)ScalarMapGet, METH_VARARGS | METH_KEYWORDS,
@@ -556,6 +552,7 @@ static PyType_Slot ScalarMapContainer_Type_slots[] = {
{Py_mp_subscript, (void*)MapReflectionFriend::ScalarMapGetItem},
{Py_mp_ass_subscript, (void*)MapReflectionFriend::ScalarMapSetItem},
{Py_tp_methods, (void*)ScalarMapMethods},
+ {Py_sq_contains, (void*)MapReflectionFriend::Contains},
{Py_tp_iter, (void*)MapReflectionFriend::GetIterator},
{Py_tp_repr, (void*)MapReflectionFriend::ScalarMapToStr},
{0, nullptr},
@@ -715,12 +712,12 @@ PyObject* MessageMapGet(PyObject* self, PyObject* args, PyObject* kwargs) {
return nullptr;
}
- ScopedPyObjectPtr is_present(MapReflectionFriend::Contains(self, key));
- if (is_present.get() == nullptr) {
+ auto is_present = MapReflectionFriend::Contains(self, key);
+ if (is_present < 0) {
return nullptr;
}
- if (PyObject_IsTrue(is_present.get())) {
+ if (is_present == 1) {
return MapReflectionFriend::MessageMapGetItem(self, key);
} else {
if (default_value != nullptr) {
@@ -748,8 +745,6 @@ static void MessageMapDealloc(PyObject* _self) {
}
static PyMethodDef MessageMapMethods[] = {
- {"__contains__", (PyCFunction)MapReflectionFriend::Contains, METH_O,
- "Tests whether the map contains this element."},
{"clear", (PyCFunction)Clear, METH_NOARGS,
"Removes all elements from the map."},
{"get", (PyCFunction)MessageMapGet, METH_VARARGS | METH_KEYWORDS,
@@ -776,6 +771,7 @@ static PyType_Slot MessageMapContainer_Type_slots[] = {
{Py_mp_subscript, (void*)MapReflectionFriend::MessageMapGetItem},
{Py_mp_ass_subscript, (void*)MapReflectionFriend::MessageMapSetItem},
{Py_tp_methods, (void*)MessageMapMethods},
+ {Py_sq_contains, (void*)MapReflectionFriend::Contains},
{Py_tp_iter, (void*)MapReflectionFriend::GetIterator},
{Py_tp_repr, (void*)MapReflectionFriend::MessageMapToStr},
{0, nullptr}};
@@ -901,6 +897,33 @@ PyTypeObject MapIterator_Type = {
nullptr, // tp_init
};
+
+PyTypeObject* PyUpb_AddClassWithRegister(PyType_Spec* spec,
+ PyObject* virtual_base,
+ const char** methods) {
+ PyObject* type = PyType_FromSpec(spec);
+ PyObject* ret1 = PyObject_CallMethod(virtual_base, "register", "O", type);
+ if (!ret1) {
+ Py_XDECREF(type);
+ return NULL;
+ }
+ for (size_t i = 0; methods[i] != NULL; i++) {
+ PyObject* method = PyObject_GetAttrString(virtual_base, methods[i]);
+ if (!method) {
+ Py_XDECREF(type);
+ return NULL;
+ }
+ int ret2 = PyObject_SetAttrString(type, methods[i], method);
+ if (ret2 < 0) {
+ Py_XDECREF(type);
+ return NULL;
+ }
+ }
+
+ return (PyTypeObject*)type;
+}
+
+
bool InitMapContainers() {
// ScalarMapContainer_Type derives from our MutableMapping type.
ScopedPyObjectPtr abc(PyImport_ImportModule("collections.abc"));
@@ -915,20 +938,23 @@ bool InitMapContainers() {
}
Py_INCREF(mutable_mapping.get());
- ScopedPyObjectPtr bases(PyTuple_Pack(1, mutable_mapping.get()));
+ ScopedPyObjectPtr bases(Py_BuildValue("O", mutable_mapping.get()));
if (bases == nullptr) {
return false;
}
+ const char* methods[] = {"keys", "items", "values", "__eq__", "__ne__",
+ "pop", "popitem", "update", "setdefault", NULL};
+
ScalarMapContainer_Type = reinterpret_cast<PyTypeObject*>(
- PyType_FromSpecWithBases(&ScalarMapContainer_Type_spec, bases.get()));
+ PyUpb_AddClassWithRegister(&ScalarMapContainer_Type_spec, bases.get(), methods));
if (PyType_Ready(&MapIterator_Type) < 0) {
return false;
}
MessageMapContainer_Type = reinterpret_cast<PyTypeObject*>(
- PyType_FromSpecWithBases(&MessageMapContainer_Type_spec, bases.get()));
+ PyUpb_AddClassWithRegister(&MessageMapContainer_Type_spec, bases.get(), methods));
return true;
}
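In map_container.cc, __contains__ moves from a METH_O method to the Py_sq_contains slot (Contains now returns an int), and PyUpb_AddClassWithRegister registers the container types against collections.abc.MutableMapping and copies its mixin methods onto them. Membership tests on map fields keep their Python-level behaviour; a quick check with the Struct well-known type, whose fields member is a map<string, Value>:

    from google.protobuf import struct_pb2

    s = struct_pb2.Struct()
    s.fields['answer'].number_value = 42  # message-map __getitem__ inserts the key

    # Both checks go through the map container's contains slot.
    print('answer' in s.fields)   # True
    print('missing' in s.fields)  # False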
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.h b/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.h
index af334d2e77..d90d3a554a 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/map_container.h
@@ -36,9 +36,9 @@
#include <cstdint>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
index 0a1474a233..fc0257acdd 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message.cc
@@ -31,17 +31,19 @@
// Author: anuraag@google.com (Anuraag Agrawal)
// Author: tibell@google.com (Johan Tibell)
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/pyext/message.h"
#include <structmember.h> // A Python header file.
#include <cstdint>
#include <map>
#include <memory>
+#include <set>
#include <string>
#include <vector>
-#include <google/protobuf/stubs/strutil.h>
+#include "y_absl/log/absl_check.h"
+#include "y_absl/strings/match.h"
#ifndef PyVarObject_HEAD_INIT
#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
@@ -49,33 +51,32 @@
#ifndef Py_TYPE
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
#endif
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/io/coded_stream.h>
-#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
-#include <google/protobuf/descriptor.pb.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/text_format.h>
-#include <google/protobuf/unknown_field_set.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/extension_dict.h>
-#include <google/protobuf/pyext/field.h>
-#include <google/protobuf/pyext/map_container.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/repeated_composite_container.h>
-#include <google/protobuf/pyext/repeated_scalar_container.h>
-#include <google/protobuf/pyext/safe_numerics.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/pyext/unknown_field_set.h>
-#include <google/protobuf/pyext/unknown_fields.h>
-#include <google/protobuf/util/message_differencer.h>
-#include <google/protobuf/io/strtod.h>
-#include <google/protobuf/stubs/map_util.h>
+#include "google/protobuf/stubs/common.h"
+#include "google/protobuf/descriptor.pb.h"
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/text_format.h"
+#include "google/protobuf/unknown_field_set.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/extension_dict.h"
+#include "google/protobuf/pyext/field.h"
+#include "google/protobuf/pyext/map_container.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/repeated_composite_container.h"
+#include "google/protobuf/pyext/repeated_scalar_container.h"
+#include "google/protobuf/pyext/safe_numerics.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "google/protobuf/pyext/unknown_field_set.h"
+#include "google/protobuf/pyext/unknown_fields.h"
+#include "google/protobuf/util/message_differencer.h"
+#include "y_absl/strings/string_view.h"
+#include "google/protobuf/io/coded_stream.h"
+#include "google/protobuf/io/strtod.h"
+#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
// clang-format off
-#include <google/protobuf/port_def.inc>
+#include "google/protobuf/port_def.inc"
// clang-format on
#define PyString_AsString(ob) \
@@ -88,6 +89,9 @@
: 0) \
: PyBytes_AsStringAndSize(ob, (charpp), (sizep)))
+#define PROTOBUF_PYTHON_PUBLIC "google.protobuf"
+#define PROTOBUF_PYTHON_INTERNAL "google.protobuf.internal"
+
namespace google {
namespace protobuf {
namespace python {
@@ -242,12 +246,12 @@ static PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
ScopedPyObjectPtr new_args;
if (WKT_classes == nullptr) {
- ScopedPyObjectPtr well_known_types(PyImport_ImportModule(
- "google.protobuf.internal.well_known_types"));
- GOOGLE_DCHECK(well_known_types != nullptr);
+ ScopedPyObjectPtr well_known_types(
+ PyImport_ImportModule(PROTOBUF_PYTHON_INTERNAL ".well_known_types"));
+ Y_ABSL_DCHECK(well_known_types != nullptr);
WKT_classes = PyObject_GetAttrString(well_known_types.get(), "WKTBASES");
- GOOGLE_DCHECK(WKT_classes != nullptr);
+ Y_ABSL_DCHECK(WKT_classes != nullptr);
}
PyObject* well_known_class = PyDict_GetItemString(
@@ -401,7 +405,7 @@ static PyObject* GetClassAttribute(CMessageClass *self, PyObject* name) {
Py_ssize_t attr_size;
static const char kSuffix[] = "_FIELD_NUMBER";
if (PyString_AsStringAndSize(name, &attr, &attr_size) >= 0 &&
- HasSuffixString(StringPiece(attr, attr_size), kSuffix)) {
+ y_absl::EndsWith(y_absl::string_view(attr, attr_size), kSuffix)) {
TProtoStringType field_name(attr, attr_size - sizeof(kSuffix) + 1);
LowerString(&field_name);
@@ -672,8 +676,8 @@ bool IsValidUTF8(PyObject* obj) {
bool AllowInvalidUTF8(const FieldDescriptor* field) { return false; }
PyObject* CheckString(PyObject* arg, const FieldDescriptor* descriptor) {
- GOOGLE_DCHECK(descriptor->type() == FieldDescriptor::TYPE_STRING ||
- descriptor->type() == FieldDescriptor::TYPE_BYTES);
+ Y_ABSL_DCHECK(descriptor->type() == FieldDescriptor::TYPE_STRING ||
+ descriptor->type() == FieldDescriptor::TYPE_BYTES);
if (descriptor->type() == FieldDescriptor::TYPE_STRING) {
if (!PyBytes_Check(arg) && !PyUnicode_Check(arg)) {
FormatTypeError(arg, "bytes, unicode");
@@ -776,7 +780,7 @@ bool CheckFieldBelongsToMessage(const FieldDescriptor* field_descriptor,
namespace cmessage {
PyMessageFactory* GetFactoryForMessage(CMessage* message) {
- GOOGLE_DCHECK(PyObject_TypeCheck(message, CMessage_Type));
+ Y_ABSL_DCHECK(PyObject_TypeCheck(message, CMessage_Type));
return reinterpret_cast<CMessageClass*>(Py_TYPE(message))->py_message_factory;
}
@@ -848,7 +852,7 @@ int AssureWritable(CMessage* self) {
}
// Toplevel messages are always mutable.
- GOOGLE_DCHECK(self->parent);
+ Y_ABSL_DCHECK(self->parent);
if (AssureWritable(self->parent) == -1) {
return -1;
@@ -906,7 +910,7 @@ static PyObject* GetIntegerEnumValue(const FieldDescriptor& descriptor,
return nullptr;
}
const EnumValueDescriptor* enum_value_descriptor =
- enum_descriptor->FindValueByName(StringParam(enum_label, size));
+ enum_descriptor->FindValueByName(y_absl::string_view(enum_label, size));
if (enum_value_descriptor == nullptr) {
PyErr_Format(PyExc_ValueError, "unknown enum label \"%s\"", enum_label);
return nullptr;
@@ -980,7 +984,7 @@ int DeleteRepeatedField(
}
Arena* arena = Arena::InternalGetArenaForAllocation(message);
- GOOGLE_DCHECK_EQ(arena, nullptr)
+ Y_ABSL_DCHECK_EQ(arena, nullptr)
<< "python protobuf is expected to be allocated from heap";
// Remove items, starting from the end.
for (; length > to; length--) {
@@ -993,9 +997,10 @@ int DeleteRepeatedField(
//
// To work around a debug hardening (PROTOBUF_FORCE_COPY_IN_RELEASE),
// explicitly use UnsafeArenaReleaseLast. To not break rare use cases where
- // arena is used, we fallback to ReleaseLast (but GOOGLE_DCHECK to find/fix it).
+ // arena is used, we fallback to ReleaseLast (but Y_ABSL_DCHECK to find/fix
+ // it).
//
- // Note that arena is likely null and GOOGLE_DCHECK and ReleaesLast might be
+ // Note that arena is likely null and Y_ABSL_DCHECK and ReleaseLast might be
// redundant. The current approach takes extra cautious path not to disrupt
// production.
Message* sub_message =
@@ -1092,7 +1097,8 @@ int InitAttributes(CMessage* self, PyObject* args, PyObject* kwargs) {
reinterpret_cast<RepeatedCompositeContainer*>(container.get());
ScopedPyObjectPtr iter(PyObject_GetIter(value));
if (iter == nullptr) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
+ PyErr_Format(PyExc_TypeError, "Value of field '%s' must be iterable",
+ descriptor->name().c_str());
return -1;
}
ScopedPyObjectPtr next;
@@ -1121,7 +1127,8 @@ int InitAttributes(CMessage* self, PyObject* args, PyObject* kwargs) {
reinterpret_cast<RepeatedScalarContainer*>(container.get());
ScopedPyObjectPtr iter(PyObject_GetIter(value));
if (iter == nullptr) {
- PyErr_SetString(PyExc_TypeError, "Value must be iterable");
+ PyErr_Format(PyExc_TypeError, "Value of field '%s' must be iterable",
+ descriptor->name().c_str());
return -1;
}
ScopedPyObjectPtr next;
@@ -1258,8 +1265,8 @@ static void Dealloc(CMessage* self) {
PyObject_ClearWeakRefs(reinterpret_cast<PyObject*>(self));
}
// At this point all dependent objects have been removed.
- GOOGLE_DCHECK(!self->child_submessages || self->child_submessages->empty());
- GOOGLE_DCHECK(!self->composite_fields || self->composite_fields->empty());
+ Y_ABSL_DCHECK(!self->child_submessages || self->child_submessages->empty());
+ Y_ABSL_DCHECK(!self->composite_fields || self->composite_fields->empty());
delete self->child_submessages;
delete self->composite_fields;
if (self->unknown_field_set) {
@@ -1333,7 +1340,7 @@ int HasFieldByDescriptor(CMessage* self,
}
const FieldDescriptor* FindFieldWithOneofs(const Message* message,
- ConstStringParam field_name,
+ y_absl::string_view field_name,
bool* in_oneof) {
*in_oneof = false;
const Descriptor* descriptor = message->GetDescriptor();
@@ -1382,8 +1389,8 @@ PyObject* HasField(CMessage* self, PyObject* arg) {
Message* message = self->message;
bool is_in_oneof;
- const FieldDescriptor* field_descriptor =
- FindFieldWithOneofs(message, StringParam(field_name, size), &is_in_oneof);
+ const FieldDescriptor* field_descriptor = FindFieldWithOneofs(
+ message, y_absl::string_view(field_name, size), &is_in_oneof);
if (field_descriptor == nullptr) {
if (!is_in_oneof) {
PyErr_Format(PyExc_ValueError, "Protocol message %s has no field %s.",
@@ -1567,7 +1574,7 @@ PyObject* ClearField(CMessage* self, PyObject* arg) {
AssureWritable(self);
bool is_in_oneof;
const FieldDescriptor* field_descriptor = FindFieldWithOneofs(
- self->message, StringParam(field_name, field_size), &is_in_oneof);
+ self->message, y_absl::string_view(field_name, field_size), &is_in_oneof);
if (field_descriptor == nullptr) {
if (is_in_oneof) {
// We gave the name of a oneof, and none of its fields are set.
@@ -1704,7 +1711,7 @@ static PyObject* InternalSerializeToString(
coded_out.SetSerializationDeterministic(deterministic);
}
self->message->SerializeWithCachedSizes(&coded_out);
- GOOGLE_CHECK(!coded_out.HadError());
+ Y_ABSL_CHECK(!coded_out.HadError());
return result;
}
@@ -1877,7 +1884,7 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
const char* ptr;
internal::ParseContext ctx(
depth, false, &ptr,
- StringPiece(static_cast<const char*>(data.buf), data.len));
+ y_absl::string_view(static_cast<const char*>(data.buf), data.len));
PyBuffer_Release(&data);
ctx.data().pool = factory->pool->pool;
ctx.data().factory = factory->message_factory;
@@ -1967,7 +1974,7 @@ static PyObject* WhichOneof(CMessage* self, PyObject* arg) {
if (PyString_AsStringAndSize(arg, &name_data, &name_size) < 0) return nullptr;
const OneofDescriptor* oneof_desc =
self->message->GetDescriptor()->FindOneofByName(
- StringParam(name_data, name_size));
+ y_absl::string_view(name_data, name_size));
if (oneof_desc == nullptr) {
PyErr_Format(PyExc_ValueError,
"Protocol message has no oneof \"%s\" field.", name_data);
@@ -2244,37 +2251,37 @@ int InternalSetNonOneofScalar(
switch (field_descriptor->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT32(arg, value, -1);
reflection->SetInt32(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT64(arg, value, -1);
reflection->SetInt64(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_UINT32(arg, value, -1);
reflection->SetUInt32(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(arg, value, -1);
+ PROTOBUF_CHECK_GET_UINT64(arg, value, -1);
reflection->SetUInt64(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(arg, value, -1);
+ PROTOBUF_CHECK_GET_FLOAT(arg, value, -1);
reflection->SetFloat(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(arg, value, -1);
+ PROTOBUF_CHECK_GET_DOUBLE(arg, value, -1);
reflection->SetDouble(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(arg, value, -1);
+ PROTOBUF_CHECK_GET_BOOL(arg, value, -1);
reflection->SetBool(message, field_descriptor, value);
break;
}
@@ -2286,7 +2293,7 @@ int InternalSetNonOneofScalar(
break;
}
case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT32(arg, value, -1);
if (reflection->SupportsUnknownEnumValues()) {
reflection->SetEnumValue(message, field_descriptor, value);
} else {
@@ -2366,7 +2373,7 @@ PyObject* DeepCopy(CMessage* self, PyObject* arg) {
PyObject* ToUnicode(CMessage* self) {
// Lazy import to prevent circular dependencies
ScopedPyObjectPtr text_format(
- PyImport_ImportModule("google.protobuf.text_format"));
+ PyImport_ImportModule(PROTOBUF_PYTHON_PUBLIC ".text_format"));
if (text_format == nullptr) {
return nullptr;
}
@@ -2667,22 +2674,22 @@ CMessage* CMessage::BuildSubMessageFromPointer(
if (!this->child_submessages) {
this->child_submessages = new CMessage::SubMessagesMap();
}
- CMessage* cmsg = FindPtrOrNull(
- *this->child_submessages, sub_message);
- if (cmsg) {
- Py_INCREF(cmsg);
- } else {
- cmsg = cmessage::NewEmptyMessage(message_class);
+ auto it = this->child_submessages->find(sub_message);
+ if (it != this->child_submessages->end()) {
+ Py_INCREF(it->second);
+ return it->second;
+ }
- if (cmsg == nullptr) {
- return nullptr;
- }
- cmsg->message = sub_message;
- Py_INCREF(this);
- cmsg->parent = this;
- cmsg->parent_field_descriptor = field_descriptor;
- cmessage::SetSubmessage(this, cmsg);
+ CMessage* cmsg = cmessage::NewEmptyMessage(message_class);
+
+ if (cmsg == nullptr) {
+ return nullptr;
}
+ cmsg->message = sub_message;
+ Py_INCREF(this);
+ cmsg->parent = this;
+ cmsg->parent_field_descriptor = field_descriptor;
+ cmessage::SetSubmessage(this, cmsg);
return cmsg;
}
@@ -2690,11 +2697,10 @@ CMessage* CMessage::MaybeReleaseSubMessage(Message* sub_message) {
if (!this->child_submessages) {
return nullptr;
}
- CMessage* released = FindPtrOrNull(
- *this->child_submessages, sub_message);
- if (!released) {
- return nullptr;
- }
+ auto it = this->child_submessages->find(sub_message);
+ if (it == this->child_submessages->end()) return nullptr;
+ CMessage* released = it->second;
+
// The target message will now own its content.
Py_CLEAR(released->parent);
released->parent_field_descriptor = nullptr;
@@ -3028,8 +3034,8 @@ bool InitProto2MessageModule(PyObject *m) {
PyModule_AddObject(m, "MethodDescriptor",
reinterpret_cast<PyObject*>(&PyMethodDescriptor_Type));
- PyObject* enum_type_wrapper = PyImport_ImportModule(
- "google.protobuf.internal.enum_type_wrapper");
+ PyObject* enum_type_wrapper =
+ PyImport_ImportModule(PROTOBUF_PYTHON_INTERNAL ".enum_type_wrapper");
if (enum_type_wrapper == nullptr) {
return false;
}
@@ -3037,8 +3043,8 @@ bool InitProto2MessageModule(PyObject *m) {
PyObject_GetAttrString(enum_type_wrapper, "EnumTypeWrapper");
Py_DECREF(enum_type_wrapper);
- PyObject* message_module = PyImport_ImportModule(
- "google.protobuf.message");
+ PyObject* message_module =
+ PyImport_ImportModule(PROTOBUF_PYTHON_PUBLIC ".message");
if (message_module == nullptr) {
return false;
}
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message.h b/contrib/python/protobuf/py3/google/protobuf/pyext/message.h
index 28dbda6798..474f2a99ca 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message.h
@@ -42,7 +42,7 @@
#include <string>
#include <unordered_map>
-#include <google/protobuf/stubs/common.h>
+#include "google/protobuf/stubs/common.h"
namespace google {
namespace protobuf {
@@ -287,46 +287,46 @@ PyObject* SetAllowOversizeProtos(PyObject* m, PyObject* arg);
#define FIELD_IS_REPEATED(field_descriptor) \
((field_descriptor)->label() == FieldDescriptor::LABEL_REPEATED)
-#define GOOGLE_CHECK_GET_INT32(arg, value, err) \
- int32_t value; \
- if (!CheckAndGetInteger(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_INT32(arg, value, err) \
+ int32_t value; \
+ if (!CheckAndGetInteger(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_INT64(arg, value, err) \
- int64_t value; \
- if (!CheckAndGetInteger(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_INT64(arg, value, err) \
+ int64_t value; \
+ if (!CheckAndGetInteger(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_UINT32(arg, value, err) \
- uint32_t value; \
- if (!CheckAndGetInteger(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_UINT32(arg, value, err) \
+ uint32_t value; \
+ if (!CheckAndGetInteger(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_UINT64(arg, value, err) \
- uint64_t value; \
- if (!CheckAndGetInteger(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_UINT64(arg, value, err) \
+ uint64_t value; \
+ if (!CheckAndGetInteger(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_FLOAT(arg, value, err) \
- float value; \
- if (!CheckAndGetFloat(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_FLOAT(arg, value, err) \
+ float value; \
+ if (!CheckAndGetFloat(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_DOUBLE(arg, value, err) \
- double value; \
- if (!CheckAndGetDouble(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_DOUBLE(arg, value, err) \
+ double value; \
+ if (!CheckAndGetDouble(arg, &value)) { \
+ return err; \
}
-#define GOOGLE_CHECK_GET_BOOL(arg, value, err) \
- bool value; \
- if (!CheckAndGetBool(arg, &value)) { \
- return err; \
+#define PROTOBUF_CHECK_GET_BOOL(arg, value, err) \
+ bool value; \
+ if (!CheckAndGetBool(arg, &value)) { \
+ return err; \
}
#define FULL_MODULE_NAME "google.protobuf.pyext._message"
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.cc
index 35d6214d7d..060cc767fd 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.cc
@@ -29,15 +29,16 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <unordered_map>
+#include <utility>
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include "google/protobuf/dynamic_message.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
#define PyString_AsStringAndSize(ob, charpp, sizep) \
(PyUnicode_Check(ob) \
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.h b/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.h
index 7dfe425dd5..1d911a827d 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message_factory.h
@@ -35,8 +35,8 @@
#include <Python.h>
#include <unordered_map>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
index 2d3c1d2087..f4692066d8 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/message_module.cc
@@ -31,12 +31,12 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/message_lite.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/proto_api.h>
+#include "google/protobuf/message_lite.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/proto_api.h"
namespace {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.cc
index 0b63f82256..cac8f1af69 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.cc
@@ -31,22 +31,19 @@
// Author: anuraag@google.com (Anuraag Agrawal)
// Author: tibell@google.com (Johan Tibell)
-#include <google/protobuf/pyext/repeated_composite_container.h>
+#include "google/protobuf/pyext/repeated_composite_container.h"
#include <memory>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/reflection.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/message_factory.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/stubs/map_util.h>
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/dynamic_message.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/reflection.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/message_factory.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.h b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.h
index 6fa6e176f1..8964374dbc 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_composite_container.h
@@ -37,7 +37,7 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.cc
index a9d96f03a0..9d5fab4065 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.cc
@@ -31,20 +31,20 @@
// Author: anuraag@google.com (Anuraag Agrawal)
// Author: tibell@google.com (Johan Tibell)
-#include <google/protobuf/pyext/repeated_scalar_container.h>
+#include "google/protobuf/pyext/repeated_scalar_container.h"
#include <cstdint>
#include <memory>
+#include <string>
-#include <google/protobuf/stubs/common.h>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/dynamic_message.h>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/descriptor.h>
-#include <google/protobuf/pyext/descriptor_pool.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include "google/protobuf/stubs/common.h"
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/dynamic_message.h"
+#include "google/protobuf/message.h"
+#include "google/protobuf/pyext/descriptor.h"
+#include "google/protobuf/pyext/descriptor_pool.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
#define PyString_AsString(ob) \
(PyUnicode_Check(ob) ? PyUnicode_AsUTF8(ob) : PyBytes_AsString(ob))
@@ -108,37 +108,37 @@ static int AssignItem(PyObject* pself, Py_ssize_t index, PyObject* arg) {
switch (field_descriptor->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT32(arg, value, -1);
reflection->SetRepeatedInt32(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT64(arg, value, -1);
reflection->SetRepeatedInt64(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_UINT32(arg, value, -1);
reflection->SetRepeatedUInt32(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(arg, value, -1);
+ PROTOBUF_CHECK_GET_UINT64(arg, value, -1);
reflection->SetRepeatedUInt64(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(arg, value, -1);
+ PROTOBUF_CHECK_GET_FLOAT(arg, value, -1);
reflection->SetRepeatedFloat(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(arg, value, -1);
+ PROTOBUF_CHECK_GET_DOUBLE(arg, value, -1);
reflection->SetRepeatedDouble(message, field_descriptor, index, value);
break;
}
case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(arg, value, -1);
+ PROTOBUF_CHECK_GET_BOOL(arg, value, -1);
reflection->SetRepeatedBool(message, field_descriptor, index, value);
break;
}
@@ -150,7 +150,7 @@ static int AssignItem(PyObject* pself, Py_ssize_t index, PyObject* arg) {
break;
}
case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(arg, value, -1);
+ PROTOBUF_CHECK_GET_INT32(arg, value, -1);
if (reflection->SupportsUnknownEnumValues()) {
reflection->SetRepeatedEnumValue(message, field_descriptor, index,
value);
@@ -333,37 +333,37 @@ PyObject* Append(RepeatedScalarContainer* self, PyObject* item) {
const Reflection* reflection = message->GetReflection();
switch (field_descriptor->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32: {
- GOOGLE_CHECK_GET_INT32(item, value, nullptr);
+ PROTOBUF_CHECK_GET_INT32(item, value, nullptr);
reflection->AddInt32(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_INT64: {
- GOOGLE_CHECK_GET_INT64(item, value, nullptr);
+ PROTOBUF_CHECK_GET_INT64(item, value, nullptr);
reflection->AddInt64(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT32: {
- GOOGLE_CHECK_GET_UINT32(item, value, nullptr);
+ PROTOBUF_CHECK_GET_UINT32(item, value, nullptr);
reflection->AddUInt32(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_UINT64: {
- GOOGLE_CHECK_GET_UINT64(item, value, nullptr);
+ PROTOBUF_CHECK_GET_UINT64(item, value, nullptr);
reflection->AddUInt64(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_FLOAT: {
- GOOGLE_CHECK_GET_FLOAT(item, value, nullptr);
+ PROTOBUF_CHECK_GET_FLOAT(item, value, nullptr);
reflection->AddFloat(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_DOUBLE: {
- GOOGLE_CHECK_GET_DOUBLE(item, value, nullptr);
+ PROTOBUF_CHECK_GET_DOUBLE(item, value, nullptr);
reflection->AddDouble(message, field_descriptor, value);
break;
}
case FieldDescriptor::CPPTYPE_BOOL: {
- GOOGLE_CHECK_GET_BOOL(item, value, nullptr);
+ PROTOBUF_CHECK_GET_BOOL(item, value, nullptr);
reflection->AddBool(message, field_descriptor, value);
break;
}
@@ -375,7 +375,7 @@ PyObject* Append(RepeatedScalarContainer* self, PyObject* item) {
break;
}
case FieldDescriptor::CPPTYPE_ENUM: {
- GOOGLE_CHECK_GET_INT32(item, value, nullptr);
+ PROTOBUF_CHECK_GET_INT32(item, value, nullptr);
if (reflection->SupportsUnknownEnumValues()) {
reflection->AddEnumValue(message, field_descriptor, value);
} else {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.h b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.h
index 67423ab4ce..0189886c52 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/repeated_scalar_container.h
@@ -37,8 +37,8 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <google/protobuf/descriptor.h>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/descriptor.h"
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/safe_numerics.h b/contrib/python/protobuf/py3/google/protobuf/pyext/safe_numerics.h
index 93ae640e8b..a6368768de 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/safe_numerics.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/safe_numerics.h
@@ -34,8 +34,7 @@
#include <limits>
-#include <google/protobuf/stubs/logging.h>
-#include <google/protobuf/stubs/common.h>
+#include "y_absl/log/absl_check.h"
namespace google {
namespace protobuf {
@@ -153,7 +152,7 @@ inline bool IsValidNumericCast(Source source) {
// (this is static_asserted), though this could be supported if necessary.
template <class Dest, class Source>
inline Dest checked_numeric_cast(Source source) {
- GOOGLE_CHECK(IsValidNumericCast<Dest>(source));
+ Y_ABSL_CHECK(IsValidNumericCast<Dest>(source));
return static_cast<Dest>(source);
}
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/scoped_pyobject_ptr.h b/contrib/python/protobuf/py3/google/protobuf/pyext/scoped_pyobject_ptr.h
index ad3fa9462d..d39a4d13fe 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/scoped_pyobject_ptr.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/scoped_pyobject_ptr.h
@@ -33,8 +33,6 @@
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
#define GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
-#include <google/protobuf/stubs/common.h>
-
#define PY_SSIZE_T_CLEAN
#include <Python.h>
namespace google {
@@ -50,6 +48,8 @@ class ScopedPythonPtr {
// The reference count of the specified py_object is not incremented.
explicit ScopedPythonPtr(PyObjectStruct* py_object = nullptr)
: ptr_(py_object) {}
+ ScopedPythonPtr(const ScopedPythonPtr&) = delete;
+ ScopedPythonPtr& operator=(const ScopedPythonPtr&) = delete;
// If a PyObject is owned, decrement its reference count.
~ScopedPythonPtr() { Py_XDECREF(ptr_); }
@@ -89,8 +89,6 @@ class ScopedPythonPtr {
private:
PyObjectStruct* ptr_;
-
- GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPythonPtr);
};
typedef ScopedPythonPtr<PyObject> ScopedPyObjectPtr;
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.cc
index 42f9bbcb04..59d1d0bb65 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.cc
@@ -28,7 +28,7 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include <google/protobuf/pyext/unknown_field_set.h>
+#include "google/protobuf/pyext/unknown_field_set.h"
#define PY_SSIZE_T_CLEAN
#include <Python.h>
@@ -36,11 +36,11 @@
#include <memory>
#include <set>
-#include <google/protobuf/message.h>
-#include <google/protobuf/unknown_field_set.h>
-#include <google/protobuf/wire_format_lite.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
+#include "google/protobuf/message.h"
+#include "google/protobuf/unknown_field_set.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "google/protobuf/wire_format_lite.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.h b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.h
index 3fa764d01e..92a889dc59 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_field_set.h
@@ -37,7 +37,7 @@
#include <memory>
#include <set>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.cc b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.cc
index dcd63b2e29..bb6549d33a 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.cc
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.cc
@@ -28,18 +28,19 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#include <google/protobuf/pyext/unknown_fields.h>
+#include "google/protobuf/pyext/unknown_fields.h"
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include <set>
+
#include <memory>
+#include <set>
-#include <google/protobuf/message.h>
-#include <google/protobuf/pyext/message.h>
-#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
-#include <google/protobuf/unknown_field_set.h>
-#include <google/protobuf/wire_format_lite.h>
+#include "google/protobuf/message.h"
+#include "google/protobuf/unknown_field_set.h"
+#include "google/protobuf/pyext/message.h"
+#include "google/protobuf/pyext/scoped_pyobject_ptr.h"
+#include "google/protobuf/wire_format_lite.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.h b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.h
index e7b0b35c41..81ee1a9139 100644
--- a/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.h
+++ b/contrib/python/protobuf/py3/google/protobuf/pyext/unknown_fields.h
@@ -37,7 +37,7 @@
#include <memory>
#include <set>
-#include <google/protobuf/pyext/message.h>
+#include "google/protobuf/pyext/message.h"
namespace google {
namespace protobuf {
diff --git a/contrib/python/protobuf/py3/google/protobuf/python_protobuf.h b/contrib/python/protobuf/py3/google/protobuf/python_protobuf.h
new file mode 100644
index 0000000000..4fcf065404
--- /dev/null
+++ b/contrib/python/protobuf/py3/google/protobuf/python_protobuf.h
@@ -0,0 +1,58 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc. All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Author: qrczak@google.com (Marcin Kowalczyk)
+//
+// This module exposes the C proto inside the given Python proto, in
+// case the Python proto is implemented with a C proto.
+
+#ifndef GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
+#define GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+
+namespace google {
+namespace protobuf {
+
+class Message;
+
+namespace python {
+
+// Return the pointer to the C proto inside the given Python proto,
+// or NULL when this is not a Python proto implemented with a C proto.
+const Message* GetCProtoInsidePyProto(PyObject* msg);
+Message* MutableCProtoInsidePyProto(PyObject* msg);
+
+} // namespace python
+} // namespace protobuf
+} // namespace google
+
+#endif // GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
diff --git a/contrib/python/protobuf/py3/google/protobuf/reflection.py b/contrib/python/protobuf/py3/google/protobuf/reflection.py
index 81e18859a8..1627669b95 100644
--- a/contrib/python/protobuf/py3/google/protobuf/reflection.py
+++ b/contrib/python/protobuf/py3/google/protobuf/reflection.py
@@ -92,4 +92,4 @@ def MakeClass(descriptor):
# Original implementation leads to duplicate message classes, which won't play
# well with extensions. Message factory info is also missing.
# Redirect to message_factory.
- return symbol_database.Default().GetPrototype(descriptor)
+ return message_factory.GetMessageClass(descriptor)
diff --git a/contrib/python/protobuf/py3/google/protobuf/symbol_database.py b/contrib/python/protobuf/py3/google/protobuf/symbol_database.py
index fdcf8cf06c..390c49810d 100644
--- a/contrib/python/protobuf/py3/google/protobuf/symbol_database.py
+++ b/contrib/python/protobuf/py3/google/protobuf/symbol_database.py
@@ -57,15 +57,41 @@ Example usage::
my_message_instance = db.GetSymbol('MyMessage')()
"""
+import warnings
from google.protobuf.internal import api_implementation
from google.protobuf import descriptor_pool
from google.protobuf import message_factory
-class SymbolDatabase(message_factory.MessageFactory):
+class SymbolDatabase():
"""A database of Python generated symbols."""
+ # local cache of registered classes.
+ _classes = {}
+
+ def __init__(self, pool=None):
+ """Initializes a new SymbolDatabase."""
+ self.pool = pool or descriptor_pool.DescriptorPool()
+
+ def GetPrototype(self, descriptor):
+ warnings.warn('SymbolDatabase.GetPrototype() is deprecated. Please '
+ 'use message_factory.GetMessageClass() instead. '
+ 'SymbolDatabase.GetPrototype() will be removed soon.')
+ return message_factory.GetMessageClass(descriptor)
+
+ def CreatePrototype(self, descriptor):
+ warnings.warn('Directly call CreatePrototype() is wrong. Please use '
+ 'message_factory.GetMessageClass() instead. '
+ 'SymbolDatabase.CreatePrototype() will be removed soon.')
+ return message_factory._InternalCreateMessageClass(descriptor)
+
+ def GetMessages(self, files):
+ warnings.warn('SymbolDatabase.GetMessages() is deprecated. Please use '
+ 'message_factory.GetMessageClassedForFiles() instead. '
+ 'SymbolDatabase.GetMessages() will be removed soon.')
+ return message_factory.GetMessageClassedForFiles(files, self.pool)
+
def RegisterMessage(self, message):
"""Registers the given message type in the local database.
diff --git a/contrib/python/protobuf/py3/google/protobuf/text_encoding.py b/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
index 759cf11f62..1955b6a3c0 100644
--- a/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
+++ b/contrib/python/protobuf/py3/google/protobuf/text_encoding.py
@@ -53,8 +53,7 @@ for byte, string in _cescape_chr_to_symbol_map.items():
del byte, string
-def CEscape(text, as_utf8):
- # type: (...) -> str
+def CEscape(text, as_utf8) -> str:
"""Escape a bytes string for use in an text protocol buffer.
Args:
@@ -83,8 +82,7 @@ def CEscape(text, as_utf8):
_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
-def CUnescape(text):
- # type: (str) -> bytes
+def CUnescape(text: str) -> bytes:
"""Unescape a text string with C-style escape sequences to UTF-8 bytes.
Args:
diff --git a/contrib/python/protobuf/py3/google/protobuf/text_format.py b/contrib/python/protobuf/py3/google/protobuf/text_format.py
index a6d8bcf648..e1a5ad5449 100644
--- a/contrib/python/protobuf/py3/google/protobuf/text_format.py
+++ b/contrib/python/protobuf/py3/google/protobuf/text_format.py
@@ -67,6 +67,7 @@ _FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
_QUOTES = frozenset(("'", '"'))
_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
+_DEBUG_STRING_SILENT_MARKER = '\t '
class Error(Exception):
@@ -125,8 +126,7 @@ def MessageToString(
indent=0,
message_formatter=None,
print_unknown_fields=False,
- force_colon=False):
- # type: (...) -> str
+ force_colon=False) -> str:
"""Convert protobuf message to text format.
Double values can be formatted compactly with 15 digits of
@@ -191,8 +191,7 @@ def MessageToString(
return result
-def MessageToBytes(message, **kwargs):
- # type: (...) -> bytes
+def MessageToBytes(message, **kwargs) -> bytes:
"""Convert protobuf message to encoded text format. See MessageToString."""
text = MessageToString(message, **kwargs)
if isinstance(text, bytes):
@@ -331,17 +330,16 @@ def _BuildMessageFromTypeName(type_name, descriptor_pool):
if descriptor_pool is None:
from google.protobuf import descriptor_pool as pool_mod
descriptor_pool = pool_mod.Default()
- from google.protobuf import symbol_database
- database = symbol_database.Default()
+ from google.protobuf import message_factory
try:
message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
except KeyError:
return None
- message_type = database.GetPrototype(message_descriptor)
+ message_type = message_factory.GetMessageClass(message_descriptor)
return message_type()
-# These values must match WireType enum in google/protobuf/wire_format.h.
+# These values must match WireType enum in //google/protobuf/wire_format.h.
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
@@ -558,7 +556,7 @@ class _Printer(object):
# For groups, use the capitalized name.
out.write(field.message_type.name)
else:
- out.write(field.name)
+ out.write(field.name)
if (self.force_colon or
field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE):
@@ -856,10 +854,15 @@ class _Parser(object):
ParseError: On text parsing problems.
"""
# Tokenize expects native str lines.
- str_lines = (
- line if isinstance(line, str) else line.decode('utf-8')
- for line in lines)
- tokenizer = Tokenizer(str_lines)
+ try:
+ str_lines = (
+ line if isinstance(line, str) else line.decode('utf-8')
+ for line in lines)
+ tokenizer = Tokenizer(str_lines)
+ except UnicodeDecodeError as e:
+ raise ParseError from e
+ if message:
+ self.root_type = message.DESCRIPTOR.full_name
while not tokenizer.AtEnd():
self._MergeField(tokenizer, message)
@@ -879,6 +882,8 @@ class _Parser(object):
type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
tokenizer.Consume(']')
tokenizer.TryConsume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ type_url_prefix + '/' + packed_type_name)
if tokenizer.TryConsume('<'):
expanded_any_end_token = '>'
else:
@@ -917,8 +922,6 @@ class _Parser(object):
# pylint: disable=protected-access
field = message.Extensions._FindExtensionByName(name)
# pylint: enable=protected-access
-
-
if not field:
if self.allow_unknown_extension:
field = None
@@ -978,9 +981,13 @@ class _Parser(object):
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
tokenizer.TryConsume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ field.full_name)
merger = self._MergeMessageField
else:
tokenizer.Consume(':')
+ self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
+ field.full_name)
merger = self._MergeScalarField
if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
@@ -998,13 +1005,19 @@ class _Parser(object):
else: # Proto field is unknown.
assert (self.allow_unknown_extension or self.allow_unknown_field)
- _SkipFieldContents(tokenizer)
+ self._SkipFieldContents(tokenizer, name, message_descriptor.full_name)
# For historical reasons, fields may optionally be separated by commas or
# semicolons.
if not tokenizer.TryConsume(','):
tokenizer.TryConsume(';')
+ def _LogSilentMarker(self, immediate_message_type, field_name):
+ pass
+
+ def _DetectSilentMarker(self, tokenizer, immediate_message_type, field_name):
+ if tokenizer.contains_silent_marker_before_current_token:
+ self._LogSilentMarker(immediate_message_type, field_name)
def _ConsumeAnyTypeUrl(self, tokenizer):
"""Consumes a google.protobuf.Any type URL and returns the type name."""
@@ -1079,12 +1092,6 @@ class _Parser(object):
else:
getattr(message, field.name)[sub_message.key] = sub_message.value
- @staticmethod
- def _IsProto3Syntax(message):
- message_descriptor = message.DESCRIPTOR
- return (hasattr(message_descriptor, 'syntax') and
- message_descriptor.syntax == 'proto3')
-
def _MergeScalarField(self, tokenizer, message, field):
"""Merges a single scalar field into a message.
@@ -1136,7 +1143,7 @@ class _Parser(object):
else:
if field.is_extension:
if (not self._allow_multiple_scalars and
- not self._IsProto3Syntax(message) and
+ field.has_presence and
message.HasExtension(field)):
raise tokenizer.ParseErrorPreviousToken(
'Message type "%s" should not have multiple "%s" extensions.' %
@@ -1146,12 +1153,12 @@ class _Parser(object):
else:
duplicate_error = False
if not self._allow_multiple_scalars:
- if self._IsProto3Syntax(message):
- # Proto3 doesn't represent presence so we try best effort to check
- # multiple scalars by compare to default values.
- duplicate_error = bool(getattr(message, field.name))
- else:
+ if field.has_presence:
duplicate_error = message.HasField(field.name)
+ else:
+ # For field that doesn't represent presence, try best effort to
+ # check multiple scalars by compare to default values.
+ duplicate_error = bool(getattr(message, field.name))
if duplicate_error:
raise tokenizer.ParseErrorPreviousToken(
@@ -1160,105 +1167,117 @@ class _Parser(object):
else:
setattr(message, field.name, value)
+ def _SkipFieldContents(self, tokenizer, field_name, immediate_message_type):
+ """Skips over contents (value or message) of a field.
-def _SkipFieldContents(tokenizer):
- """Skips over contents (value or message) of a field.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
- # Try to guess the type of this field.
- # If this field is not a message, there should be a ":" between the
- # field name and the field value and also the field value should not
- # start with "{" or "<" which indicates the beginning of a message body.
- # If there is no ":" or there is a "{" or "<" after ":", this field has
- # to be a message or the input is ill-formed.
- if tokenizer.TryConsume(
- ':') and not tokenizer.LookingAt('{') and not tokenizer.LookingAt('<'):
- if tokenizer.LookingAt('['):
- _SkipRepeatedFieldValue(tokenizer)
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ field_name: The field name currently being parsed.
+ immediate_message_type: The type of the message immediately containing
+ the silent marker.
+ """
+ # Try to guess the type of this field.
+ # If this field is not a message, there should be a ":" between the
+ # field name and the field value and also the field value should not
+ # start with "{" or "<" which indicates the beginning of a message body.
+ # If there is no ":" or there is a "{" or "<" after ":", this field has
+ # to be a message or the input is ill-formed.
+ if tokenizer.TryConsume(
+ ':') and not tokenizer.LookingAt('{') and not tokenizer.LookingAt('<'):
+ self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
+ if tokenizer.LookingAt('['):
+ self._SkipRepeatedFieldValue(tokenizer)
+ else:
+ self._SkipFieldValue(tokenizer)
else:
- _SkipFieldValue(tokenizer)
- else:
- _SkipFieldMessage(tokenizer)
-
-
-def _SkipField(tokenizer):
- """Skips over a complete field (name and value/message).
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
- if tokenizer.TryConsume('['):
- # Consume extension name.
- tokenizer.ConsumeIdentifier()
- while tokenizer.TryConsume('.'):
- tokenizer.ConsumeIdentifier()
- tokenizer.Consume(']')
- else:
- tokenizer.ConsumeIdentifierOrNumber()
-
- _SkipFieldContents(tokenizer)
+ self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
+ self._SkipFieldMessage(tokenizer, immediate_message_type)
- # For historical reasons, fields may optionally be separated by commas or
- # semicolons.
- if not tokenizer.TryConsume(','):
- tokenizer.TryConsume(';')
+ def _SkipField(self, tokenizer, immediate_message_type):
+ """Skips over a complete field (name and value/message).
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ immediate_message_type: The type of the message immediately containing
+ the silent marker.
+ """
+ field_name = ''
+ if tokenizer.TryConsume('['):
+ # Consume extension or google.protobuf.Any type URL
+ field_name += '[' + tokenizer.ConsumeIdentifier()
+ num_identifiers = 1
+ while tokenizer.TryConsume('.'):
+ field_name += '.' + tokenizer.ConsumeIdentifier()
+ num_identifiers += 1
+ # This is possibly a type URL for an Any message.
+ if num_identifiers == 3 and tokenizer.TryConsume('/'):
+ field_name += '/' + tokenizer.ConsumeIdentifier()
+ while tokenizer.TryConsume('.'):
+ field_name += '.' + tokenizer.ConsumeIdentifier()
+ tokenizer.Consume(']')
+ field_name += ']'
+ else:
+ field_name += tokenizer.ConsumeIdentifierOrNumber()
-def _SkipFieldMessage(tokenizer):
- """Skips over a field message.
-
- Args:
- tokenizer: A tokenizer to parse the field name and values.
- """
+ self._SkipFieldContents(tokenizer, field_name, immediate_message_type)
- if tokenizer.TryConsume('<'):
- delimiter = '>'
- else:
- tokenizer.Consume('{')
- delimiter = '}'
+ # For historical reasons, fields may optionally be separated by commas or
+ # semicolons.
+ if not tokenizer.TryConsume(','):
+ tokenizer.TryConsume(';')
- while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
- _SkipField(tokenizer)
+ def _SkipFieldMessage(self, tokenizer, immediate_message_type):
+ """Skips over a field message.
- tokenizer.Consume(delimiter)
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
+ immediate_message_type: The type of the message immediately containing
+ the silent marker
+ """
+ if tokenizer.TryConsume('<'):
+ delimiter = '>'
+ else:
+ tokenizer.Consume('{')
+ delimiter = '}'
+ while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
+ self._SkipField(tokenizer, immediate_message_type)
-def _SkipFieldValue(tokenizer):
- """Skips over a field value.
+ tokenizer.Consume(delimiter)
- Args:
- tokenizer: A tokenizer to parse the field name and values.
+ def _SkipFieldValue(self, tokenizer):
+ """Skips over a field value.
- Raises:
- ParseError: In case an invalid field value is found.
- """
- # String/bytes tokens can come in multiple adjacent string literals.
- # If we can consume one, consume as many as we can.
- if tokenizer.TryConsumeByteString():
- while tokenizer.TryConsumeByteString():
- pass
- return
+ Args:
+ tokenizer: A tokenizer to parse the field name and values.
- if (not tokenizer.TryConsumeIdentifier() and
- not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and
- not tokenizer.TryConsumeFloat()):
- raise ParseError('Invalid field value: ' + tokenizer.token)
+ Raises:
+ ParseError: In case an invalid field value is found.
+ """
+ # String/bytes tokens can come in multiple adjacent string literals.
+ # If we can consume one, consume as many as we can.
+ if tokenizer.TryConsumeByteString():
+ while tokenizer.TryConsumeByteString():
+ pass
+ return
+ if (not tokenizer.TryConsumeIdentifier() and
+ not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and
+ not tokenizer.TryConsumeFloat()):
+ raise ParseError('Invalid field value: ' + tokenizer.token)
-def _SkipRepeatedFieldValue(tokenizer):
- """Skips over a repeated field value.
+ def _SkipRepeatedFieldValue(self, tokenizer):
+ """Skips over a repeated field value.
- Args:
- tokenizer: A tokenizer to parse the field value.
- """
- tokenizer.Consume('[')
- if not tokenizer.LookingAt(']'):
- _SkipFieldValue(tokenizer)
- while tokenizer.TryConsume(','):
- _SkipFieldValue(tokenizer)
- tokenizer.Consume(']')
+ Args:
+ tokenizer: A tokenizer to parse the field value.
+ """
+ tokenizer.Consume('[')
+ if not tokenizer.LookingAt(']'):
+ self._SkipFieldValue(tokenizer)
+ while tokenizer.TryConsume(','):
+ self._SkipFieldValue(tokenizer)
+ tokenizer.Consume(']')
class Tokenizer(object):
@@ -1299,6 +1318,8 @@ class Tokenizer(object):
self._skip_comments = skip_comments
self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
or self._WHITESPACE)
+ self.contains_silent_marker_before_current_token = False
+
self._SkipWhitespace()
self.NextToken()
@@ -1331,6 +1352,8 @@ class Tokenizer(object):
match = self._whitespace_pattern.match(self._current_line, self._column)
if not match:
break
+ self.contains_silent_marker_before_current_token = match.group(0) == (
+ ' ' + _DEBUG_STRING_SILENT_MARKER)
length = len(match.group(0))
self._column += length
@@ -1583,6 +1606,7 @@ class Tokenizer(object):
"""Reads the next meaningful token."""
self._previous_line = self._line
self._previous_column = self._column
+ self.contains_silent_marker_before_current_token = False
self._column += len(self.token)
self._SkipWhitespace()
@@ -1829,12 +1853,8 @@ def ParseEnum(field, value):
raise ValueError('Enum type "%s" has no value named %s.' %
(enum_descriptor.full_name, value))
else:
- # Numeric value.
- if hasattr(field.file, 'syntax'):
- # Attribute is checked for compatibility.
- if field.file.syntax == 'proto3':
- # Proto3 accept numeric unknown enums.
- return number
+ if not field.enum_type.is_closed:
+ return number
enum_value = enum_descriptor.values_by_number.get(number, None)
if enum_value is None:
raise ValueError('Enum type "%s" has no value with number %d.' %
diff --git a/contrib/python/protobuf/py3/ya.make b/contrib/python/protobuf/py3/ya.make
index c00e01b812..da1daa078e 100644
--- a/contrib/python/protobuf/py3/ya.make
+++ b/contrib/python/protobuf/py3/ya.make
@@ -1,4 +1,4 @@
-# Generated by devtools/yamaker from nixpkgs 22.05.
+# Generated by devtools/yamaker/ym2
PY3_LIBRARY()
@@ -9,93 +9,36 @@ LICENSE(
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
-VERSION(4.21.7)
+VERSION(4.22.5)
-ORIGINAL_SOURCE(mirror://pypi/p/protobuf/protobuf-4.21.7.tar.gz)
+ORIGINAL_SOURCE(https://github.com/protocolbuffers/protobuf/archive/refs/tags/v4.22.5.tar.gz)
+
+NO_COMPILER_WARNINGS()
PEERDIR(
contrib/libs/protobuf
contrib/libs/protobuf/builtin_proto/protos_from_protobuf
contrib/libs/protobuf/builtin_proto/protos_from_protoc
contrib/libs/python
+ contrib/restricted/abseil-cpp-tstring/y_absl/strings
+ contrib/restricted/abseil-cpp-tstring/y_absl/log
)
+NO_LINT()
+
ADDINCL(
contrib/python/protobuf/py3
)
-NO_COMPILER_WARNINGS()
-
-NO_LINT()
-
CFLAGS(
-DPYTHON_PROTO2_CPP_IMPL_V2
)
-SRCS(
- google/protobuf/internal/api_implementation.cc
- google/protobuf/pyext/descriptor.cc
- google/protobuf/pyext/descriptor_containers.cc
- google/protobuf/pyext/descriptor_database.cc
- google/protobuf/pyext/descriptor_pool.cc
- google/protobuf/pyext/extension_dict.cc
- google/protobuf/pyext/field.cc
- google/protobuf/pyext/map_container.cc
- google/protobuf/pyext/message.cc
- google/protobuf/pyext/message_factory.cc
- google/protobuf/pyext/message_module.cc
- google/protobuf/pyext/repeated_composite_container.cc
- google/protobuf/pyext/repeated_scalar_container.cc
- google/protobuf/pyext/unknown_field_set.cc
- google/protobuf/pyext/unknown_fields.cc
-)
+INCLUDE(ya.make.inc)
PY_REGISTER(
google.protobuf.internal._api_implementation
google.protobuf.pyext._message
)
-PY_SRCS(
- TOP_LEVEL
- google/__init__.py
- google/protobuf/__init__.py
- google/protobuf/compiler/__init__.py
- google/protobuf/descriptor.py
- google/protobuf/descriptor_database.py
- google/protobuf/descriptor_pool.py
- google/protobuf/internal/__init__.py
- google/protobuf/internal/api_implementation.py
- google/protobuf/internal/builder.py
- google/protobuf/internal/containers.py
- google/protobuf/internal/decoder.py
- google/protobuf/internal/encoder.py
- google/protobuf/internal/enum_type_wrapper.py
- google/protobuf/internal/extension_dict.py
- google/protobuf/internal/message_listener.py
- google/protobuf/internal/python_message.py
- google/protobuf/internal/type_checkers.py
- google/protobuf/internal/well_known_types.py
- google/protobuf/internal/wire_format.py
- google/protobuf/json_format.py
- google/protobuf/message.py
- google/protobuf/message_factory.py
- google/protobuf/proto_builder.py
- google/protobuf/pyext/__init__.py
- google/protobuf/pyext/cpp_message.py
- google/protobuf/reflection.py
- google/protobuf/service.py
- google/protobuf/service_reflection.py
- google/protobuf/symbol_database.py
- google/protobuf/text_encoding.py
- google/protobuf/text_format.py
- google/protobuf/unknown_fields.py
- google/protobuf/util/__init__.py
-)
-
-RESOURCE_FILES(
- PREFIX contrib/python/protobuf/py3/
- .dist-info/METADATA
- .dist-info/top_level.txt
-)
-
END()
diff --git a/contrib/python/protobuf/py3/ya.make.inc b/contrib/python/protobuf/py3/ya.make.inc
new file mode 100644
index 0000000000..a6e45bc1fe
--- /dev/null
+++ b/contrib/python/protobuf/py3/ya.make.inc
@@ -0,0 +1,57 @@
+PY_SRCS(
+ TOP_LEVEL
+ google/__init__.py
+ google/protobuf/__init__.py
+ google/protobuf/compiler/__init__.py
+ google/protobuf/descriptor.py
+ google/protobuf/descriptor_database.py
+ google/protobuf/descriptor_pool.py
+ google/protobuf/internal/__init__.py
+ google/protobuf/internal/_parameterized.py
+ google/protobuf/internal/api_implementation.py
+ google/protobuf/internal/builder.py
+ google/protobuf/internal/containers.py
+ google/protobuf/internal/decoder.py
+ google/protobuf/internal/encoder.py
+ google/protobuf/internal/enum_type_wrapper.py
+ google/protobuf/internal/extension_dict.py
+ google/protobuf/internal/field_mask.py
+ google/protobuf/internal/message_listener.py
+ google/protobuf/internal/python_message.py
+ google/protobuf/internal/testing_refleaks.py
+ google/protobuf/internal/type_checkers.py
+ google/protobuf/internal/well_known_types.py
+ google/protobuf/internal/wire_format.py
+ google/protobuf/json_format.py
+ google/protobuf/message.py
+ google/protobuf/message_factory.py
+ google/protobuf/proto_builder.py
+ google/protobuf/pyext/__init__.py
+ google/protobuf/pyext/cpp_message.py
+ google/protobuf/reflection.py
+ google/protobuf/service.py
+ google/protobuf/service_reflection.py
+ google/protobuf/symbol_database.py
+ google/protobuf/text_encoding.py
+ google/protobuf/text_format.py
+ google/protobuf/unknown_fields.py
+ google/protobuf/util/__init__.py
+)
+SRCS(
+ google/protobuf/internal/api_implementation.cc
+ google/protobuf/internal/python_protobuf.cc
+ google/protobuf/pyext/descriptor.cc
+ google/protobuf/pyext/descriptor_containers.cc
+ google/protobuf/pyext/descriptor_database.cc
+ google/protobuf/pyext/descriptor_pool.cc
+ google/protobuf/pyext/extension_dict.cc
+ google/protobuf/pyext/field.cc
+ google/protobuf/pyext/map_container.cc
+ google/protobuf/pyext/message.cc
+ google/protobuf/pyext/message_factory.cc
+ google/protobuf/pyext/message_module.cc
+ google/protobuf/pyext/repeated_composite_container.cc
+ google/protobuf/pyext/repeated_scalar_container.cc
+ google/protobuf/pyext/unknown_field_set.cc
+ google/protobuf/pyext/unknown_fields.cc
+)