path: root/contrib/python/mypy-protobuf
author      Devtools Arcadia <arcadia-devtools@yandex-team.ru>    2022-02-07 18:08:42 +0300
committer   Devtools Arcadia <arcadia-devtools@mous.vla.yp-c.yandex.net>    2022-02-07 18:08:42 +0300
commit      1110808a9d39d4b808aef724c861a2e1a38d2a69 (patch)
tree        e26c9fed0de5d9873cce7e00bc214573dc2195b7 /contrib/python/mypy-protobuf
download    ydb-1110808a9d39d4b808aef724c861a2e1a38d2a69.tar.gz
intermediate changes
ref: cde9a383711a11544ce7e107a78147fb96cc4029
Diffstat (limited to 'contrib/python/mypy-protobuf')
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/METADATA                   18
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/entry_points.txt            4
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/top_level.txt               1
-rw-r--r--  contrib/python/mypy-protobuf/LICENSE                              202
-rw-r--r--  contrib/python/mypy-protobuf/README.md                            263
-rw-r--r--  contrib/python/mypy-protobuf/bin/protoc-gen-mypy/bin/ya.make       13
-rw-r--r--  contrib/python/mypy-protobuf/bin/protoc-gen-mypy/ya.make           13
-rw-r--r--  contrib/python/mypy-protobuf/bin/protoc-gen-mypy_grpc/ya.make      13
-rw-r--r--  contrib/python/mypy-protobuf/bin/ya.make                            6
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/__init__.py              0
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py       62
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/main.py                1077
-rw-r--r--  contrib/python/mypy-protobuf/patches/01-arcadia.patch              20
-rw-r--r--  contrib/python/mypy-protobuf/ya.make                                33
14 files changed, 1725 insertions, 0 deletions
diff --git a/contrib/python/mypy-protobuf/.dist-info/METADATA b/contrib/python/mypy-protobuf/.dist-info/METADATA
new file mode 100644
index 00000000000..1cfc66d9dfc
--- /dev/null
+++ b/contrib/python/mypy-protobuf/.dist-info/METADATA
@@ -0,0 +1,18 @@
+Metadata-Version: 2.1
+Name: mypy-protobuf
+Version: 2.10
+Summary: Generate mypy stub files from protobuf specs
+Home-page: https://github.com/dropbox/mypy-protobuf
+Author: Nipunn Koorapati
+Author-email: nipunn1313@gmail.com
+License: Apache License 2.0
+Download-URL: https://github.com/dropbox/mypy-protobuf/releases
+Keywords: mypy proto dropbox
+Platform: UNKNOWN
+Requires-Python: >=3.6
+License-File: LICENSE
+Requires-Dist: protobuf (>=3.17.3)
+Requires-Dist: types-protobuf (>=3.17.4)
+
+UNKNOWN
+
diff --git a/contrib/python/mypy-protobuf/.dist-info/entry_points.txt b/contrib/python/mypy-protobuf/.dist-info/entry_points.txt
new file mode 100644
index 00000000000..9dbfc984183
--- /dev/null
+++ b/contrib/python/mypy-protobuf/.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+protoc-gen-mypy = mypy_protobuf.main:main
+protoc-gen-mypy_grpc = mypy_protobuf.main:grpc
+
diff --git a/contrib/python/mypy-protobuf/.dist-info/top_level.txt b/contrib/python/mypy-protobuf/.dist-info/top_level.txt
new file mode 100644
index 00000000000..677dcb137be
--- /dev/null
+++ b/contrib/python/mypy-protobuf/.dist-info/top_level.txt
@@ -0,0 +1 @@
+mypy_protobuf
diff --git a/contrib/python/mypy-protobuf/LICENSE b/contrib/python/mypy-protobuf/LICENSE
new file mode 100644
index 00000000000..bd760ffabda
--- /dev/null
+++ b/contrib/python/mypy-protobuf/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright (c) 2017 Dropbox, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/contrib/python/mypy-protobuf/README.md b/contrib/python/mypy-protobuf/README.md
new file mode 100644
index 00000000000..ef79728324c
--- /dev/null
+++ b/contrib/python/mypy-protobuf/README.md
@@ -0,0 +1,263 @@
+mypy-protobuf: Generate mypy stub files from protobuf specs
+
+[![CI](https://github.com/dropbox/mypy-protobuf/workflows/CI/badge.svg)](https://github.com/dropbox/mypy-protobuf/actions?query=branch%3Amain)
+[![pypi](https://img.shields.io/pypi/v/mypy-protobuf?logo=Pypi)](https://pypi.org/project/mypy-protobuf/)
+[![license](https://img.shields.io/github/license/dropbox/mypy-protobuf)](https://github.com/dropbox/mypy-protobuf/blob/main/LICENSE)
+===========================================================
+
+2.10 will be the last version of mypy-protobuf which supports targeting python 2.7. Next will be
+mypy-protobuf 3.0, which will only support targeting python3.
+
+See [Changelog](CHANGELOG.md) for recent changes.
+
+## Requirements to run mypy-protobuf
+[protoc >= 3.17.3](https://github.com/protocolbuffers/protobuf/releases)
+[python-protobuf >= 3.17.3](https://pypi.org/project/protobuf/) - matching protoc release
+[python >= 3.6](https://www.python.org/downloads/source/) - for running mypy-protobuf plugin.
+
+## Requirements to run mypy on stubs generated by mypy-protobuf
+[mypy >= v0.910](https://pypi.org/project/mypy)
+[python-protobuf >= 3.17.3](https://pypi.org/project/protobuf/) - matching protoc release
+[types-protobuf==3.17.3](https://pypi.org/project/types-protobuf/) - for stubs from the google.protobuf library
+
+### To run mypy on code generated with grpc plugin - you'll additionally need
+[grpcio>=1.38.1](https://pypi.org/project/grpcio/)
+[grpcio-tools>=1.38.1](https://pypi.org/project/grpcio-tools/)
+[grpc-stubs>=1.24.6](https://pypi.org/project/grpc-stubs/)
+
+Other configurations may work, but are not supported in testing currently. We would be open to expanding this list if a need arises - file an issue on the issue tracker.
+
+## Installation
+
+The plugin can be installed with
+```
+pip3 install mypy-protobuf
+```
+To install an unreleased version
+```
+REV=main # or whichever unreleased git rev you'd like
+pip3 install git+https://github.com/dropbox/mypy-protobuf.git@$REV
+
+# For older (1.x) versions of mypy protobuf - you may need
+pip3 install git+https://github.com/dropbox/mypy-protobuf.git@$REV#subdirectory=python
+```
+
+In order to run mypy on the generated code, you'll need to install
+```
+pip3 install mypy>=0.910 types-protobuf>=0.1.14
+```
+
+# Usage
+
+On POSIX, protoc-gen-mypy is installed to python's executable bin directory. Assuming that's
+on your $PATH, you can run
+```
+protoc --python_out=output/location --mypy_out=output/location
+```
+Alternatively, you can explicitly provide the path:
+```
+protoc --plugin=protoc-gen-mypy=path/to/protoc-gen-mypy --python_out=output/location --mypy_out=output/location
+```
+Check the version number with
+```
+> protoc-gen-mypy --version
+```
+
+## Getting Help
+
+Find other developers in the mypy-protobuf slack workspace ([Invitation Link](https://join.slack.com/t/mypy-protobuf/shared_invite/zt-scogn8b5-MhetFnFYGi6V513aRsbe_Q)). If your company uses slack and mypy-protobuf, you may opt to use slack-connect to make a shared channel.
+
+## Implementation
+
+The implementation of the plugin is in `mypy_protobuf/main.py`, which installs to
+an executable protoc-gen-mypy. On Windows it installs to `protoc-gen-mypy.exe`.
+
+## Features
+
+See [Changelog](CHANGELOG.md) for full listing
+
+### Bring comments from .proto files to docstrings in .pyi files
+
+Comments in the .proto files on messages, fields, enums, enum variants, extensions, services, and methods
+will appear as docstrings in .pyi files. Useful in IDEs for showing completions with comments.
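+
+For example, a leading comment such as `// Unique id for this user` on a field would show up roughly like this in the generated stub (an illustrative sketch, not verbatim generator output; the `User` message is assumed):
+```
+class User(google.protobuf.message.Message):
+    """A user record."""
+    id: builtins.int = ...
+    """Unique id for this user"""
+```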
+
+### Types enum int values more strongly
+
+Enum int values produce stubs which wrap the int values in NewType
+```
+enum MyEnum {
+ FOO = 0;
+ BAR = 1;
+}
+```
+Will yield an [enum type wrapper](https://github.com/python/typeshed/blob/16ae4c61201cd8b96b8b22cdfb2ab9e89ba5bcf2/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi) whose methods accept and return `MyEnum.V` rather than `int`.
+This allows mypy to catch bugs where the wrong enum value is being used.
+
+mypy-protobuf autogenerates an instance of the EnumTypeWrapper as follows.
+
+```
+class MyEnum(_MyEnum, metaclass=_MyEnumEnumTypeWrapper):
+ pass
+class _MyEnum:
+ V = typing.NewType('V', builtins.int)
+class _MyEnumEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_MyEnum.V], builtins.type):
+ DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
+ FOO = MyEnum.V(0)
+ BAR = MyEnum.V(1)
+FOO = MyEnum.V(0)
+BAR = MyEnum.V(1)
+```
+
+- `_MyEnumEnumTypeWrapper` extends the `EnumTypeWrapper` to take/return `MyEnum.V` rather than `int`.
+- `MyEnum` is an instance of the `EnumTypeWrapper`.
+- The use of `_MyEnum` and the metaclass is an implementation detail that makes `MyEnum.V` a valid type without a circular dependency.
+
+Calling code may be typed as follows.
+
+In python >= 3.7
+```
+# Need [PEP 563](https://www.python.org/dev/peps/pep-0563/) to postpone evaluation of annotations
+from __future__ import annotations # Not needed with python>=3.10
+def f(x: MyEnum.V):
+ print(x)
+f(MyEnum.Value("FOO"))
+```
+
+Note that for usages of cast, the type of `x` must be quoted
+until [upstream protobuf](https://github.com/protocolbuffers/protobuf/pull/8182) includes `V`
+```
+cast('MyEnum.V', x)
+```
+
+### Supports generating type wrappers for fields and maps
+
+M.proto
+```
+message M {
+  uint32 user_id = 1 [(mypy_protobuf.casttype) = "mymod.UserId"];
+  map<uint32, string> email_by_uid = 2 [
+    (mypy_protobuf.keytype) = "path/to/mymod.UserId",
+    (mypy_protobuf.valuetype) = "path/to/mymod.Email"
+  ];
+}
+```
+mymod.py
+```
+UserId = NewType("UserId", int)
+Email = NewType("Email", Text)
+```
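+
+With those stubs in place, code that uses `M` sees the narrowed types. A minimal sketch, assuming `M.proto` above compiles to a module named `m_pb2` (an assumed name):
+```
+from mymod import UserId
+import m_pb2  # assumed module name for the compiled M.proto
+
+msg = m_pb2.M(user_id=UserId(1313))
+uid: UserId = msg.user_id  # typed as mymod.UserId rather than plain int
+msg.user_id = 42           # mypy error: expected UserId, got int
+```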
+
+### `py_generic_services`
+If `py_generic_services` is set in your proto file, then mypy-protobuf will
+generate service stubs. If you want gRPC stubs instead, use the gRPC instructions below.
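+
+A rough sketch of the shape these stubs take, assuming a service named `Search` with a single `Query` method (names simplified; based on the generator's `write_services` output, not verbatim):
+```
+class Search(google.protobuf.service.Service, metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def Query(self,
+        rpc_controller: google.protobuf.service.RpcController,
+        request: SearchRequest,
+        done: typing.Optional[typing.Callable[[SearchResponse], None]],
+    ) -> concurrent.futures.Future[SearchResponse]: ...
+class Search_Stub(Search):
+    def __init__(self, rpc_channel: google.protobuf.service.RpcChannel) -> None: ...
+```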
+
+### `readable_stubs`
+If `readable_stubs` is set, mypy-protobuf will generate easier-to-read stubs. The downside
+to this approach is that it's possible to generate stubs which do not pass mypy, particularly
+in the case of name collisions. mypy-protobuf defaults to generating stubs with fully qualified
+imports and mangled global-level identifiers to defend against name collisions between global
+identifiers and field names.
+
+If you're ok with this risk, try it out!
+```
+protoc --python_out=output/location --mypy_out=readable_stubs:output/location
+```
+
+### `relax_strict_optional_primitives`
+
+If you are using proto3, then primitives cannot be represented as NULL on the wire -
+only as their zero value. By default, mypy-protobuf types message constructors to have
+non-nullable primitives (eg `int` instead of `Optional[int]`). python-protobuf itself will
+internally convert None -> zero value. If you intentionally want to use this behavior,
+set this flag! We recommend avoiding this, as it can lead to developer error - confusing
+NULL and 0 as distinct values on the wire.
+However, it may be helpful when migrating existing proto2 code, where the distinction is meaningful.
+
+```
+protoc --python_out=output/location --mypy_out=relax_strict_optional_primitives:output/location
+```
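+
+A rough sketch of the effect on the generated constructor, assuming a proto3 message with a single `int32 x = 1` field (illustrative, not verbatim output):
+```
+# default (strict) stub:
+def __init__(self, *, x: builtins.int = ...) -> None: ...
+# with relax_strict_optional_primitives:
+def __init__(self, *, x: typing.Optional[builtins.int] = ...) -> None: ...
+```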
+
+### Output suppression
+To suppress output, you can run
+```
+protoc --python_out=output/location --mypy_out=quiet:output/location
+```
+
+### GRPC
+
+This plugin provides stub generation for grpcio-generated code.
+```
+protoc \
+ --python_out=output/location \
+ --mypy_out=output/location \
+ --grpc_out=output/location \
+ --mypy_grpc_out=output/location
+```
+
+Note that the generated gRPC code only works together with the generated python code, and the output locations should be the same.
+If you need stubs for grpc's internal code, we suggest using this package: https://github.com/shabbyrobe/grpc-stubs
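+
+Once the `_pb2_grpc.pyi` stubs are generated, client code type-checks against them. A minimal sketch using the canonical gRPC helloworld example (the helloworld modules and `Greeter` service are assumptions, not part of this package):
+```
+import grpc
+import helloworld_pb2
+import helloworld_pb2_grpc
+
+channel = grpc.insecure_channel("localhost:50051")
+stub = helloworld_pb2_grpc.GreeterStub(channel)
+# mypy sees SayHello as a UnaryUnaryMultiCallable[HelloRequest, HelloReply]
+reply = stub.SayHello(helloworld_pb2.HelloRequest(name="world"))
+```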
+
+### Targeting python2 support
+
+mypy-protobuf drops support for targeting python2 with version 3.0. If you still need python2 support:
+```
+python3 -m pip install mypy_protobuf==2.10
+protoc --python_out=output/location --mypy_out=output/location
+mypy --target-version=2.7 {files}
+```
+
+
+## Contributing
+Contributions to the implementation are welcome. Please run tests using `./run_test.sh`.
+Ensure code is formatted using black.
+```
+pip3 install black
+black .
+```
+
+## Contributors
+
+### Dropboxers
+- [@nipunn1313](https://github.com/nipunn1313)
+- [@dzbarsky](https://github.com/dzbarsky)
+- [@gvanrossum](https://github.com/gvanrossum)
+- [@peterlvilim](https://github.com/peterlvilim)
+- [@msullivan](https://github.com/msullivan)
+- [@bradenaw](https://github.com/bradenaw)
+- [@ilevkivskyi](https://github.com/ilevkivskyi)
+
+### Others
+- [@Ketouem](https://github.com/Ketouem)
+- [@nmiculinic](https://github.com/nmiculinic)
+- [@onto](https://github.com/onto)
+- [@jcppkkk](https://github.com/jcppkkk)
+- [@drather19](https://github.com/drather19)
+- [@smessmer](https://github.com/smessmer)
+- [@pcorpet](https://github.com/pcorpet)
+- [@zozoens31](https://github.com/zozoens31)
+- [@abhishekrb19](https://github.com/abhishekrb19)
+- [@jaens](https://github.com/jaens)
+- [@arussellsaw](https://github.com/arussellsaw)
+- [@shabbyrobe](https://github.com/shabbyrobe)
+- [@reorx](https://github.com/reorx)
+- [@zifter](https://github.com/zifter)
+- [@juzna](https://github.com/juzna)
+- [@mikolajz](https://github.com/mikolajz)
+- [@chadrik](https://github.com/chadrik)
+- [@EPronovost](https://github.com/EPronovost)
+- [@chrislawlor](https://github.com/chrislawlor)
+- [@henribru](https://github.com/henribru)
+- [@Evgenus](https://github.com/Evgenus)
+- [@MHDante](https://github.com/MHDante)
+- [@nelfin](https://github.com/nelfin)
+- [@alkasm](https://github.com/alkasm)
+- [@tarmath](https://github.com/tarmath)
+
+Licence etc.
+------------
+
+1. License: Apache 2.0.
+2. Copyright attribution: Copyright (c) 2017 Dropbox, Inc.
+3. External contributions to the project should be subject to
+ Dropbox's Contributor License Agreement (CLA):
+ https://opensource.dropbox.com/cla/
diff --git a/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/bin/ya.make b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/bin/ya.make
new file mode 100644
index 00000000000..b63a9d4a517
--- /dev/null
+++ b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/bin/ya.make
@@ -0,0 +1,13 @@
+OWNER(torkve g:python-contrib)
+
+PY3_PROGRAM_BIN(protoc-gen-mypy)
+
+PEERDIR(
+ contrib/python/mypy-protobuf
+)
+
+PY_MAIN(mypy_protobuf.main:main)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/ya.make b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/ya.make
new file mode 100644
index 00000000000..c4601711668
--- /dev/null
+++ b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/ya.make
@@ -0,0 +1,13 @@
+OWNER(torkve g:python-contrib)
+
+IF (USE_PREBUILT_TOOLS)
+ INCLUDE(${ARCADIA_ROOT}/build/prebuilt/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/ya.make.prebuilt)
+ENDIF()
+
+IF (NOT PREBUILT)
+ INCLUDE(${ARCADIA_ROOT}/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/bin/ya.make)
+ENDIF()
+
+RECURSE(
+ bin
+)
diff --git a/contrib/python/mypy-protobuf/bin/protoc-gen-mypy_grpc/ya.make b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy_grpc/ya.make
new file mode 100644
index 00000000000..bc94d264cb3
--- /dev/null
+++ b/contrib/python/mypy-protobuf/bin/protoc-gen-mypy_grpc/ya.make
@@ -0,0 +1,13 @@
+OWNER(torkve g:python-contrib)
+
+PY3_PROGRAM(protoc-gen-mypy_grpc)
+
+PEERDIR(
+ contrib/python/mypy-protobuf
+)
+
+PY_MAIN(mypy_protobuf.main:grpc)
+
+NO_LINT()
+
+END()
diff --git a/contrib/python/mypy-protobuf/bin/ya.make b/contrib/python/mypy-protobuf/bin/ya.make
new file mode 100644
index 00000000000..b14eec5f932
--- /dev/null
+++ b/contrib/python/mypy-protobuf/bin/ya.make
@@ -0,0 +1,6 @@
+OWNER(torkve g:python-contrib)
+
+RECURSE(
+ protoc-gen-mypy
+ protoc-gen-mypy_grpc
+)
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py b/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py b/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
new file mode 100644
index 00000000000..745bc028ef8
--- /dev/null
+++ b/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: mypy_protobuf/extensions.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='mypy_protobuf/extensions.proto',
+ package='mypy_protobuf',
+ syntax='proto2',
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n\x1emypy_protobuf/extensions.proto\x12\rmypy_protobuf\x1a google/protobuf/descriptor.proto:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xe0\xd4\x03 \x01(\t:0\n\x07keytype\x12\x1d.google.protobuf.FieldOptions\x18\xe2\xd4\x03 \x01(\t:2\n\tvaluetype\x12\x1d.google.protobuf.FieldOptions\x18\xe3\xd4\x03 \x01(\t'
+ ,
+ dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
+
+
+CASTTYPE_FIELD_NUMBER = 60000
+casttype = _descriptor.FieldDescriptor(
+ name='casttype', full_name='mypy_protobuf.casttype', index=0,
+ number=60000, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=True, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
+KEYTYPE_FIELD_NUMBER = 60002
+keytype = _descriptor.FieldDescriptor(
+ name='keytype', full_name='mypy_protobuf.keytype', index=1,
+ number=60002, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=True, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
+VALUETYPE_FIELD_NUMBER = 60003
+valuetype = _descriptor.FieldDescriptor(
+ name='valuetype', full_name='mypy_protobuf.valuetype', index=2,
+ number=60003, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=True, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
+
+DESCRIPTOR.extensions_by_name['casttype'] = casttype
+DESCRIPTOR.extensions_by_name['keytype'] = keytype
+DESCRIPTOR.extensions_by_name['valuetype'] = valuetype
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype)
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(keytype)
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(valuetype)
+
+# @@protoc_insertion_point(module_scope)
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/main.py b/contrib/python/mypy-protobuf/mypy_protobuf/main.py
new file mode 100644
index 00000000000..f2b6c055072
--- /dev/null
+++ b/contrib/python/mypy-protobuf/mypy_protobuf/main.py
@@ -0,0 +1,1077 @@
+#!/usr/bin/env python
+"""Protoc Plugin to generate mypy stubs. Loosely based on @zbarsky's go implementation"""
+import os
+
+import sys
+from collections import defaultdict
+from contextlib import contextmanager
+from functools import wraps
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ Sequence,
+ Tuple,
+)
+
+import google.protobuf.descriptor_pb2 as d
+from google.protobuf.compiler import plugin_pb2 as plugin_pb2
+from google.protobuf.internal.containers import RepeatedCompositeFieldContainer
+from google.protobuf.internal.well_known_types import WKTBASES
+from . import extensions_pb2
+
+__version__ = "2.10"
+
+# SourceCodeLocation is defined by `message Location` here
+# https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto
+SourceCodeLocation = List[int]
+
+# So phabricator doesn't think mypy_protobuf.py is generated
+GENERATED = "@ge" + "nerated"
+HEADER = """\"\"\"
+{} by mypy-protobuf. Do not edit manually!
+isort:skip_file
+\"\"\"
+""".format(
+ GENERATED
+)
+
+# See https://github.com/dropbox/mypy-protobuf/issues/73 for details
+PYTHON_RESERVED = {
+ "False",
+ "None",
+ "True",
+ "and",
+ "as",
+ "async",
+ "await",
+ "assert",
+ "break",
+ "class",
+ "continue",
+ "def",
+ "del",
+ "elif",
+ "else",
+ "except",
+ "finally",
+ "for",
+ "from",
+ "global",
+ "if",
+ "import",
+ "in",
+ "is",
+ "lambda",
+ "nonlocal",
+ "not",
+ "or",
+ "pass",
+ "raise",
+ "return",
+ "try",
+ "while",
+ "with",
+ "yield",
+}
+
+PROTO_ENUM_RESERVED = {
+ "Name",
+ "Value",
+ "keys",
+ "values",
+ "items",
+}
+
+
+def _mangle_global_identifier(name: str) -> str:
+ """
+ Module level identifiers are mangled and aliased so that they can be disambiguated
+ from fields/enum variants with the same name within the file.
+
+ Eg:
+ Enum variant `Name` or message field `Name` might conflict with a top level
+ message or enum named `Name`, so mangle it with a global___ prefix for
+ internal references. Note that this doesn't affect inner enums/messages
+ because they get fully qualified when referenced within a file"""
+ return "global___{}".format(name)
+
+
+class Descriptors(object):
+ def __init__(self, request: plugin_pb2.CodeGeneratorRequest) -> None:
+ files = {f.name: f for f in request.proto_file}
+ to_generate = {n: files[n] for n in request.file_to_generate}
+ self.files: Dict[str, d.FileDescriptorProto] = files
+ self.to_generate: Dict[str, d.FileDescriptorProto] = to_generate
+ self.messages: Dict[str, d.DescriptorProto] = {}
+ self.message_to_fd: Dict[str, d.FileDescriptorProto] = {}
+
+ def _add_enums(
+ enums: "RepeatedCompositeFieldContainer[d.EnumDescriptorProto]",
+ prefix: str,
+ _fd: d.FileDescriptorProto,
+ ) -> None:
+ for enum in enums:
+ self.message_to_fd[prefix + enum.name] = _fd
+ self.message_to_fd[prefix + enum.name + ".V"] = _fd
+
+ def _add_messages(
+ messages: "RepeatedCompositeFieldContainer[d.DescriptorProto]",
+ prefix: str,
+ _fd: d.FileDescriptorProto,
+ ) -> None:
+ for message in messages:
+ self.messages[prefix + message.name] = message
+ self.message_to_fd[prefix + message.name] = _fd
+ sub_prefix = prefix + message.name + "."
+ _add_messages(message.nested_type, sub_prefix, _fd)
+ _add_enums(message.enum_type, sub_prefix, _fd)
+
+ for fd in request.proto_file:
+ start_prefix = "." + fd.package + "." if fd.package else "."
+ _add_messages(fd.message_type, start_prefix, fd)
+ _add_enums(fd.enum_type, start_prefix, fd)
+
+
+class PkgWriter(object):
+ """Writes a single pyi file"""
+
+ def __init__(
+ self,
+ fd: d.FileDescriptorProto,
+ descriptors: Descriptors,
+ readable_stubs: bool,
+ relax_strict_optional_primitives: bool,
+ grpc: bool,
+ ) -> None:
+ self.fd = fd
+ self.descriptors = descriptors
+ self.readable_stubs = readable_stubs
+ self.relax_strict_optional_primitives = relax_strict_optional_primitives
+ self.grpc = grpc
+ self.lines: List[str] = []
+ self.indent = ""
+
+ # Set of {x}, where {x} corresponds to `import {x}`
+ self.imports: Set[str] = set()
+ # dictionary of x->(y,z) for `from {x} import {y} as {z}`
+ # if {z} is None, then it shortens to `from {x} import {y}`
+ self.from_imports: Dict[str, Set[Tuple[str, Optional[str]]]] = defaultdict(set)
+
+ # Comments
+ self.source_code_info_by_scl = {
+ tuple(location.path): location for location in fd.source_code_info.location
+ }
+
+ def _import(self, path: str, name: str) -> str:
+ """Imports a stdlib path and returns a handle to it
+ eg. self._import("typing", "Optional") -> "Optional"
+ """
+ imp = path.replace("/", ".")
+ if self.readable_stubs:
+ self.from_imports[imp].add((name, None))
+ return name
+ else:
+ self.imports.add(imp)
+ return imp + "." + name
+
+ def _import_message(self, name: str) -> str:
+ """Import a referenced message and return a handle"""
+ message_fd = self.descriptors.message_to_fd[name]
+ assert message_fd.name.endswith(".proto")
+
+ # Strip off package name
+ if message_fd.package:
+ assert name.startswith("." + message_fd.package + ".")
+ name = name[len("." + message_fd.package + ".") :]
+ else:
+ assert name.startswith(".")
+ name = name[1:]
+
+ # Use prepended "_r_" to disambiguate message names that alias python reserved keywords
+ split = name.split(".")
+ for i, part in enumerate(split):
+ if part in PYTHON_RESERVED:
+ split[i] = "_r_" + part
+ name = ".".join(split)
+
+ # Message defined in this file. Note: GRPC stubs in same .proto are generated into separate files
+ if not self.grpc and message_fd.name == self.fd.name:
+ return name if self.readable_stubs else _mangle_global_identifier(name)
+
+ # Not in file. Must import
+ # Python generated code ignores proto packages, so the only relevant factor is
+ # whether it is in the file or not.
+ import_name = self._import(
+ message_fd.name[:-6].replace("-", "_") + "_pb2", split[0]
+ )
+
+ remains = ".".join(split[1:])
+ if not remains:
+ return import_name
+
+ # remains could either be a direct import of a nested enum or message
+ # from another package.
+ return import_name + "." + remains
+
+ def _builtin(self, name: str) -> str:
+ return self._import("builtins", name)
+
+ @contextmanager
+ def _indent(self) -> Iterator[None]:
+ self.indent = self.indent + " "
+ yield
+ self.indent = self.indent[:-4]
+
+ def _write_line(self, line: str, *args: Any) -> None:
+ line = line.format(*args)
+ if line == "":
+ self.lines.append(line)
+ else:
+ self.lines.append(self.indent + line)
+
+ def _break_text(self, text_block: str) -> List[str]:
+ if text_block == "":
+ return []
+ return [
+ "{}".format(l[1:] if l.startswith(" ") else l)
+ for l in text_block.rstrip().split("\n")
+ ]
+
+ def _has_comments(self, scl: SourceCodeLocation) -> bool:
+ sci_loc = self.source_code_info_by_scl.get(tuple(scl))
+ return sci_loc is not None and bool(
+ sci_loc.leading_detached_comments
+ or sci_loc.leading_comments
+ or sci_loc.trailing_comments
+ )
+
+ def _write_comments(self, scl: SourceCodeLocation) -> bool:
+ """Return true if any comments were written"""
+ if not self._has_comments(scl):
+ return False
+
+ sci_loc = self.source_code_info_by_scl.get(tuple(scl))
+ assert sci_loc is not None
+
+ lines = []
+ for leading_detached_comment in sci_loc.leading_detached_comments:
+ lines.extend(self._break_text(leading_detached_comment))
+ lines.append("")
+ if sci_loc.leading_comments is not None:
+ lines.extend(self._break_text(sci_loc.leading_comments))
+ # Trailing comments also go in the header - to make sure it gets into the docstring
+ if sci_loc.trailing_comments is not None:
+ lines.extend(self._break_text(sci_loc.trailing_comments))
+
+ if len(lines) == 1:
+ self._write_line('"""{}"""', lines[0])
+ else:
+ for i, line in enumerate(lines):
+ if i == 0:
+ self._write_line('"""{}', line)
+ else:
+ self._write_line("{}", line)
+ self._write_line('"""')
+
+ return True
+
+ def write_enum_values(
+ self,
+ values: Iterable[Tuple[int, d.EnumValueDescriptorProto]],
+ value_type: str,
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ for i, val in values:
+ if val.name in PYTHON_RESERVED:
+ continue
+
+ scl = scl_prefix + [i]
+ self._write_line(
+ "{} = {}({})",
+ val.name,
+ value_type,
+ val.number,
+ )
+ if self._write_comments(scl):
+ self._write_line("") # Extra newline to separate
+
+ def write_module_attributes(self) -> None:
+ l = self._write_line
+ l(
+ "DESCRIPTOR: {} = ...",
+ self._import("google.protobuf.descriptor", "FileDescriptor"),
+ )
+ l("")
+
+ def write_enums(
+ self,
+ enums: Iterable[d.EnumDescriptorProto],
+ prefix: str,
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+ for i, enum in enumerate(enums):
+ class_name = (
+ enum.name if enum.name not in PYTHON_RESERVED else "_r_" + enum.name
+ )
+ value_type_fq = prefix + class_name + ".V"
+
+ l(
+ "class {}({}, metaclass={}):",
+ class_name,
+ "_" + enum.name,
+ "_" + enum.name + "EnumTypeWrapper",
+ )
+ with self._indent():
+ scl = scl_prefix + [i]
+ self._write_comments(scl)
+ l("pass")
+ l("class {}:", "_" + enum.name)
+ with self._indent():
+ l(
+ "V = {}('V', {})",
+ self._import("typing", "NewType"),
+ self._builtin("int"),
+ )
+ l(
+ "class {}({}[{}], {}):",
+ "_" + enum.name + "EnumTypeWrapper",
+ self._import(
+ "google.protobuf.internal.enum_type_wrapper", "_EnumTypeWrapper"
+ ),
+ "_" + enum.name + ".V",
+ self._builtin("type"),
+ )
+ with self._indent():
+ l(
+ "DESCRIPTOR: {} = ...",
+ self._import("google.protobuf.descriptor", "EnumDescriptor"),
+ )
+ self.write_enum_values(
+ [
+ (i, v)
+ for i, v in enumerate(enum.value)
+ if v.name not in PROTO_ENUM_RESERVED
+ ],
+ value_type_fq,
+ scl + [d.EnumDescriptorProto.VALUE_FIELD_NUMBER],
+ )
+ l("")
+
+ self.write_enum_values(
+ enumerate(enum.value),
+ value_type_fq,
+ scl + [d.EnumDescriptorProto.VALUE_FIELD_NUMBER],
+ )
+ if prefix == "" and not self.readable_stubs:
+ l("{} = {}", _mangle_global_identifier(class_name), class_name)
+ l("")
+ l("")
+
+ def write_messages(
+ self,
+ messages: Iterable[d.DescriptorProto],
+ prefix: str,
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+
+ for i, desc in enumerate(messages):
+ qualified_name = prefix + desc.name
+
+ # Reproduce some hardcoded logic from the protobuf implementation - where
+ # some specific "well_known_types" generated protos have additional
+ # base classes
+ addl_base = u""
+ if self.fd.package + "." + desc.name in WKTBASES:
+ # chop off the .proto - and import the well known type
+ # eg `from google.protobuf.duration import Duration`
+ well_known_type = WKTBASES[self.fd.package + "." + desc.name]
+ addl_base = ", " + self._import(
+ "google.protobuf.internal.well_known_types",
+ well_known_type.__name__,
+ )
+
+ class_name = (
+ desc.name if desc.name not in PYTHON_RESERVED else "_r_" + desc.name
+ )
+ message_class = self._import("google.protobuf.message", "Message")
+ l("class {}({}{}):", class_name, message_class, addl_base)
+ with self._indent():
+ scl = scl_prefix + [i]
+ self._write_comments(scl)
+
+ l(
+ "DESCRIPTOR: {} = ...",
+ self._import("google.protobuf.descriptor", "Descriptor"),
+ )
+
+ # Nested enums/messages
+ self.write_enums(
+ desc.enum_type,
+ qualified_name + ".",
+ scl + [d.DescriptorProto.ENUM_TYPE_FIELD_NUMBER],
+ )
+ self.write_messages(
+ desc.nested_type,
+ qualified_name + ".",
+ scl + [d.DescriptorProto.NESTED_TYPE_FIELD_NUMBER],
+ )
+
+ # integer constants for field numbers
+ for f in desc.field:
+ l("{}_FIELD_NUMBER: {}", f.name.upper(), self._builtin("int"))
+
+ for idx, field in enumerate(desc.field):
+ if field.name in PYTHON_RESERVED:
+ continue
+
+ if (
+ is_scalar(field)
+ and field.label != d.FieldDescriptorProto.LABEL_REPEATED
+ ):
+ # Scalar non repeated fields are r/w
+ l("{}: {} = ...", field.name, self.python_type(field))
+ if self._write_comments(
+ scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx]
+ ):
+ l("")
+ else:
+ # r/o Getters for non-scalar fields and scalar-repeated fields
+ scl_field = scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx]
+ l("@property")
+ l(
+ "def {}(self) -> {}:{}",
+ field.name,
+ self.python_type(field),
+ " ..." if not self._has_comments(scl_field) else "",
+ )
+ if self._has_comments(scl_field):
+ with self._indent():
+ self._write_comments(scl_field)
+ l("pass")
+
+ self.write_extensions(
+ desc.extension, scl + [d.DescriptorProto.EXTENSION_FIELD_NUMBER]
+ )
+
+ # Constructor
+ self_arg = (
+ "self_" if any(f.name == "self" for f in desc.field) else "self"
+ )
+ l("def __init__({},", self_arg)
+ with self._indent():
+ constructor_fields = [
+ f for f in desc.field if f.name not in PYTHON_RESERVED
+ ]
+ if len(constructor_fields) > 0:
+ # Only positional args allowed
+ # See https://github.com/dropbox/mypy-protobuf/issues/71
+ l("*,")
+ for field in constructor_fields:
+ if (
+ self.fd.syntax == "proto3"
+ and is_scalar(field)
+ and field.label != d.FieldDescriptorProto.LABEL_REPEATED
+ and not self.relax_strict_optional_primitives
+ ):
+ l(
+ "{} : {} = ...,",
+ field.name,
+ self.python_type(field, generic_container=True),
+ )
+ else:
+ l(
+ "{} : {}[{}] = ...,",
+ field.name,
+ self._import("typing", "Optional"),
+ self.python_type(field, generic_container=True),
+ )
+ l(") -> None: ...")
+
+ self.write_stringly_typed_fields(desc)
+
+ if prefix == "" and not self.readable_stubs:
+ l("{} = {}", _mangle_global_identifier(class_name), class_name)
+ l("")
+
+ def write_stringly_typed_fields(self, desc: d.DescriptorProto) -> None:
+ """Type the stringly-typed methods as a Union[Literal, Literal ...]"""
+ l = self._write_line
+ # HasField, ClearField, WhichOneof accepts both bytes/unicode
+ # HasField only supports singular. ClearField supports repeated as well
+ # In proto3, HasField only supports message fields and optional fields
+ # HasField always supports oneof fields
+ hf_fields = [
+ f.name
+ for f in desc.field
+ if f.HasField("oneof_index")
+ or (
+ f.label != d.FieldDescriptorProto.LABEL_REPEATED
+ and (
+ self.fd.syntax != "proto3"
+ or f.type == d.FieldDescriptorProto.TYPE_MESSAGE
+ or f.proto3_optional
+ )
+ )
+ ]
+ cf_fields = [f.name for f in desc.field]
+ wo_fields = {
+ oneof.name: [
+ f.name
+ for f in desc.field
+ if f.HasField("oneof_index") and f.oneof_index == idx
+ ]
+ for idx, oneof in enumerate(desc.oneof_decl)
+ }
+
+ hf_fields.extend(wo_fields.keys())
+ cf_fields.extend(wo_fields.keys())
+
+ hf_fields_text = ",".join(
+ sorted('u"{}",b"{}"'.format(name, name) for name in hf_fields)
+ )
+ cf_fields_text = ",".join(
+ sorted('u"{}",b"{}"'.format(name, name) for name in cf_fields)
+ )
+
+ if not hf_fields and not cf_fields and not wo_fields:
+ return
+
+ if hf_fields:
+ l(
+ "def HasField(self, field_name: {}[{}]) -> {}: ...",
+ self._import("typing_extensions", "Literal"),
+ hf_fields_text,
+ self._builtin("bool"),
+ )
+ if cf_fields:
+ l(
+ "def ClearField(self, field_name: {}[{}]) -> None: ...",
+ self._import("typing_extensions", "Literal"),
+ cf_fields_text,
+ )
+
+ for wo_field, members in sorted(wo_fields.items()):
+ if len(wo_fields) > 1:
+ l("@{}", self._import("typing", "overload"))
+ l(
+ "def WhichOneof(self, oneof_group: {}[{}]) -> {}[{}[{}]]: ...",
+ self._import("typing_extensions", "Literal"),
+ # Accepts both unicode and bytes in both py2 and py3
+ 'u"{}",b"{}"'.format(wo_field, wo_field),
+ self._import("typing", "Optional"),
+ self._import("typing_extensions", "Literal"),
+ # Returns `str` in both py2 and py3 (bytes in py2, unicode in py3)
+ ",".join('"{}"'.format(m) for m in members),
+ )
+
+ def write_extensions(
+ self,
+ extensions: Sequence[d.FieldDescriptorProto],
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+ for i, ext in enumerate(extensions):
+ scl = scl_prefix + [i]
+
+ l(
+ "{}: {}[{}, {}] = ...",
+ ext.name,
+ self._import(
+ "google.protobuf.internal.extension_dict",
+ "_ExtensionFieldDescriptor",
+ ),
+ self._import_message(ext.extendee),
+ self.python_type(ext),
+ )
+ self._write_comments(scl)
+ l("")
+
+ def write_methods(
+ self,
+ service: d.ServiceDescriptorProto,
+ is_abstract: bool,
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+ methods = [
+ (i, m)
+ for i, m in enumerate(service.method)
+ if m.name not in PYTHON_RESERVED
+ ]
+ if not methods:
+ l("pass")
+ for i, method in methods:
+ if is_abstract:
+ l("@{}", self._import("abc", "abstractmethod"))
+ l("def {}(self,", method.name)
+ with self._indent():
+ l(
+ "rpc_controller: {},",
+ self._import("google.protobuf.service", "RpcController"),
+ )
+ l("request: {},", self._import_message(method.input_type))
+ l(
+ "done: {}[{}[[{}], None]],",
+ self._import("typing", "Optional"),
+ self._import("typing", "Callable"),
+ self._import_message(method.output_type),
+ )
+
+ scl_method = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
+ l(
+ ") -> {}[{}]:{}",
+ self._import("concurrent.futures", "Future"),
+ self._import_message(method.output_type),
+ " ..." if not self._has_comments(scl_method) else "",
+ )
+ if self._has_comments(scl_method):
+ with self._indent():
+ self._write_comments(scl_method)
+ l("pass")
+
+ def write_services(
+ self,
+ services: Iterable[d.ServiceDescriptorProto],
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+ for i, service in enumerate(services):
+ scl = scl_prefix + [i]
+ class_name = (
+ service.name
+ if service.name not in PYTHON_RESERVED
+ else "_r_" + service.name
+ )
+ # The service definition interface
+ l(
+ "class {}({}, metaclass={}):",
+ class_name,
+ self._import("google.protobuf.service", "Service"),
+ self._import("abc", "ABCMeta"),
+ )
+ with self._indent():
+ self._write_comments(scl)
+ self.write_methods(service, is_abstract=True, scl_prefix=scl)
+
+ # The stub client
+ l("class {}({}):", service.name + "_Stub", class_name)
+ with self._indent():
+ self._write_comments(scl)
+ l(
+ "def __init__(self, rpc_channel: {}) -> None: ...",
+ self._import("google.protobuf.service", "RpcChannel"),
+ )
+ self.write_methods(service, is_abstract=False, scl_prefix=scl)
+
+ def _import_casttype(self, casttype: str) -> str:
+ split = casttype.split(".")
+ assert (
+ len(split) == 2
+ ), "mypy_protobuf.[casttype,keytype,valuetype] is expected to be of format path/to/file.TypeInFile"
+ pkg = split[0].replace("/", ".")
+ return self._import(pkg, split[1])
+
+ def _map_key_value_types(
+ self,
+ map_field: d.FieldDescriptorProto,
+ key_field: d.FieldDescriptorProto,
+ value_field: d.FieldDescriptorProto,
+ ) -> Tuple[str, str]:
+ key_casttype = map_field.options.Extensions[extensions_pb2.keytype]
+ ktype = (
+ self._import_casttype(key_casttype)
+ if key_casttype
+ else self.python_type(key_field)
+ )
+ value_casttype = map_field.options.Extensions[extensions_pb2.valuetype]
+ vtype = (
+ self._import_casttype(value_casttype)
+ if value_casttype
+ else self.python_type(value_field)
+ )
+ return ktype, vtype
+
+ def _callable_type(self, method: d.MethodDescriptorProto) -> str:
+ if method.client_streaming:
+ if method.server_streaming:
+ return self._import("grpc", "StreamStreamMultiCallable")
+ else:
+ return self._import("grpc", "StreamUnaryMultiCallable")
+ else:
+ if method.server_streaming:
+ return self._import("grpc", "UnaryStreamMultiCallable")
+ else:
+ return self._import("grpc", "UnaryUnaryMultiCallable")
+
+ def _input_type(
+ self, method: d.MethodDescriptorProto, use_stream_iterator: bool = True
+ ) -> str:
+ result = self._import_message(method.input_type)
+ if use_stream_iterator and method.client_streaming:
+ result = "{}[{}]".format(self._import("typing", "Iterator"), result)
+ return result
+
+ def _output_type(
+ self, method: d.MethodDescriptorProto, use_stream_iterator: bool = True
+ ) -> str:
+ result = self._import_message(method.output_type)
+ if use_stream_iterator and method.server_streaming:
+ result = "{}[{}]".format(self._import("typing", "Iterator"), result)
+ return result
+
+ def write_grpc_methods(
+ self, service: d.ServiceDescriptorProto, scl_prefix: SourceCodeLocation
+ ) -> None:
+ l = self._write_line
+ methods = [
+ (i, m)
+ for i, m in enumerate(service.method)
+ if m.name not in PYTHON_RESERVED
+ ]
+ if not methods:
+ l("pass")
+ l("")
+ for i, method in methods:
+ scl = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
+
+ l("@{}", self._import("abc", "abstractmethod"))
+ l("def {}(self,", method.name)
+ with self._indent():
+ l("request: {},", self._input_type(method))
+ l("context: {},", self._import("grpc", "ServicerContext"))
+ l(
+ ") -> {}:{}",
+ self._output_type(method),
+ " ..." if not self._has_comments(scl) else "",
+ ),
+ if self._has_comments(scl):
+ with self._indent():
+ self._write_comments(scl)
+ l("pass")
+ l("")
+
+ def write_grpc_stub_methods(
+ self, service: d.ServiceDescriptorProto, scl_prefix: SourceCodeLocation
+ ) -> None:
+ l = self._write_line
+ methods = [
+ (i, m)
+ for i, m in enumerate(service.method)
+ if m.name not in PYTHON_RESERVED
+ ]
+ if not methods:
+ l("pass")
+ l("")
+ for i, method in methods:
+ scl = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
+
+ l("{}: {}[", method.name, self._callable_type(method))
+ with self._indent():
+ l("{},", self._input_type(method, False))
+ l("{}] = ...", self._output_type(method, False))
+ self._write_comments(scl)
+ l("")
+
+ def write_grpc_services(
+ self,
+ services: Iterable[d.ServiceDescriptorProto],
+ scl_prefix: SourceCodeLocation,
+ ) -> None:
+ l = self._write_line
+ for i, service in enumerate(services):
+ if service.name in PYTHON_RESERVED:
+ continue
+
+ scl = scl_prefix + [i]
+
+ # The stub client
+ l("class {}Stub:", service.name)
+ with self._indent():
+ self._write_comments(scl)
+ l(
+ "def __init__(self, channel: {}) -> None: ...",
+ self._import("grpc", "Channel"),
+ )
+ self.write_grpc_stub_methods(service, scl)
+ l("")
+
+ # The service definition interface
+ l(
+ "class {}Servicer(metaclass={}):",
+ service.name,
+ self._import("abc", "ABCMeta"),
+ )
+ with self._indent():
+ self._write_comments(scl)
+ self.write_grpc_methods(service, scl)
+ l("")
+ l(
+ "def add_{}Servicer_to_server(servicer: {}Servicer, server: {}) -> None: ...",
+ service.name,
+ service.name,
+ self._import("grpc", "Server"),
+ )
+ l("")
+
+ def python_type(
+ self, field: d.FieldDescriptorProto, generic_container: bool = False
+ ) -> str:
+ """
+ generic_container
+ if set, type the field with generic interfaces. Eg.
+ - Iterable[int] rather than RepeatedScalarFieldContainer[int]
+ - Mapping[k, v] rather than MessageMap[k, v]
+ Can be useful for input types (eg constructor)
+ """
+ casttype = field.options.Extensions[extensions_pb2.casttype]
+ if casttype:
+ return self._import_casttype(casttype)
+
+ mapping: Dict[d.FieldDescriptorProto.Type.V, Callable[[], str]] = {
+ d.FieldDescriptorProto.TYPE_DOUBLE: lambda: self._builtin("float"),
+ d.FieldDescriptorProto.TYPE_FLOAT: lambda: self._builtin("float"),
+ d.FieldDescriptorProto.TYPE_INT64: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_UINT64: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_FIXED64: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_SFIXED64: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_SINT64: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_INT32: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_UINT32: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_FIXED32: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_SFIXED32: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_SINT32: lambda: self._builtin("int"),
+ d.FieldDescriptorProto.TYPE_BOOL: lambda: self._builtin("bool"),
+ d.FieldDescriptorProto.TYPE_STRING: lambda: self._import("typing", "Text"),
+ d.FieldDescriptorProto.TYPE_BYTES: lambda: self._builtin("bytes"),
+ d.FieldDescriptorProto.TYPE_ENUM: lambda: self._import_message(
+ field.type_name + ".V"
+ ),
+ d.FieldDescriptorProto.TYPE_MESSAGE: lambda: self._import_message(
+ field.type_name
+ ),
+ d.FieldDescriptorProto.TYPE_GROUP: lambda: self._import_message(
+ field.type_name
+ ),
+ }
+
+ assert field.type in mapping, "Unrecognized type: " + repr(field.type)
+ field_type = mapping[field.type]()
+
+ # For non-repeated fields, we're done!
+ if field.label != d.FieldDescriptorProto.LABEL_REPEATED:
+ return field_type
+
+ # Scalar repeated fields go in RepeatedScalarFieldContainer
+ if is_scalar(field):
+ container = (
+ self._import("typing", "Iterable")
+ if generic_container
+ else self._import(
+ "google.protobuf.internal.containers",
+ "RepeatedScalarFieldContainer",
+ )
+ )
+ return "{}[{}]".format(container, field_type)
+
+ # non-scalar repeated map fields go in ScalarMap/MessageMap
+ msg = self.descriptors.messages[field.type_name]
+ if msg.options.map_entry:
+ # map generates a special Entry wrapper message
+ if generic_container:
+ container = self._import("typing", "Mapping")
+ elif is_scalar(msg.field[1]):
+ container = self._import(
+ "google.protobuf.internal.containers", "ScalarMap"
+ )
+ else:
+ container = self._import(
+ "google.protobuf.internal.containers", "MessageMap"
+ )
+ ktype, vtype = self._map_key_value_types(field, msg.field[0], msg.field[1])
+ return "{}[{}, {}]".format(container, ktype, vtype)
+
+ # non-scalar repeated fields go in RepeatedCompositeFieldContainer
+ container = (
+ self._import("typing", "Iterable")
+ if generic_container
+ else self._import(
+ "google.protobuf.internal.containers",
+ "RepeatedCompositeFieldContainer",
+ )
+ )
+ return "{}[{}]".format(container, field_type)
+
+ def write(self) -> str:
+ for reexport_idx in self.fd.public_dependency:
+ reexport_file = self.fd.dependency[reexport_idx]
+ reexport_fd = self.descriptors.files[reexport_file]
+ reexport_imp = (
+ reexport_file[:-6].replace("-", "_").replace("/", ".") + "_pb2"
+ )
+ names = (
+ [m.name for m in reexport_fd.message_type]
+ + [m.name for m in reexport_fd.enum_type]
+ + [v.name for m in reexport_fd.enum_type for v in m.value]
+ + [m.name for m in reexport_fd.extension]
+ )
+ if reexport_fd.options.py_generic_services:
+ names.extend(m.name for m in reexport_fd.service)
+
+ if names:
+ # n,n to force a reexport (from x import y as y)
+ self.from_imports[reexport_imp].update((n, n) for n in names)
+
+ import_lines = []
+ for pkg in sorted(self.imports):
+ import_lines.append(u"import {}".format(pkg))
+
+ for pkg, items in sorted(self.from_imports.items()):
+ import_lines.append(u"from {} import (".format(pkg))
+ for (name, reexport_name) in sorted(items):
+ if reexport_name is None:
+ import_lines.append(u" {},".format(name))
+ else:
+ import_lines.append(u" {} as {},".format(name, reexport_name))
+ import_lines.append(u")\n")
+ import_lines.append("")
+
+ return "\n".join(import_lines + self.lines)
+
+
+def is_scalar(fd: d.FieldDescriptorProto) -> bool:
+ return not (
+ fd.type == d.FieldDescriptorProto.TYPE_MESSAGE
+ or fd.type == d.FieldDescriptorProto.TYPE_GROUP
+ )
+
+
+def generate_mypy_stubs(
+ descriptors: Descriptors,
+ response: plugin_pb2.CodeGeneratorResponse,
+ quiet: bool,
+ readable_stubs: bool,
+ relax_strict_optional_primitives: bool,
+) -> None:
+ for name, fd in descriptors.to_generate.items():
+ pkg_writer = PkgWriter(
+ fd,
+ descriptors,
+ readable_stubs,
+ relax_strict_optional_primitives,
+ grpc=False,
+ )
+
+ pkg_writer.write_module_attributes()
+ pkg_writer.write_enums(
+ fd.enum_type, "", [d.FileDescriptorProto.ENUM_TYPE_FIELD_NUMBER]
+ )
+ pkg_writer.write_messages(
+ fd.message_type, "", [d.FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER]
+ )
+ pkg_writer.write_extensions(
+ fd.extension, [d.FileDescriptorProto.EXTENSION_FIELD_NUMBER]
+ )
+ if fd.options.py_generic_services:
+ pkg_writer.write_services(
+ fd.service, [d.FileDescriptorProto.SERVICE_FIELD_NUMBER]
+ )
+
+ assert name == fd.name
+ assert fd.name.endswith(".proto")
+ output = response.file.add()
+ output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2.pyi"
+ output.content = HEADER + pkg_writer.write()
+
+
+def generate_mypy_grpc_stubs(
+ descriptors: Descriptors,
+ response: plugin_pb2.CodeGeneratorResponse,
+ quiet: bool,
+ readable_stubs: bool,
+ relax_strict_optional_primitives: bool,
+) -> None:
+ for name, fd in descriptors.to_generate.items():
+ pkg_writer = PkgWriter(
+ fd,
+ descriptors,
+ readable_stubs,
+ relax_strict_optional_primitives,
+ grpc=True,
+ )
+ pkg_writer.write_grpc_services(
+ fd.service, [d.FileDescriptorProto.SERVICE_FIELD_NUMBER]
+ )
+
+ assert name == fd.name
+ assert fd.name.endswith(".proto")
+ output = response.file.add()
+ output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2_grpc.pyi"
+ output.content = HEADER + pkg_writer.write()
+
+
+@contextmanager
+def code_generation() -> Iterator[
+ Tuple[plugin_pb2.CodeGeneratorRequest, plugin_pb2.CodeGeneratorResponse],
+]:
+ if len(sys.argv) > 1 and sys.argv[1] in ("-V", "--version"):
+ print("mypy-protobuf " + __version__)
+ sys.exit(0)
+
+ # Read request message from stdin
+ data = sys.stdin.buffer.read()
+
+ # Parse request
+ request = plugin_pb2.CodeGeneratorRequest()
+ request.ParseFromString(data)
+
+ # Create response
+ response = plugin_pb2.CodeGeneratorResponse()
+
+ # Declare support for optional proto3 fields
+ response.supported_features |= (
+ plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
+ )
+
+ yield request, response
+
+ # Serialise response message
+ output = response.SerializeToString()
+
+ # Write to stdout
+ sys.stdout.buffer.write(output)
+
+
+def main() -> None:
+ # Generate mypy
+ with code_generation() as (request, response):
+ generate_mypy_stubs(
+ Descriptors(request),
+ response,
+ "quiet" in request.parameter,
+ "readable_stubs" in request.parameter,
+ "relax_strict_optional_primitives" in request.parameter,
+ )
+
+
+def grpc() -> None:
+ # Generate grpc mypy
+ with code_generation() as (request, response):
+ generate_mypy_grpc_stubs(
+ Descriptors(request),
+ response,
+ "quiet" in request.parameter,
+ "readable_stubs" in request.parameter,
+ "relax_strict_optional_primitives" in request.parameter,
+ )
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/python/mypy-protobuf/patches/01-arcadia.patch b/contrib/python/mypy-protobuf/patches/01-arcadia.patch
new file mode 100644
index 00000000000..6f718593f33
--- /dev/null
+++ b/contrib/python/mypy-protobuf/patches/01-arcadia.patch
@@ -0,0 +1,20 @@
+--- contrib/python/mypy-protobuf/mypy_protobuf/main.py (index)
++++ contrib/python/mypy-protobuf/mypy_protobuf/main.py (working tree)
+@@ -989,8 +989,6 @@ def generate_mypy_stubs(
+ output = response.file.add()
+ output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2.pyi"
+ output.content = HEADER + pkg_writer.write()
+- if not quiet:
+- print("Writing mypy to", output.name, file=sys.stderr)
+
+
+ def generate_mypy_grpc_stubs(
+@@ -1017,8 +1015,6 @@ def generate_mypy_grpc_stubs(
+ output = response.file.add()
+ output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2_grpc.pyi"
+ output.content = HEADER + pkg_writer.write()
+- if not quiet:
+- print("Writing mypy to", output.name, file=sys.stderr)
+
+
+ @contextmanager
diff --git a/contrib/python/mypy-protobuf/ya.make b/contrib/python/mypy-protobuf/ya.make
new file mode 100644
index 00000000000..4f86fe6bf97
--- /dev/null
+++ b/contrib/python/mypy-protobuf/ya.make
@@ -0,0 +1,33 @@
+PY3_LIBRARY()
+
+OWNER(torkve g:python-contrib)
+
+VERSION(2.10)
+
+LICENSE(Apache-2.0)
+
+PEERDIR(
+ contrib/python/protobuf
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ mypy_protobuf/__init__.py
+ mypy_protobuf/extensions_pb2.py
+ mypy_protobuf/main.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/mypy-protobuf/
+ .dist-info/METADATA
+ .dist-info/entry_points.txt
+ .dist-info/top_level.txt
+)
+
+END()
+
+RECURSE(
+ bin
+)