path: root/contrib/python/mypy-protobuf
author      monster <monster@ydb.tech>  2022-07-07 14:41:37 +0300
committer   monster <monster@ydb.tech>  2022-07-07 14:41:37 +0300
commit      06e5c21a835c0e923506c4ff27929f34e00761c2 (patch)
tree        75efcbc6854ef9bd476eb8bf00cc5c900da436a2 /contrib/python/mypy-protobuf
parent      03f024c4412e3aa613bb543cf1660176320ba8f4 (diff)
download    ydb-06e5c21a835c0e923506c4ff27929f34e00761c2.tar.gz
fix ya.make
Diffstat (limited to 'contrib/python/mypy-protobuf')
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/METADATA              |   18
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/entry_points.txt      |    4
-rw-r--r--  contrib/python/mypy-protobuf/.dist-info/top_level.txt         |    1
-rw-r--r--  contrib/python/mypy-protobuf/LICENSE                          |  202
-rw-r--r--  contrib/python/mypy-protobuf/README.md                        |  281
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/__init__.py        |    0
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py  |   34
-rw-r--r--  contrib/python/mypy-protobuf/mypy_protobuf/main.py            | 1086
-rw-r--r--  contrib/python/mypy-protobuf/patches/01-arcadia.patch         |   20
9 files changed, 0 insertions, 1646 deletions
diff --git a/contrib/python/mypy-protobuf/.dist-info/METADATA b/contrib/python/mypy-protobuf/.dist-info/METADATA
deleted file mode 100644
index 3d16b659353..00000000000
--- a/contrib/python/mypy-protobuf/.dist-info/METADATA
+++ /dev/null
@@ -1,18 +0,0 @@
-Metadata-Version: 2.1
-Name: mypy-protobuf
-Version: 3.2.0
-Summary: Generate mypy stub files from protobuf specs
-Home-page: https://github.com/dropbox/mypy-protobuf
-Author: Nipunn Koorapati
-Author-email: nipunn1313@gmail.com
-License: Apache License 2.0
-Download-URL: https://github.com/dropbox/mypy-protobuf/releases
-Keywords: mypy proto dropbox
-Platform: UNKNOWN
-Requires-Python: >=3.6
-License-File: LICENSE
-Requires-Dist: protobuf (>=3.19.3)
-Requires-Dist: types-protobuf (>=3.19.5)
-
-UNKNOWN
-
diff --git a/contrib/python/mypy-protobuf/.dist-info/entry_points.txt b/contrib/python/mypy-protobuf/.dist-info/entry_points.txt
deleted file mode 100644
index 9dbfc984183..00000000000
--- a/contrib/python/mypy-protobuf/.dist-info/entry_points.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-[console_scripts]
-protoc-gen-mypy = mypy_protobuf.main:main
-protoc-gen-mypy_grpc = mypy_protobuf.main:grpc
-
diff --git a/contrib/python/mypy-protobuf/.dist-info/top_level.txt b/contrib/python/mypy-protobuf/.dist-info/top_level.txt
deleted file mode 100644
index 677dcb137be..00000000000
--- a/contrib/python/mypy-protobuf/.dist-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-mypy_protobuf
diff --git a/contrib/python/mypy-protobuf/LICENSE b/contrib/python/mypy-protobuf/LICENSE
deleted file mode 100644
index bd760ffabda..00000000000
--- a/contrib/python/mypy-protobuf/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright (c) 2017 Dropbox, Inc.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/contrib/python/mypy-protobuf/README.md b/contrib/python/mypy-protobuf/README.md
deleted file mode 100644
index 19ecd5824aa..00000000000
--- a/contrib/python/mypy-protobuf/README.md
+++ /dev/null
@@ -1,281 +0,0 @@
-mypy-protobuf: Generate mypy stub files from protobuf specs
-
-[![CI](https://github.com/dropbox/mypy-protobuf/workflows/CI/badge.svg)](https://github.com/dropbox/mypy-protobuf/actions?query=branch%3Amain)
-[![pypi](https://img.shields.io/pypi/v/mypy-protobuf?logo=Pypi)](https://pypi.org/project/mypy-protobuf/)
-[![license](https://img.shields.io/github/license/dropbox/mypy-protobuf)](https://github.com/dropbox/mypy-protobuf/blob/main/LICENSE)
-===========================================================
-
-Starting with mypy-protobuf 3.0.0, only Python 3 targets are supported.
-2.10 is the last version of mypy-protobuf that supports targeting Python 2.7.
-
-See [Changelog](CHANGELOG.md) for recent changes.
-
-## Requirements to run mypy-protobuf
-Earlier releases might work, but aren't tested
-- [protoc >= 3.19.3](https://github.com/protocolbuffers/protobuf/releases)
-- [python-protobuf >= 3.19.3](https://pypi.org/project/protobuf/) - matching protoc release
-- [python >= 3.7](https://www.python.org/downloads/source/) - for running the mypy-protobuf plugin.
-
-## Requirements to run typecheckers on stubs generated by mypy-protobuf
-Earlier releases might work, but aren't tested
-- [mypy >= v0.931](https://pypi.org/project/mypy) or [pyright >= 1.1.206](https://github.com/microsoft/pyright)
-- [python-protobuf >= 3.19.3](https://pypi.org/project/protobuf/) - matching protoc release
-- [types-protobuf >= 3.19.5](https://pypi.org/project/types-protobuf/) - for stubs from the google.protobuf library
-
-### To run typecheckers on code generated with the grpc plugin, you'll additionally need
-Earlier releases might work, but aren't tested
-- [grpcio>=1.40.0](https://pypi.org/project/grpcio/)
-- [grpcio-tools>=1.40.0](https://pypi.org/project/grpcio-tools/)
-- [grpc-stubs>=1.24.7](https://pypi.org/project/grpc-stubs/)
-
-Other configurations may work, but are not continuously tested currently.
-We would be open to expanding this list - file an issue on the issue tracker.
-
-## Installation
-
-The plugin can be installed with
-```
-pip3 install mypy-protobuf
-```
-To install an unreleased version
-```
-REV=main # or whichever unreleased git rev you'd like
-pip3 install git+https://github.com/dropbox/mypy-protobuf.git@$REV
-
-# For older (1.x) versions of mypy protobuf - you may need
-pip3 install git+https://github.com/dropbox/mypy-protobuf.git@$REV#subdirectory=python
-```
-
-In order to run mypy on the generated code, you'll need to install
-```
-pip3 install mypy>=0.910 types-protobuf>=0.1.14
-```
-
-# Usage
-
-On POSIX, protoc-gen-mypy is installed to python's executable bin directory. Assuming that's
-on your $PATH, you can run
-```
-protoc --python_out=output/location --mypy_out=output/location
-```
-Alternatively, you can explicitly provide the path:
-```
-protoc --plugin=protoc-gen-mypy=path/to/protoc-gen-mypy --python_out=output/location --mypy_out=output/location
-```
-Check the version number with
-```
-> protoc-gen-mypy --version
-```
-
-## Implementation
-
-The implementation of the plugin is in `mypy_protobuf/main.py`, which installs as
-the executable `protoc-gen-mypy`. On Windows it installs as `protoc-gen-mypy.exe`.
-
-## Features
-
-See [Changelog](CHANGELOG.md) for a full listing.
-
-### Bring comments from .proto files to docstrings in .pyi files
-
-Comments in the .proto files on messages, fields, enums, enum variants, extensions, services, and methods
-will appear as docstrings in the .pyi files. This is useful in IDEs for showing completions with comments.
-
-### Types enum int values more strongly
-
-Enum int values produce stubs that wrap the int values in a `NewType`:
-```proto
-enum MyEnum {
- HELLO = 0;
- WORLD = 1;
-}
-```
-This will yield an [enum type wrapper](https://github.com/python/typeshed/blob/16ae4c61201cd8b96b8b22cdfb2ab9e89ba5bcf2/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi) whose methods are typed with `MyEnum.ValueType` (a `NewType` over `int`) rather than plain `int`.
-This allows mypy to catch bugs where the wrong enum value is being used.
-
-Calling code may be typed as follows.
-
-In python >= 3.7
-```python
-# May need [PEP 563](https://www.python.org/dev/peps/pep-0563/) to postpone evaluation of annotations
-# from __future__ import annotations # Not needed with python>=3.10 or protobuf>=3.20.0
-def f(x: MyEnum.ValueType):
- print(x)
-f(MyEnum.Value("HELLO"))
-```
-
-With protobuf < 3.20.0, for usages of `cast`, the type of `x` must be quoted.
-With protobuf >= 3.20.0, `ValueType` exists in the generated python code
-([upstream protobuf](https://github.com/protocolbuffers/protobuf/pull/8182)) and quotes aren't needed.
-```python
-cast('MyEnum.ValueType', x)
-```
-
-Similarly, for type aliases with protobuf < 3.20.0, you must either quote the type or hide it behind `TYPE_CHECKING`
-```python
-from typing import Tuple, TYPE_CHECKING
-HELLO = Tuple['MyEnum.ValueType', 'MyEnum.ValueType']
-if TYPE_CHECKING:
- HELLO = Tuple[MyEnum.ValueType, MyEnum.ValueType]
-```
-
-#### Enum int impl details
-
-mypy-protobuf autogenerates an instance of the EnumTypeWrapper as follows.
-
-```python
-class _MyEnum:
- ValueType = typing.NewType('ValueType', builtins.int)
- V: typing_extensions.TypeAlias = ValueType
-class _MyEnumEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_MyEnum.ValueType], builtins.type):
- DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
- HELLO: _MyEnum.ValueType # 0
- WORLD: _MyEnum.ValueType # 1
-class MyEnum(_MyEnum, metaclass=_MyEnumEnumTypeWrapper):
- pass
-
-HELLO: MyEnum.ValueType # 0
-WORLD: MyEnum.ValueType # 1
-```
-
-- `_MyEnumEnumTypeWrapper` extends the `EnumTypeWrapper` to take/return `MyEnum.ValueType` rather than `int`
-- `MyEnum` is an instance of the `EnumTypeWrapper`
-- The use of `_MyEnum` and the metaclass is an implementation detail that makes `MyEnum.ValueType` a valid type without a circular dependency
-- `V` is supported as an alias of `ValueType` for backward compatibility
-
-
-
-### Supports generating type wrappers for fields and maps
-
-M.proto
-```proto
-message M {
- uint32 user_id = 1 [(mypy_protobuf.casttype)="mymod.UserId"];
- map<uint32, string> email_by_uid = 2 [
- (mypy_protobuf.keytype)="path/to/mymod.UserId",
- (mypy_protobuf.valuetype)="path/to/mymod.Email"
- ];
-}
-```
-mymod.py
-```python
-UserId = NewType("UserId", int)
-Email = NewType("Email", Text)
-```
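A minimal, hedged sketch of how calling code then type-checks against the generated stubs - it assumes the generated module is named `M_pb2` and that `mymod` is importable (both taken from the example above); everything else is illustrative, not verbatim plugin output:
```python
# Illustrative only: M_pb2 is assumed to be the module protoc generated from M.proto.
from M_pb2 import M
from mymod import Email, UserId

m = M(user_id=UserId(42))
m.email_by_uid[UserId(42)] = Email("someone@example.com")

uid: UserId = m.user_id        # OK: the stub types user_id as mymod.UserId
# m.user_id = 42               # mypy error: plain int is not UserId
# m.email_by_uid[7] = "x"      # mypy error: key/value must be UserId/Email
```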
-
-### `py_generic_services`
-If `py_generic_services` is set in your proto file, then mypy-protobuf will
-generate service stubs. If you want gRPC stubs instead, use the GRPC instructions below.
-
-### `readable_stubs`
-If `readable_stubs` is set, mypy-protobuf will generate easier-to-read stubs. The downside
-to this approach is that it's possible to generate stubs which do not pass mypy, particularly
-in the case of name collisions. By default, mypy-protobuf generates stubs with fully qualified
-imports and mangled global-level identifiers to defend against name collisions between global
-identifiers and field names.
-
-If you're ok with this risk, try it out!
-```
-protoc --python_out=output/location --mypy_out=readable_stubs:output/location
-```
-
-### `relax_strict_optional_primitives`
-
-If you are using proto3, then primitives cannot be represented as NULL on the wire -
-only as their zero value. By default, mypy-protobuf types message constructors to have
-non-nullable primitives (e.g. `int` instead of `Optional[int]`). python-protobuf itself will
-internally convert None to the zero value. If you intentionally want to use this behavior,
-set this flag! We recommend avoiding it, as it can lead to developer error - confusing
-NULL and 0 as distinct values on the wire. However, it may be helpful when migrating
-existing proto2 code, where the distinction is meaningful.
-
-```
-protoc --python_out=output/location --mypy_out=relax_strict_optional_primitives:output/location
-```
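The effect on the generated constructor signature (see the `__init__` handling in `write_messages` in `main.py` below) looks roughly like the following sketch for a hypothetical `message M { uint32 x = 1; }` - illustrative, not verbatim plugin output:
```python
import builtins
import typing

class M_strict_stub:
    # Default stubs: proto3 scalar fields are typed non-optional in the constructor.
    def __init__(self, *, x: builtins.int = ...) -> None: ...

class M_relaxed_stub:
    # With relax_strict_optional_primitives: None is accepted here;
    # python-protobuf converts it to the field's zero value at runtime.
    def __init__(self, *, x: typing.Optional[builtins.int] = ...) -> None: ...
```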
-
-### Output suppression
-To suppress output, you can run
-```
-protoc --python_out=output/location --mypy_out=quiet:output/location
-```
-
-### GRPC
-
-This plugin provides stub generation for grpcio-generated code.
-```
-protoc \
- --python_out=output/location \
- --mypy_out=output/location \
- --grpc_out=output/location \
- --mypy_grpc_out=output/location
-```
-
-Note that the generated grpc code only works together with the generated python code, and the output locations should be the same.
-If you need stubs for grpc's own internals, we suggest using this package: https://github.com/shabbyrobe/grpc-stubs
-
-### Targeting python2 support
-
-mypy-protobuf drops support for targeting python2 with version 3.0. If you still need python2 support:
-```
-python3 -m pip install mypy_protobuf==2.10
-protoc --python_out=output/location --mypy_out=output/location
-mypy --target-version=2.7 {files}
-```
-
-
-## Contributing
-Contributions to the implementation are welcome. Please run tests using `./run_test.sh`.
-Ensure code is formatted using black.
-```
-pip3 install black
-black .
-```
-
-## Contributors
-
-### Dropboxers
-- [@nipunn1313](https://github.com/nipunn1313)
-- [@dzbarsky](https://github.com/dzbarsky)
-- [@gvanrossum](https://github.com/gvanrossum)
-- [@peterlvilim](https://github.com/peterlvilim)
-- [@msullivan](https://github.com/msullivan)
-- [@bradenaw](https://github.com/bradenaw)
-- [@ilevkivskyi](https://github.com/ilevkivskyi)
-
-### Others
-- [@Ketouem](https://github.com/Ketouem)
-- [@nmiculinic](https://github.com/nmiculinic)
-- [@onto](https://github.com/onto)
-- [@jcppkkk](https://github.com/jcppkkk)
-- [@drather19](https://github.com/drather19)
-- [@smessmer](https://github.com/smessmer)
-- [@pcorpet](https://github.com/pcorpet)
-- [@zozoens31](https://github.com/zozoens31)
-- [@abhishekrb19](https://github.com/abhishekrb19)
-- [@jaens](https://github.com/jaens)
-- [@arussellsaw](https://github.com/arussellsaw)
-- [@shabbyrobe](https://github.com/shabbyrobe)
-- [@reorx](https://github.com/reorx)
-- [@zifter](https://github.com/zifter)
-- [@juzna](https://github.com/juzna)
-- [@mikolajz](https://github.com/mikolajz)
-- [@chadrik](https://github.com/chadrik)
-- [@EPronovost](https://github.com/EPronovost)
-- [@chrislawlor](https://github.com/chrislawlor)
-- [@henribru](https://github.com/henribru)
-- [@Evgenus](https://github.com/Evgenus)
-- [@MHDante](https://github.com/MHDante)
-- [@nelfin](https://github.com/nelfin)
-- [@alkasm](https://github.com/alkasm)
-- [@tarmath](https://github.com/tarmath)
-- [@jaredkhan](https://github.com/jaredkhan)
-- [@sodul](https://github.com/sodul)
-
-Licence etc.
-------------
-
-1. License: Apache 2.0.
-2. Copyright attribution: Copyright (c) 2017 Dropbox, Inc.
-3. External contributions to the project should be subject to
- Dropbox's Contributor License Agreement (CLA):
- https://opensource.dropbox.com/cla/
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py b/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py
deleted file mode 100644
index e69de29bb2d..00000000000
--- a/contrib/python/mypy-protobuf/mypy_protobuf/__init__.py
+++ /dev/null
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py b/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
deleted file mode 100644
index 0ee46207804..00000000000
--- a/contrib/python/mypy-protobuf/mypy_protobuf/extensions_pb2.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: mypy_protobuf/extensions.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1emypy_protobuf/extensions.proto\x12\rmypy_protobuf\x1a google/protobuf/descriptor.proto:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xe0\xd4\x03 \x01(\t:0\n\x07keytype\x12\x1d.google.protobuf.FieldOptions\x18\xe2\xd4\x03 \x01(\t:2\n\tvaluetype\x12\x1d.google.protobuf.FieldOptions\x18\xe3\xd4\x03 \x01(\t')
-
-
-CASTTYPE_FIELD_NUMBER = 60000
-casttype = DESCRIPTOR.extensions_by_name['casttype']
-KEYTYPE_FIELD_NUMBER = 60002
-keytype = DESCRIPTOR.extensions_by_name['keytype']
-VALUETYPE_FIELD_NUMBER = 60003
-valuetype = DESCRIPTOR.extensions_by_name['valuetype']
-
-if _descriptor._USE_C_DESCRIPTORS == False:
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype)
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(keytype)
- google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(valuetype)
-
- DESCRIPTOR._options = None
-# @@protoc_insertion_point(module_scope)
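These extension descriptors are what the plugin reads off each field's options. A minimal, hedged sketch of that lookup (the helper name `casttype_of` is an illustrative assumption; the `Extensions` access mirrors `python_type` in `main.py` below):
```python
# Illustrative helper: look up the (mypy_protobuf.casttype) option on a field
# descriptor, the same way main.py does before falling back to the proto type.
import google.protobuf.descriptor_pb2 as d

from mypy_protobuf import extensions_pb2


def casttype_of(field: d.FieldDescriptorProto) -> str:
    # Returns the configured "path/to/mod.Type" string, or "" if the option is unset.
    return field.options.Extensions[extensions_pb2.casttype]
```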
diff --git a/contrib/python/mypy-protobuf/mypy_protobuf/main.py b/contrib/python/mypy-protobuf/mypy_protobuf/main.py
deleted file mode 100644
index 6e825d82801..00000000000
--- a/contrib/python/mypy-protobuf/mypy_protobuf/main.py
+++ /dev/null
@@ -1,1086 +0,0 @@
-#!/usr/bin/env python
-"""Protoc Plugin to generate mypy stubs. Loosely based on @zbarsky's go implementation"""
-import os
-
-import sys
-from collections import defaultdict
-from contextlib import contextmanager
-from functools import wraps
-from typing import (
- Any,
- Callable,
- Dict,
- Iterable,
- Iterator,
- List,
- Optional,
- Set,
- Sequence,
- Tuple,
-)
-
-import google.protobuf.descriptor_pb2 as d
-from google.protobuf.compiler import plugin_pb2 as plugin_pb2
-from google.protobuf.internal.containers import RepeatedCompositeFieldContainer
-from google.protobuf.internal.well_known_types import WKTBASES
-from . import extensions_pb2
-
-__version__ = "3.2.0"
-
-# SourceCodeLocation is defined by `message Location` here
-# https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto
-SourceCodeLocation = List[int]
-
-# So phabricator doesn't think mypy_protobuf.py is generated
-GENERATED = "@ge" + "nerated"
-HEADER = f"""\"\"\"
-{GENERATED} by mypy-protobuf. Do not edit manually!
-isort:skip_file
-\"\"\"
-"""
-
-# See https://github.com/dropbox/mypy-protobuf/issues/73 for details
-PYTHON_RESERVED = {
- "False",
- "None",
- "True",
- "and",
- "as",
- "async",
- "await",
- "assert",
- "break",
- "class",
- "continue",
- "def",
- "del",
- "elif",
- "else",
- "except",
- "finally",
- "for",
- "from",
- "global",
- "if",
- "import",
- "in",
- "is",
- "lambda",
- "nonlocal",
- "not",
- "or",
- "pass",
- "raise",
- "return",
- "try",
- "while",
- "with",
- "yield",
-}
-
-PROTO_ENUM_RESERVED = {
- "Name",
- "Value",
- "keys",
- "values",
- "items",
-}
-
-
-def _mangle_global_identifier(name: str) -> str:
- """
- Module level identifiers are mangled and aliased so that they can be disambiguated
- from fields/enum variants with the same name within the file.
-
- Eg:
- Enum variant `Name` or message field `Name` might conflict with a top level
- message or enum named `Name`, so mangle it with a global___ prefix for
- internal references. Note that this doesn't affect inner enums/messages
- because they get fully qualified when referenced within a file"""
- return f"global___{name}"
-
-
-class Descriptors(object):
- def __init__(self, request: plugin_pb2.CodeGeneratorRequest) -> None:
- files = {f.name: f for f in request.proto_file}
- to_generate = {n: files[n] for n in request.file_to_generate}
- self.files: Dict[str, d.FileDescriptorProto] = files
- self.to_generate: Dict[str, d.FileDescriptorProto] = to_generate
- self.messages: Dict[str, d.DescriptorProto] = {}
- self.message_to_fd: Dict[str, d.FileDescriptorProto] = {}
-
- def _add_enums(
- enums: "RepeatedCompositeFieldContainer[d.EnumDescriptorProto]",
- prefix: str,
- _fd: d.FileDescriptorProto,
- ) -> None:
- for enum in enums:
- self.message_to_fd[prefix + enum.name] = _fd
- self.message_to_fd[prefix + enum.name + ".ValueType"] = _fd
-
- def _add_messages(
- messages: "RepeatedCompositeFieldContainer[d.DescriptorProto]",
- prefix: str,
- _fd: d.FileDescriptorProto,
- ) -> None:
- for message in messages:
- self.messages[prefix + message.name] = message
- self.message_to_fd[prefix + message.name] = _fd
- sub_prefix = prefix + message.name + "."
- _add_messages(message.nested_type, sub_prefix, _fd)
- _add_enums(message.enum_type, sub_prefix, _fd)
-
- for fd in request.proto_file:
- start_prefix = "." + fd.package + "." if fd.package else "."
- _add_messages(fd.message_type, start_prefix, fd)
- _add_enums(fd.enum_type, start_prefix, fd)
-
-
-class PkgWriter(object):
- """Writes a single pyi file"""
-
- def __init__(
- self,
- fd: d.FileDescriptorProto,
- descriptors: Descriptors,
- readable_stubs: bool,
- relax_strict_optional_primitives: bool,
- grpc: bool,
- ) -> None:
- self.fd = fd
- self.descriptors = descriptors
- self.readable_stubs = readable_stubs
- self.relax_strict_optional_primitives = relax_strict_optional_primitives
- self.grpc = grpc
- self.lines: List[str] = []
- self.indent = ""
-
- # Set of {x}, where {x} corresponds to `import {x}`
- self.imports: Set[str] = set()
- # dictionary of x->(y,z) for `from {x} import {y} as {z}`
- # if {z} is None, then it shortens to `from {x} import {y}`
- self.from_imports: Dict[str, Set[Tuple[str, Optional[str]]]] = defaultdict(set)
-
- # Comments
- self.source_code_info_by_scl = {
- tuple(location.path): location for location in fd.source_code_info.location
- }
-
- def _import(self, path: str, name: str) -> str:
- """Imports a stdlib path and returns a handle to it
- eg. self._import("typing", "Optional") -> "Optional"
- """
- imp = path.replace("/", ".")
- if self.readable_stubs:
- self.from_imports[imp].add((name, None))
- return name
- else:
- self.imports.add(imp)
- return imp + "." + name
-
- def _import_message(self, name: str) -> str:
- """Import a referenced message and return a handle"""
- message_fd = self.descriptors.message_to_fd[name]
- assert message_fd.name.endswith(".proto")
-
- # Strip off package name
- if message_fd.package:
- assert name.startswith("." + message_fd.package + ".")
- name = name[len("." + message_fd.package + ".") :]
- else:
- assert name.startswith(".")
- name = name[1:]
-
- # Use prepended "_r_" to disambiguate message names that alias python reserved keywords
- split = name.split(".")
- for i, part in enumerate(split):
- if part in PYTHON_RESERVED:
- split[i] = "_r_" + part
- name = ".".join(split)
-
- # Message defined in this file. Note: GRPC stubs in same .proto are generated into separate files
- if not self.grpc and message_fd.name == self.fd.name:
- return name if self.readable_stubs else _mangle_global_identifier(name)
-
- # Not in file. Must import
- # Python generated code ignores proto packages, so the only relevant factor is
- # whether it is in the file or not.
- import_name = self._import(
- message_fd.name[:-6].replace("-", "_") + "_pb2", split[0]
- )
-
- remains = ".".join(split[1:])
- if not remains:
- return import_name
-
- # remains could either be a direct import of a nested enum or message
- # from another package.
- return import_name + "." + remains
-
- def _builtin(self, name: str) -> str:
- return self._import("builtins", name)
-
- @contextmanager
- def _indent(self) -> Iterator[None]:
- self.indent = self.indent + " "
- yield
- self.indent = self.indent[:-4]
-
- def _write_line(self, line: str, *args: Any) -> None:
- if args:
- line = line.format(*args)
- if line == "":
- self.lines.append(line)
- else:
- self.lines.append(self.indent + line)
-
- def _break_text(self, text_block: str) -> List[str]:
- if text_block == "":
- return []
- return [
- l[1:] if l.startswith(" ") else l for l in text_block.rstrip().split("\n")
- ]
-
- def _has_comments(self, scl: SourceCodeLocation) -> bool:
- sci_loc = self.source_code_info_by_scl.get(tuple(scl))
- return sci_loc is not None and bool(
- sci_loc.leading_detached_comments
- or sci_loc.leading_comments
- or sci_loc.trailing_comments
- )
-
- def _write_comments(self, scl: SourceCodeLocation) -> bool:
- """Return true if any comments were written"""
- if not self._has_comments(scl):
- return False
-
- sci_loc = self.source_code_info_by_scl.get(tuple(scl))
- assert sci_loc is not None
-
- lines = []
- for leading_detached_comment in sci_loc.leading_detached_comments:
- lines.extend(self._break_text(leading_detached_comment))
- lines.append("")
- if sci_loc.leading_comments is not None:
- lines.extend(self._break_text(sci_loc.leading_comments))
- # Trailing comments also go in the header - to make sure it gets into the docstring
- if sci_loc.trailing_comments is not None:
- lines.extend(self._break_text(sci_loc.trailing_comments))
-
- lines = [
- # Escape triple-quotes that would otherwise end the docstring early.
- line.replace("\\", "\\\\").replace('"""', r"\"\"\"")
- for line in lines
- ]
- if len(lines) == 1:
- line = lines[0]
- if line.endswith(('"', "\\")):
- # Docstrings are terminated with triple-quotes, so if the documentation itself ends in a quote,
- # insert some whitespace to separate it from the closing quotes.
- # This is not necessary with multiline comments
- # because in that case we always insert a newline before the trailing triple-quotes.
- line = line + " "
- self._write_line(f'"""{line}"""')
- else:
- for i, line in enumerate(lines):
- if i == 0:
- self._write_line(f'"""{line}')
- else:
- self._write_line(f"{line}")
- self._write_line('"""')
-
- return True
-
- def write_enum_values(
- self,
- values: Iterable[Tuple[int, d.EnumValueDescriptorProto]],
- value_type: str,
- scl_prefix: SourceCodeLocation,
- ) -> None:
- for i, val in values:
- if val.name in PYTHON_RESERVED:
- continue
-
- scl = scl_prefix + [i]
- self._write_line(
- f"{val.name}: {value_type} # {val.number}",
- )
- if self._write_comments(scl):
- self._write_line("") # Extra newline to separate
-
- def write_module_attributes(self) -> None:
- l = self._write_line
- fd_type = self._import("google.protobuf.descriptor", "FileDescriptor")
- l(f"DESCRIPTOR: {fd_type}")
- l("")
-
- def write_enums(
- self,
- enums: Iterable[d.EnumDescriptorProto],
- prefix: str,
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
- for i, enum in enumerate(enums):
- class_name = (
- enum.name if enum.name not in PYTHON_RESERVED else "_r_" + enum.name
- )
- value_type_fq = prefix + class_name + ".ValueType"
- enum_helper_class = "_" + enum.name
- value_type_helper_fq = prefix + enum_helper_class + ".ValueType"
- etw_helper_class = "_" + enum.name + "EnumTypeWrapper"
- scl = scl_prefix + [i]
-
- l(f"class {enum_helper_class}:")
- with self._indent():
- l(
- "ValueType = {}('ValueType', {})",
- self._import("typing", "NewType"),
- self._builtin("int"),
- )
- # Alias to the classic shorter definition "V"
- l("V: {} = ValueType", self._import("typing_extensions", "TypeAlias"))
- l(
- "class {}({}[{}], {}):",
- etw_helper_class,
- self._import(
- "google.protobuf.internal.enum_type_wrapper", "_EnumTypeWrapper"
- ),
- value_type_helper_fq,
- self._builtin("type"),
- )
- with self._indent():
- ed = self._import("google.protobuf.descriptor", "EnumDescriptor")
- l(f"DESCRIPTOR: {ed}")
- self.write_enum_values(
- [
- (i, v)
- for i, v in enumerate(enum.value)
- if v.name not in PROTO_ENUM_RESERVED
- ],
- value_type_helper_fq,
- scl + [d.EnumDescriptorProto.VALUE_FIELD_NUMBER],
- )
- l(f"class {class_name}({enum_helper_class}, metaclass={etw_helper_class}):")
- with self._indent():
- self._write_comments(scl)
- l("pass")
- l("")
-
- self.write_enum_values(
- enumerate(enum.value),
- value_type_fq,
- scl + [d.EnumDescriptorProto.VALUE_FIELD_NUMBER],
- )
- if prefix == "" and not self.readable_stubs:
- l(f"{_mangle_global_identifier(class_name)} = {class_name}")
- l("")
- l("")
-
- def write_messages(
- self,
- messages: Iterable[d.DescriptorProto],
- prefix: str,
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
-
- for i, desc in enumerate(messages):
- qualified_name = prefix + desc.name
-
- # Reproduce some hardcoded logic from the protobuf implementation - where
- # some specific "well_known_types" generated protos have additional
- # base classes
- addl_base = ""
- if self.fd.package + "." + desc.name in WKTBASES:
- # chop off the .proto - and import the well known type
- # eg `from google.protobuf.duration import Duration`
- well_known_type = WKTBASES[self.fd.package + "." + desc.name]
- addl_base = ", " + self._import(
- "google.protobuf.internal.well_known_types",
- well_known_type.__name__,
- )
-
- class_name = (
- desc.name if desc.name not in PYTHON_RESERVED else "_r_" + desc.name
- )
- message_class = self._import("google.protobuf.message", "Message")
- l(f"class {class_name}({message_class}{addl_base}):")
- with self._indent():
- scl = scl_prefix + [i]
- self._write_comments(scl)
-
- desc_type = self._import("google.protobuf.descriptor", "Descriptor")
- l(f"DESCRIPTOR: {desc_type}")
-
- # Nested enums/messages
- self.write_enums(
- desc.enum_type,
- qualified_name + ".",
- scl + [d.DescriptorProto.ENUM_TYPE_FIELD_NUMBER],
- )
- self.write_messages(
- desc.nested_type,
- qualified_name + ".",
- scl + [d.DescriptorProto.NESTED_TYPE_FIELD_NUMBER],
- )
-
- # integer constants for field numbers
- for f in desc.field:
- l(f"{f.name.upper()}_FIELD_NUMBER: {self._builtin('int')}")
-
- for idx, field in enumerate(desc.field):
- if field.name in PYTHON_RESERVED:
- continue
- field_type = self.python_type(field)
-
- if (
- is_scalar(field)
- and field.label != d.FieldDescriptorProto.LABEL_REPEATED
- ):
- # Scalar non repeated fields are r/w
- l(f"{field.name}: {field_type}")
- if self._write_comments(
- scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx]
- ):
- l("")
- else:
- # r/o Getters for non-scalar fields and scalar-repeated fields
- scl_field = scl + [d.DescriptorProto.FIELD_FIELD_NUMBER, idx]
- l("@property")
- body = " ..." if not self._has_comments(scl_field) else ""
- l(f"def {field.name}(self) -> {field_type}:{body}")
- if self._has_comments(scl_field):
- with self._indent():
- self._write_comments(scl_field)
- l("pass")
-
- self.write_extensions(
- desc.extension, scl + [d.DescriptorProto.EXTENSION_FIELD_NUMBER]
- )
-
- # Constructor
- if any(f.name == "self" for f in desc.field):
- l("# pyright: reportSelfClsParameterName=false")
- l(f"def __init__(self_,")
- else:
- l(f"def __init__(self,")
- with self._indent():
- constructor_fields = [
- f for f in desc.field if f.name not in PYTHON_RESERVED
- ]
- if len(constructor_fields) > 0:
- # Only positional args allowed
- # See https://github.com/dropbox/mypy-protobuf/issues/71
- l("*,")
- for field in constructor_fields:
- field_type = self.python_type(field, generic_container=True)
- if (
- self.fd.syntax == "proto3"
- and is_scalar(field)
- and field.label != d.FieldDescriptorProto.LABEL_REPEATED
- and not self.relax_strict_optional_primitives
- and not field.proto3_optional
- ):
- l(f"{field.name}: {field_type} = ...,")
- else:
- opt = self._import("typing", "Optional")
- l(f"{field.name}: {opt}[{field_type}] = ...,")
- l(") -> None: ...")
-
- self.write_stringly_typed_fields(desc)
-
- if prefix == "" and not self.readable_stubs:
- l(f"{_mangle_global_identifier(class_name)} = {class_name}")
- l("")
-
- def write_stringly_typed_fields(self, desc: d.DescriptorProto) -> None:
- """Type the stringly-typed methods as a Union[Literal, Literal ...]"""
- l = self._write_line
- # HasField, ClearField, WhichOneof accepts both bytes/str
- # HasField only supports singular. ClearField supports repeated as well
- # In proto3, HasField only supports message fields and optional fields
- # HasField always supports oneof fields
- hf_fields = [
- f.name
- for f in desc.field
- if f.HasField("oneof_index")
- or (
- f.label != d.FieldDescriptorProto.LABEL_REPEATED
- and (
- self.fd.syntax != "proto3"
- or f.type == d.FieldDescriptorProto.TYPE_MESSAGE
- or f.proto3_optional
- )
- )
- ]
- cf_fields = [f.name for f in desc.field]
- wo_fields = {
- oneof.name: [
- f.name
- for f in desc.field
- if f.HasField("oneof_index") and f.oneof_index == idx
- ]
- for idx, oneof in enumerate(desc.oneof_decl)
- }
-
- hf_fields.extend(wo_fields.keys())
- cf_fields.extend(wo_fields.keys())
-
- hf_fields_text = ",".join(sorted(f'"{name}",b"{name}"' for name in hf_fields))
- cf_fields_text = ",".join(sorted(f'"{name}",b"{name}"' for name in cf_fields))
-
- if not hf_fields and not cf_fields and not wo_fields:
- return
-
- if hf_fields:
- l(
- "def HasField(self, field_name: {}[{}]) -> {}: ...",
- self._import("typing_extensions", "Literal"),
- hf_fields_text,
- self._builtin("bool"),
- )
- if cf_fields:
- l(
- "def ClearField(self, field_name: {}[{}]) -> None: ...",
- self._import("typing_extensions", "Literal"),
- cf_fields_text,
- )
-
- for wo_field, members in sorted(wo_fields.items()):
- if len(wo_fields) > 1:
- l("@{}", self._import("typing", "overload"))
- l(
- "def WhichOneof(self, oneof_group: {}[{}]) -> {}[{}[{}]]: ...",
- self._import("typing_extensions", "Literal"),
- # Accepts both str and bytes
- f'"{wo_field}",b"{wo_field}"',
- self._import("typing", "Optional"),
- self._import("typing_extensions", "Literal"),
- # Returns `str`
- ",".join(f'"{m}"' for m in members),
- )
-
- def write_extensions(
- self,
- extensions: Sequence[d.FieldDescriptorProto],
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
-
- for ext in extensions:
- l(f"{ext.name.upper()}_FIELD_NUMBER: {self._builtin('int')}")
-
- for i, ext in enumerate(extensions):
- scl = scl_prefix + [i]
-
- l(
- "{}: {}[{}, {}]",
- ext.name,
- self._import(
- "google.protobuf.internal.extension_dict",
- "_ExtensionFieldDescriptor",
- ),
- self._import_message(ext.extendee),
- self.python_type(ext),
- )
- self._write_comments(scl)
- l("")
-
- def write_methods(
- self,
- service: d.ServiceDescriptorProto,
- class_name: str,
- is_abstract: bool,
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
- l(
- "DESCRIPTOR: {}",
- self._import("google.protobuf.descriptor", "ServiceDescriptor"),
- )
- methods = [
- (i, m)
- for i, m in enumerate(service.method)
- if m.name not in PYTHON_RESERVED
- ]
- if not methods:
- l("pass")
- for i, method in methods:
- if is_abstract:
- l("@{}", self._import("abc", "abstractmethod"))
- l(f"def {method.name}(")
- with self._indent():
- l(f"inst: {class_name},")
- l(
- "rpc_controller: {},",
- self._import("google.protobuf.service", "RpcController"),
- )
- l("request: {},", self._import_message(method.input_type))
- l(
- "callback: {}[{}[[{}], None]]{},",
- self._import("typing", "Optional"),
- self._import("typing", "Callable"),
- self._import_message(method.output_type),
- "" if is_abstract else " = None",
- )
-
- scl_method = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
- l(
- ") -> {}[{}]:{}",
- self._import("concurrent.futures", "Future"),
- self._import_message(method.output_type),
- " ..." if not self._has_comments(scl_method) else "",
- )
- if self._has_comments(scl_method):
- with self._indent():
- self._write_comments(scl_method)
- l("pass")
-
- def write_services(
- self,
- services: Iterable[d.ServiceDescriptorProto],
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
- for i, service in enumerate(services):
- scl = scl_prefix + [i]
- class_name = (
- service.name
- if service.name not in PYTHON_RESERVED
- else "_r_" + service.name
- )
- # The service definition interface
- l(
- "class {}({}, metaclass={}):",
- class_name,
- self._import("google.protobuf.service", "Service"),
- self._import("abc", "ABCMeta"),
- )
- with self._indent():
- self._write_comments(scl)
- self.write_methods(
- service, class_name, is_abstract=True, scl_prefix=scl
- )
-
- # The stub client
- stub_class_name = service.name + "_Stub"
- l("class {}({}):", stub_class_name, class_name)
- with self._indent():
- self._write_comments(scl)
- l(
- "def __init__(self, rpc_channel: {}) -> None: ...",
- self._import("google.protobuf.service", "RpcChannel"),
- )
- self.write_methods(
- service, stub_class_name, is_abstract=False, scl_prefix=scl
- )
-
- def _import_casttype(self, casttype: str) -> str:
- split = casttype.split(".")
- assert (
- len(split) == 2
- ), "mypy_protobuf.[casttype,keytype,valuetype] is expected to be of format path/to/file.TypeInFile"
- pkg = split[0].replace("/", ".")
- return self._import(pkg, split[1])
-
- def _map_key_value_types(
- self,
- map_field: d.FieldDescriptorProto,
- key_field: d.FieldDescriptorProto,
- value_field: d.FieldDescriptorProto,
- ) -> Tuple[str, str]:
- key_casttype = map_field.options.Extensions[extensions_pb2.keytype]
- ktype = (
- self._import_casttype(key_casttype)
- if key_casttype
- else self.python_type(key_field)
- )
- value_casttype = map_field.options.Extensions[extensions_pb2.valuetype]
- vtype = (
- self._import_casttype(value_casttype)
- if value_casttype
- else self.python_type(value_field)
- )
- return ktype, vtype
-
- def _callable_type(self, method: d.MethodDescriptorProto) -> str:
- if method.client_streaming:
- if method.server_streaming:
- return self._import("grpc", "StreamStreamMultiCallable")
- else:
- return self._import("grpc", "StreamUnaryMultiCallable")
- else:
- if method.server_streaming:
- return self._import("grpc", "UnaryStreamMultiCallable")
- else:
- return self._import("grpc", "UnaryUnaryMultiCallable")
-
- def _input_type(
- self, method: d.MethodDescriptorProto, use_stream_iterator: bool = True
- ) -> str:
- result = self._import_message(method.input_type)
- if use_stream_iterator and method.client_streaming:
- result = f"{self._import('typing', 'Iterator')}[{result}]"
- return result
-
- def _output_type(
- self, method: d.MethodDescriptorProto, use_stream_iterator: bool = True
- ) -> str:
- result = self._import_message(method.output_type)
- if use_stream_iterator and method.server_streaming:
- result = f"{self._import('typing', 'Iterator')}[{result}]"
- return result
-
- def write_grpc_methods(
- self, service: d.ServiceDescriptorProto, scl_prefix: SourceCodeLocation
- ) -> None:
- l = self._write_line
- methods = [
- (i, m)
- for i, m in enumerate(service.method)
- if m.name not in PYTHON_RESERVED
- ]
- if not methods:
- l("pass")
- l("")
- for i, method in methods:
- scl = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
-
- l("@{}", self._import("abc", "abstractmethod"))
- l("def {}(self,", method.name)
- with self._indent():
- input_name = (
- "request_iterator" if method.client_streaming else "request"
- )
- input_type = self._input_type(method)
- l(f"{input_name}: {input_type},")
- l("context: {},", self._import("grpc", "ServicerContext"))
- l(
- ") -> {}:{}",
- self._output_type(method),
- " ..." if not self._has_comments(scl) else "",
- )
- if self._has_comments(scl):
- with self._indent():
- self._write_comments(scl)
- l("pass")
- l("")
-
- def write_grpc_stub_methods(
- self, service: d.ServiceDescriptorProto, scl_prefix: SourceCodeLocation
- ) -> None:
- l = self._write_line
- methods = [
- (i, m)
- for i, m in enumerate(service.method)
- if m.name not in PYTHON_RESERVED
- ]
- if not methods:
- l("pass")
- l("")
- for i, method in methods:
- scl = scl_prefix + [d.ServiceDescriptorProto.METHOD_FIELD_NUMBER, i]
-
- l("{}: {}[", method.name, self._callable_type(method))
- with self._indent():
- l("{},", self._input_type(method, False))
- l("{}]", self._output_type(method, False))
- self._write_comments(scl)
- l("")
-
- def write_grpc_services(
- self,
- services: Iterable[d.ServiceDescriptorProto],
- scl_prefix: SourceCodeLocation,
- ) -> None:
- l = self._write_line
- for i, service in enumerate(services):
- if service.name in PYTHON_RESERVED:
- continue
-
- scl = scl_prefix + [i]
-
- # The stub client
- l(f"class {service.name}Stub:")
- with self._indent():
- self._write_comments(scl)
- l(
- "def __init__(self, channel: {}) -> None: ...",
- self._import("grpc", "Channel"),
- )
- self.write_grpc_stub_methods(service, scl)
- l("")
-
- # The service definition interface
- l(
- "class {}Servicer(metaclass={}):",
- service.name,
- self._import("abc", "ABCMeta"),
- )
- with self._indent():
- self._write_comments(scl)
- self.write_grpc_methods(service, scl)
- l("")
- l(
- "def add_{}Servicer_to_server(servicer: {}Servicer, server: {}) -> None: ...",
- service.name,
- service.name,
- self._import("grpc", "Server"),
- )
- l("")
-
- def python_type(
- self, field: d.FieldDescriptorProto, generic_container: bool = False
- ) -> str:
- """
- generic_container
- if set, type the field with generic interfaces. Eg.
- - Iterable[int] rather than RepeatedScalarFieldContainer[int]
- - Mapping[k, v] rather than MessageMap[k, v]
- Can be useful for input types (eg constructor)
- """
- casttype = field.options.Extensions[extensions_pb2.casttype]
- if casttype:
- return self._import_casttype(casttype)
-
- mapping: Dict[d.FieldDescriptorProto.Type.V, Callable[[], str]] = {
- d.FieldDescriptorProto.TYPE_DOUBLE: lambda: self._builtin("float"),
- d.FieldDescriptorProto.TYPE_FLOAT: lambda: self._builtin("float"),
- d.FieldDescriptorProto.TYPE_INT64: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_UINT64: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_FIXED64: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_SFIXED64: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_SINT64: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_INT32: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_UINT32: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_FIXED32: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_SFIXED32: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_SINT32: lambda: self._builtin("int"),
- d.FieldDescriptorProto.TYPE_BOOL: lambda: self._builtin("bool"),
- d.FieldDescriptorProto.TYPE_STRING: lambda: self._import("typing", "Text"),
- d.FieldDescriptorProto.TYPE_BYTES: lambda: self._builtin("bytes"),
- d.FieldDescriptorProto.TYPE_ENUM: lambda: self._import_message(
- field.type_name + ".ValueType"
- ),
- d.FieldDescriptorProto.TYPE_MESSAGE: lambda: self._import_message(
- field.type_name
- ),
- d.FieldDescriptorProto.TYPE_GROUP: lambda: self._import_message(
- field.type_name
- ),
- }
-
- assert field.type in mapping, "Unrecognized type: " + repr(field.type)
- field_type = mapping[field.type]()
-
- # For non-repeated fields, we're done!
- if field.label != d.FieldDescriptorProto.LABEL_REPEATED:
- return field_type
-
- # Scalar repeated fields go in RepeatedScalarFieldContainer
- if is_scalar(field):
- container = (
- self._import("typing", "Iterable")
- if generic_container
- else self._import(
- "google.protobuf.internal.containers",
- "RepeatedScalarFieldContainer",
- )
- )
- return f"{container}[{field_type}]"
-
- # non-scalar repeated map fields go in ScalarMap/MessageMap
- msg = self.descriptors.messages[field.type_name]
- if msg.options.map_entry:
- # map generates a special Entry wrapper message
- if generic_container:
- container = self._import("typing", "Mapping")
- elif is_scalar(msg.field[1]):
- container = self._import(
- "google.protobuf.internal.containers", "ScalarMap"
- )
- else:
- container = self._import(
- "google.protobuf.internal.containers", "MessageMap"
- )
- ktype, vtype = self._map_key_value_types(field, msg.field[0], msg.field[1])
- return f"{container}[{ktype}, {vtype}]"
-
- # non-scalar repeated fields go in RepeatedCompositeFieldContainer
- container = (
- self._import("typing", "Iterable")
- if generic_container
- else self._import(
- "google.protobuf.internal.containers",
- "RepeatedCompositeFieldContainer",
- )
- )
- return f"{container}[{field_type}]"
-
- def write(self) -> str:
- for reexport_idx in self.fd.public_dependency:
- reexport_file = self.fd.dependency[reexport_idx]
- reexport_fd = self.descriptors.files[reexport_file]
- reexport_imp = (
- reexport_file[:-6].replace("-", "_").replace("/", ".") + "_pb2"
- )
- names = (
- [m.name for m in reexport_fd.message_type]
- + [m.name for m in reexport_fd.enum_type]
- + [v.name for m in reexport_fd.enum_type for v in m.value]
- + [m.name for m in reexport_fd.extension]
- )
- if reexport_fd.options.py_generic_services:
- names.extend(m.name for m in reexport_fd.service)
-
- if names:
- # n,n to force a reexport (from x import y as y)
- self.from_imports[reexport_imp].update((n, n) for n in names)
-
- import_lines = []
- for pkg in sorted(self.imports):
- import_lines.append(f"import {pkg}")
-
- for pkg, items in sorted(self.from_imports.items()):
- import_lines.append(f"from {pkg} import (")
- for (name, reexport_name) in sorted(items):
- if reexport_name is None:
- import_lines.append(f" {name},")
- else:
- import_lines.append(f" {name} as {reexport_name},")
- import_lines.append(")\n")
- import_lines.append("")
-
- return "\n".join(import_lines + self.lines)
-
-
-def is_scalar(fd: d.FieldDescriptorProto) -> bool:
- return not (
- fd.type == d.FieldDescriptorProto.TYPE_MESSAGE
- or fd.type == d.FieldDescriptorProto.TYPE_GROUP
- )
-
-
-def generate_mypy_stubs(
- descriptors: Descriptors,
- response: plugin_pb2.CodeGeneratorResponse,
- quiet: bool,
- readable_stubs: bool,
- relax_strict_optional_primitives: bool,
-) -> None:
- for name, fd in descriptors.to_generate.items():
- pkg_writer = PkgWriter(
- fd,
- descriptors,
- readable_stubs,
- relax_strict_optional_primitives,
- grpc=False,
- )
-
- pkg_writer.write_module_attributes()
- pkg_writer.write_enums(
- fd.enum_type, "", [d.FileDescriptorProto.ENUM_TYPE_FIELD_NUMBER]
- )
- pkg_writer.write_messages(
- fd.message_type, "", [d.FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER]
- )
- pkg_writer.write_extensions(
- fd.extension, [d.FileDescriptorProto.EXTENSION_FIELD_NUMBER]
- )
- if fd.options.py_generic_services:
- pkg_writer.write_services(
- fd.service, [d.FileDescriptorProto.SERVICE_FIELD_NUMBER]
- )
-
- assert name == fd.name
- assert fd.name.endswith(".proto")
- output = response.file.add()
- output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2.pyi"
- output.content = HEADER + pkg_writer.write()
-
-
-def generate_mypy_grpc_stubs(
- descriptors: Descriptors,
- response: plugin_pb2.CodeGeneratorResponse,
- quiet: bool,
- readable_stubs: bool,
- relax_strict_optional_primitives: bool,
-) -> None:
- for name, fd in descriptors.to_generate.items():
- pkg_writer = PkgWriter(
- fd,
- descriptors,
- readable_stubs,
- relax_strict_optional_primitives,
- grpc=True,
- )
- pkg_writer.write_grpc_services(
- fd.service, [d.FileDescriptorProto.SERVICE_FIELD_NUMBER]
- )
-
- assert name == fd.name
- assert fd.name.endswith(".proto")
- output = response.file.add()
- output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2_grpc.pyi"
- output.content = HEADER + pkg_writer.write()
-
-
-@contextmanager
-def code_generation() -> Iterator[
- Tuple[plugin_pb2.CodeGeneratorRequest, plugin_pb2.CodeGeneratorResponse],
-]:
- if len(sys.argv) > 1 and sys.argv[1] in ("-V", "--version"):
- print("mypy-protobuf " + __version__)
- sys.exit(0)
-
- # Read request message from stdin
- data = sys.stdin.buffer.read()
-
- # Parse request
- request = plugin_pb2.CodeGeneratorRequest()
- request.ParseFromString(data)
-
- # Create response
- response = plugin_pb2.CodeGeneratorResponse()
-
- # Declare support for optional proto3 fields
- response.supported_features |= (
- plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL
- )
-
- yield request, response
-
- # Serialise response message
- output = response.SerializeToString()
-
- # Write to stdout
- sys.stdout.buffer.write(output)
-
-
-def main() -> None:
- # Generate mypy
- with code_generation() as (request, response):
- generate_mypy_stubs(
- Descriptors(request),
- response,
- "quiet" in request.parameter,
- "readable_stubs" in request.parameter,
- "relax_strict_optional_primitives" in request.parameter,
- )
-
-
-def grpc() -> None:
- # Generate grpc mypy
- with code_generation() as (request, response):
- generate_mypy_grpc_stubs(
- Descriptors(request),
- response,
- "quiet" in request.parameter,
- "readable_stubs" in request.parameter,
- "relax_strict_optional_primitives" in request.parameter,
- )
-
-
-if __name__ == "__main__":
- main()
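As `code_generation()` above shows, the plugin speaks the standard protoc plugin protocol: a serialized `CodeGeneratorRequest` on stdin, a serialized `CodeGeneratorResponse` on stdout. A minimal, hedged sketch of driving the plugin directly that way (the descriptor-set file `m.desc` and the proto name `M.proto` are assumptions for illustration):
```python
# Illustrative sketch: invoke protoc-gen-mypy the way protoc does, by piping a
# serialized CodeGeneratorRequest to stdin and parsing the response from stdout.
import subprocess

from google.protobuf import descriptor_pb2
from google.protobuf.compiler import plugin_pb2

# Assumed to have been produced beforehand with:
#   protoc --include_source_info --descriptor_set_out=m.desc M.proto
fds = descriptor_pb2.FileDescriptorSet()
with open("m.desc", "rb") as fh:
    fds.ParseFromString(fh.read())

request = plugin_pb2.CodeGeneratorRequest(
    file_to_generate=["M.proto"],
    proto_file=fds.file,
)

proc = subprocess.run(
    ["protoc-gen-mypy"],
    input=request.SerializeToString(),
    stdout=subprocess.PIPE,
    check=True,
)

response = plugin_pb2.CodeGeneratorResponse()
response.ParseFromString(proc.stdout)
for generated in response.file:
    print(generated.name)  # e.g. M_pb2.pyi
```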
diff --git a/contrib/python/mypy-protobuf/patches/01-arcadia.patch b/contrib/python/mypy-protobuf/patches/01-arcadia.patch
deleted file mode 100644
index 6f718593f33..00000000000
--- a/contrib/python/mypy-protobuf/patches/01-arcadia.patch
+++ /dev/null
@@ -1,20 +0,0 @@
---- contrib/python/mypy-protobuf/mypy_protobuf/main.py (index)
-+++ contrib/python/mypy-protobuf/mypy_protobuf/main.py (working tree)
-@@ -989,8 +989,6 @@ def generate_mypy_stubs(
- output = response.file.add()
- output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2.pyi"
- output.content = HEADER + pkg_writer.write()
-- if not quiet:
-- print("Writing mypy to", output.name, file=sys.stderr)
-
-
- def generate_mypy_grpc_stubs(
-@@ -1017,8 +1015,6 @@ def generate_mypy_grpc_stubs(
- output = response.file.add()
- output.name = fd.name[:-6].replace("-", "_").replace(".", "/") + "_pb2_grpc.pyi"
- output.content = HEADER + pkg_writer.write()
-- if not quiet:
-- print("Writing mypy to", output.name, file=sys.stderr)
-
-
- @contextmanager