aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python
diff options
context:
space:
mode:
authorAlexander Smirnov <alex@ydb.tech>2024-04-23 09:07:41 +0000
committerAlexander Smirnov <alex@ydb.tech>2024-04-23 09:07:41 +0000
commitb35b2344f47ddaef21fb74c7f5cad9cd91d3cb45 (patch)
treee7b382f5c6cce63ce1e160d51ad1aac846ba2905 /contrib/python
parentb3bee3aa6d7c8767695b8917484e6bb488e9c8ca (diff)
parentae5472d0928c374dc719b154c9dcb2be6e0a0695 (diff)
downloadydb-b35b2344f47ddaef21fb74c7f5cad9cd91d3cb45.tar.gz
Merge branch 'rightlib' into mergelibs-240423-0906
Diffstat (limited to 'contrib/python')
-rw-r--r--contrib/python/argcomplete/py3/.dist-info/METADATA2
-rw-r--r--contrib/python/argcomplete/py3/argcomplete/packages/_argparse.py5
-rw-r--r--contrib/python/argcomplete/py3/ya.make2
-rw-r--r--contrib/python/clickhouse-connect/.dist-info/METADATA2
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/__version__.py2
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/cc_sqlalchemy/datatypes/base.py6
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/driver/client.py33
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/driver/httpclient.py50
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/driver/query.py10
-rw-r--r--contrib/python/clickhouse-connect/clickhouse_connect/tools/testing.py5
-rw-r--r--contrib/python/clickhouse-connect/ya.make2
-rw-r--r--contrib/python/fonttools/.dist-info/METADATA8
-rw-r--r--contrib/python/fonttools/fontTools/__init__.py2
-rw-r--r--contrib/python/fonttools/fontTools/ttLib/reorderGlyphs.py278
-rw-r--r--contrib/python/fonttools/fontTools/ttLib/tables/otConverters.py2
-rw-r--r--contrib/python/fonttools/fontTools/ttLib/ttFont.py5
-rw-r--r--contrib/python/fonttools/ya.make3
-rw-r--r--contrib/python/future/py2/.dist-info/METADATA25
-rw-r--r--contrib/python/future/py2/.dist-info/entry_points.txt1
-rw-r--r--contrib/python/future/py2/LICENSE.txt2
-rw-r--r--contrib/python/future/py2/README.rst51
-rw-r--r--contrib/python/future/py2/future/__init__.py19
-rw-r--r--contrib/python/future/py2/future/backports/datetime.py2
-rw-r--r--contrib/python/future/py2/future/backports/email/_header_value_parser.py2
-rw-r--r--contrib/python/future/py2/future/backports/email/parser.py4
-rw-r--r--contrib/python/future/py2/future/backports/http/cookiejar.py2
-rw-r--r--contrib/python/future/py2/future/backports/xmlrpc/client.py9
-rw-r--r--contrib/python/future/py2/future/builtins/__init__.py2
-rw-r--r--contrib/python/future/py2/future/moves/_dummy_thread.py11
-rw-r--r--contrib/python/future/py2/future/moves/multiprocessing.py7
-rw-r--r--contrib/python/future/py2/future/standard_library/__init__.py14
-rw-r--r--contrib/python/future/py2/future/types/newint.py8
-rw-r--r--contrib/python/future/py2/future/types/newrange.py2
-rw-r--r--contrib/python/future/py2/past/__init__.py4
-rw-r--r--contrib/python/future/py2/past/builtins/misc.py9
-rw-r--r--contrib/python/future/py2/ya.make3
-rw-r--r--contrib/python/hypothesis/py3/.dist-info/METADATA2
-rw-r--r--contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py23
-rw-r--r--contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py10
-rw-r--r--contrib/python/hypothesis/py3/hypothesis/internal/conjecture/pareto.py3
-rw-r--r--contrib/python/hypothesis/py3/hypothesis/version.py2
-rw-r--r--contrib/python/hypothesis/py3/ya.make2
-rw-r--r--contrib/python/parso/py3/.dist-info/METADATA14
-rw-r--r--contrib/python/parso/py3/parso/__init__.py2
-rw-r--r--contrib/python/parso/py3/parso/grammar.py10
-rw-r--r--contrib/python/parso/py3/parso/pgen2/generator.py2
-rw-r--r--contrib/python/parso/py3/parso/python/errors.py51
-rw-r--r--contrib/python/parso/py3/parso/python/grammar313.txt169
-rw-r--r--contrib/python/parso/py3/parso/python/tree.py3
-rw-r--r--contrib/python/parso/py3/tests/test_python_errors.py23
-rw-r--r--contrib/python/parso/py3/ya.make3
-rw-r--r--contrib/python/typing-extensions/py3/.dist-info/METADATA2
-rw-r--r--contrib/python/typing-extensions/py3/typing_extensions.py201
-rw-r--r--contrib/python/typing-extensions/py3/ya.make2
54 files changed, 924 insertions, 194 deletions
diff --git a/contrib/python/argcomplete/py3/.dist-info/METADATA b/contrib/python/argcomplete/py3/.dist-info/METADATA
index acf27c40a4e..ffefd85b8cd 100644
--- a/contrib/python/argcomplete/py3/.dist-info/METADATA
+++ b/contrib/python/argcomplete/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: argcomplete
-Version: 3.2.3
+Version: 3.3.0
Summary: Bash tab completion for argparse
Home-page: https://github.com/kislyuk/argcomplete
Author: Andrey Kislyuk
diff --git a/contrib/python/argcomplete/py3/argcomplete/packages/_argparse.py b/contrib/python/argcomplete/py3/argcomplete/packages/_argparse.py
index 7a8b4fc8213..2a064f9a8f7 100644
--- a/contrib/python/argcomplete/py3/argcomplete/packages/_argparse.py
+++ b/contrib/python/argcomplete/py3/argcomplete/packages/_argparse.py
@@ -162,7 +162,10 @@ class IntrospectiveArgumentParser(ArgumentParser):
def consume_optional(start_index):
# get the optional identified at this index
option_tuple = option_string_indices[start_index]
- action, option_string, explicit_arg = option_tuple
+ if len(option_tuple) == 3:
+ action, option_string, explicit_arg = option_tuple
+ else: # Python 3.11.9+, 3.12.3+, 3.13+
+ action, option_string, _, explicit_arg = option_tuple
# identify additional optionals in the same arg string
# (e.g. -xyz is the same as -x -y -z if no args are required)
diff --git a/contrib/python/argcomplete/py3/ya.make b/contrib/python/argcomplete/py3/ya.make
index 02e0bd87f85..9b867e4d8bc 100644
--- a/contrib/python/argcomplete/py3/ya.make
+++ b/contrib/python/argcomplete/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(3.2.3)
+VERSION(3.3.0)
LICENSE(Apache-2.0)
diff --git a/contrib/python/clickhouse-connect/.dist-info/METADATA b/contrib/python/clickhouse-connect/.dist-info/METADATA
index 914107f75a4..0d75e95b0e4 100644
--- a/contrib/python/clickhouse-connect/.dist-info/METADATA
+++ b/contrib/python/clickhouse-connect/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: clickhouse-connect
-Version: 0.7.6
+Version: 0.7.7
Summary: ClickHouse Database Core Driver for Python, Pandas, and Superset
Home-page: https://github.com/ClickHouse/clickhouse-connect
Author: ClickHouse Inc.
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/__version__.py b/contrib/python/clickhouse-connect/clickhouse_connect/__version__.py
index 4a603d90096..9e59d8f8e18 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/__version__.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/__version__.py
@@ -1 +1 @@
-version = '0.7.6'
+version = '0.7.7'
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/cc_sqlalchemy/datatypes/base.py b/contrib/python/clickhouse-connect/clickhouse_connect/cc_sqlalchemy/datatypes/base.py
index 14d60351f42..0c0d25e6b0a 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/cc_sqlalchemy/datatypes/base.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/cc_sqlalchemy/datatypes/base.py
@@ -5,7 +5,7 @@ from sqlalchemy.exc import CompileError
from clickhouse_connect.datatypes.base import ClickHouseType, TypeDef, EMPTY_TYPE_DEF
from clickhouse_connect.datatypes.registry import parse_name, type_map
-from clickhouse_connect.driver.query import format_query_value
+from clickhouse_connect.driver.query import str_query_value
logger = logging.getLogger(__name__)
@@ -96,12 +96,12 @@ class ChSqlaType:
method and should be able to ignore literal_processor definitions in the dialect, which are verbose and
confusing.
"""
- return format_query_value
+ return str_query_value
def _compiler_dispatch(self, _visitor, **_):
"""
Override for the SqlAlchemy TypeEngine _compiler_dispatch method to sidestep unnecessary layers and complexity
- when generating the type name. The underlying ClickHouseType generates the correct name
+ when generating the type name. The underlying ClickHouseType generates the correct name for the type
:return: Name generated by the underlying driver.
"""
return self.name
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/driver/client.py b/contrib/python/clickhouse-connect/clickhouse_connect/driver/client.py
index cf16ec24ece..3cce716372d 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/driver/client.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/driver/client.py
@@ -256,8 +256,7 @@ class Client(ABC):
settings: Optional[Dict[str, Any]] = None,
fmt: str = None,
use_database: bool = True,
- external_data: Optional[ExternalData] = None,
- stream: bool = False) -> Union[bytes, io.IOBase]:
+ external_data: Optional[ExternalData] = None) -> bytes:
"""
Query method that simply returns the raw ClickHouse format bytes
:param query: Query statement/format string
@@ -270,6 +269,25 @@ class Client(ABC):
:return: bytes representing raw ClickHouse return value based on format
"""
+ @abstractmethod
+ def raw_stream(self, query: str,
+ parameters: Optional[Union[Sequence, Dict[str, Any]]] = None,
+ settings: Optional[Dict[str, Any]] = None,
+ fmt: str = None,
+ use_database: bool = True,
+ external_data: Optional[ExternalData] = None) -> io.IOBase:
+ """
+ Query method that returns the result as an io.IOBase iterator
+ :param query: Query statement/format string
+ :param parameters: Optional dictionary used to format the query
+ :param settings: Optional dictionary of ClickHouse settings (key/string values)
+ :param fmt: ClickHouse output format
+ :param use_database Send the database parameter to ClickHouse so the command will be executed in the client
+ database context.
+ :param external_data External data to send with the query
+ :return: io.IOBase stream/iterator for the result
+ """
+
# pylint: disable=duplicate-code,too-many-arguments,unused-argument
def query_np(self,
query: Optional[str] = None,
@@ -487,12 +505,11 @@ class Client(ABC):
:return: Generator that yields a PyArrow.Table for per block representing the result set
"""
settings = self._update_arrow_settings(settings, use_strings)
- return to_arrow_batches(self.raw_query(query,
- parameters,
- settings,
- fmt='ArrowStream',
- external_data=external_data,
- stream=True))
+ return to_arrow_batches(self.raw_stream(query,
+ parameters,
+ settings,
+ fmt='ArrowStream',
+ external_data=external_data))
def _update_arrow_settings(self,
settings: Optional[Dict[str, Any]],
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/driver/httpclient.py b/contrib/python/clickhouse-connect/clickhouse_connect/driver/httpclient.py
index 1a35470b437..7202bc2ef51 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/driver/httpclient.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/driver/httpclient.py
@@ -1,3 +1,4 @@
+import io
import json
import logging
import re
@@ -436,27 +437,36 @@ class HttpClient(Client):
else:
self._error_handler(response)
- def ping(self):
- """
- See BaseClient doc_string for this method
- """
- try:
- response = self.http.request('GET', f'{self.url}/ping', timeout=3)
- return 200 <= response.status < 300
- except HTTPError:
- logger.debug('ping failed', exc_info=True)
- return False
-
def raw_query(self, query: str,
parameters: Optional[Union[Sequence, Dict[str, Any]]] = None,
settings: Optional[Dict[str, Any]] = None,
fmt: str = None,
use_database: bool = True,
- external_data: Optional[ExternalData] = None,
- stream: bool = False) -> Union[bytes, HTTPResponse]:
+ external_data: Optional[ExternalData] = None) -> bytes:
"""
See BaseClient doc_string for this method
"""
+ body, params, fields = self._prep_raw_query(query, parameters, settings, fmt, use_database, external_data)
+ return self._raw_request(body, params, fields=fields).data
+
+ def raw_stream(self, query: str,
+ parameters: Optional[Union[Sequence, Dict[str, Any]]] = None,
+ settings: Optional[Dict[str, Any]] = None,
+ fmt: str = None,
+ use_database: bool = True,
+ external_data: Optional[ExternalData] = None) -> io.IOBase:
+ """
+ See BaseClient doc_string for this method
+ """
+ body, params, fields = self._prep_raw_query(query, parameters, settings, fmt, use_database, external_data)
+ return self._raw_request(body, params, fields=fields, stream=True)
+
+ def _prep_raw_query(self, query: str,
+ parameters: Optional[Union[Sequence, Dict[str, Any]]],
+ settings: Optional[Dict[str, Any]],
+ fmt: str,
+ use_database: bool,
+ external_data: Optional[ExternalData]):
final_query, bind_params = bind_query(query, parameters, self.server_tz)
if fmt:
final_query += f'\n FORMAT {fmt}'
@@ -472,8 +482,18 @@ class HttpClient(Client):
else:
body = final_query
fields = None
- response = self._raw_request(body, params, fields=fields, stream=stream)
- return response if stream else response.data
+ return body, params, fields
+
+ def ping(self):
+ """
+ See BaseClient doc_string for this method
+ """
+ try:
+ response = self.http.request('GET', f'{self.url}/ping', timeout=3)
+ return 200 <= response.status < 300
+ except HTTPError:
+ logger.debug('ping failed', exc_info=True)
+ return False
def close(self):
if self._owns_pool_manager:
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/driver/query.py b/contrib/python/clickhouse-connect/clickhouse_connect/driver/query.py
index 549dfc37177..42957d8eb8d 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/driver/query.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/driver/query.py
@@ -399,13 +399,13 @@ def format_query_value(value: Any, server_tz: tzinfo = pytz.UTC):
if isinstance(value, date):
return f"'{value.isoformat()}'"
if isinstance(value, list):
- return f"[{', '.join(format_query_value(x, server_tz) for x in value)}]"
+ return f"[{', '.join(str_query_value(x, server_tz) for x in value)}]"
if isinstance(value, tuple):
- return f"({', '.join(format_query_value(x, server_tz) for x in value)})"
+ return f"({', '.join(str_query_value(x, server_tz) for x in value)})"
if isinstance(value, dict):
if common.get_setting('dict_parameter_format') == 'json':
return format_str(any_to_json(value).decode())
- pairs = [format_query_value(k, server_tz) + ':' + format_query_value(v, server_tz)
+ pairs = [str_query_value(k, server_tz) + ':' + str_query_value(v, server_tz)
for k, v in value.items()]
return f"{{{', '.join(pairs)}}}"
if isinstance(value, Enum):
@@ -415,6 +415,10 @@ def format_query_value(value: Any, server_tz: tzinfo = pytz.UTC):
return value
+def str_query_value(value: Any, server_tz: tzinfo = pytz.UTC):
+ return str(format_query_value(value, server_tz))
+
+
# pylint: disable=too-many-branches
def format_bind_value(value: Any, server_tz: tzinfo = pytz.UTC, top_level: bool = True):
"""
diff --git a/contrib/python/clickhouse-connect/clickhouse_connect/tools/testing.py b/contrib/python/clickhouse-connect/clickhouse_connect/tools/testing.py
index ef3f835e185..7084c71a406 100644
--- a/contrib/python/clickhouse-connect/clickhouse_connect/tools/testing.py
+++ b/contrib/python/clickhouse-connect/clickhouse_connect/tools/testing.py
@@ -1,7 +1,7 @@
from typing import Sequence, Optional, Union, Dict, Any
from clickhouse_connect.driver import Client
-from clickhouse_connect.driver.query import format_query_value, quote_identifier
+from clickhouse_connect.driver.query import quote_identifier, str_query_value
class TableContext:
@@ -44,8 +44,7 @@ class TableContext:
if self.settings:
create_cmd += ' SETTINGS '
for key, value in self.settings.items():
-
- create_cmd += f'{key} = {format_query_value(value)}, '
+ create_cmd += f'{key} = {str_query_value(value)}, '
if create_cmd.endswith(', '):
create_cmd = create_cmd[:-2]
self.client.command(create_cmd)
diff --git a/contrib/python/clickhouse-connect/ya.make b/contrib/python/clickhouse-connect/ya.make
index 9c9ea8d7dfc..c48e09768d4 100644
--- a/contrib/python/clickhouse-connect/ya.make
+++ b/contrib/python/clickhouse-connect/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(0.7.6)
+VERSION(0.7.7)
LICENSE(Apache-2.0)
diff --git a/contrib/python/fonttools/.dist-info/METADATA b/contrib/python/fonttools/.dist-info/METADATA
index b374ebc4999..60b6e6df808 100644
--- a/contrib/python/fonttools/.dist-info/METADATA
+++ b/contrib/python/fonttools/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: fonttools
-Version: 4.50.0
+Version: 4.51.0
Summary: Tools to manipulate font files
Home-page: http://github.com/fonttools/fonttools
Author: Just van Rossum
@@ -375,6 +375,12 @@ Have fun!
Changelog
~~~~~~~~~
+4.51.0 (released 2024-04-05)
+----------------------------
+
+- [ttLib] Optimization on loading aux fields (#3464).
+- [ttFont] Add reorderGlyphs (#3468).
+
4.50.0 (released 2024-03-15)
----------------------------
diff --git a/contrib/python/fonttools/fontTools/__init__.py b/contrib/python/fonttools/fontTools/__init__.py
index ead49e26c89..5621f391f9b 100644
--- a/contrib/python/fonttools/fontTools/__init__.py
+++ b/contrib/python/fonttools/fontTools/__init__.py
@@ -3,6 +3,6 @@ from fontTools.misc.loggingTools import configLogger
log = logging.getLogger(__name__)
-version = __version__ = "4.50.0"
+version = __version__ = "4.51.0"
__all__ = ["version", "log", "configLogger"]
diff --git a/contrib/python/fonttools/fontTools/ttLib/reorderGlyphs.py b/contrib/python/fonttools/fontTools/ttLib/reorderGlyphs.py
new file mode 100644
index 00000000000..3221261f16a
--- /dev/null
+++ b/contrib/python/fonttools/fontTools/ttLib/reorderGlyphs.py
@@ -0,0 +1,278 @@
+"""Reorder glyphs in a font."""
+
+__author__ = "Rod Sheeter"
+
+# See https://docs.google.com/document/d/1h9O-C_ndods87uY0QeIIcgAMiX2gDTpvO_IhMJsKAqs/
+# for details.
+
+
+from fontTools import ttLib
+from fontTools.ttLib.tables import otBase
+from fontTools.ttLib.tables import otTables as ot
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from collections import deque
+from typing import (
+ Optional,
+ Any,
+ Callable,
+ Deque,
+ Iterable,
+ List,
+ NamedTuple,
+ Tuple,
+ Union,
+)
+
+
+_COVERAGE_ATTR = "Coverage" # tables that have one coverage use this name
+
+
+def _sort_by_gid(
+ get_glyph_id: Callable[[str], int],
+ glyphs: List[str],
+ parallel_list: Optional[List[Any]],
+):
+ if parallel_list:
+ reordered = sorted(
+ ((g, e) for g, e in zip(glyphs, parallel_list)),
+ key=lambda t: get_glyph_id(t[0]),
+ )
+ sorted_glyphs, sorted_parallel_list = map(list, zip(*reordered))
+ parallel_list[:] = sorted_parallel_list
+ else:
+ sorted_glyphs = sorted(glyphs, key=get_glyph_id)
+
+ glyphs[:] = sorted_glyphs
+
+
+def _get_dotted_attr(value: Any, dotted_attr: str) -> Any:
+ attr_names = dotted_attr.split(".")
+ assert attr_names
+
+ while attr_names:
+ attr_name = attr_names.pop(0)
+ value = getattr(value, attr_name)
+ return value
+
+
+class ReorderRule(ABC):
+ """A rule to reorder something in a font to match the fonts glyph order."""
+
+ @abstractmethod
+ def apply(self, font: ttLib.TTFont, value: otBase.BaseTable) -> None: ...
+
+
+@dataclass(frozen=True)
+class ReorderCoverage(ReorderRule):
+ """Reorder a Coverage table, and optionally a list that is sorted parallel to it."""
+
+ # A list that is parallel to Coverage
+ parallel_list_attr: Optional[str] = None
+ coverage_attr: str = _COVERAGE_ATTR
+
+ def apply(self, font: ttLib.TTFont, value: otBase.BaseTable) -> None:
+ coverage = _get_dotted_attr(value, self.coverage_attr)
+
+ if type(coverage) is not list:
+ # Normal path, process one coverage that might have a parallel list
+ parallel_list = None
+ if self.parallel_list_attr:
+ parallel_list = _get_dotted_attr(value, self.parallel_list_attr)
+ assert (
+ type(parallel_list) is list
+ ), f"{self.parallel_list_attr} should be a list"
+ assert len(parallel_list) == len(coverage.glyphs), "Nothing makes sense"
+
+ _sort_by_gid(font.getGlyphID, coverage.glyphs, parallel_list)
+
+ else:
+ # A few tables have a list of coverage. No parallel list can exist.
+ assert (
+ not self.parallel_list_attr
+ ), f"Can't have multiple coverage AND a parallel list; {self}"
+ for coverage_entry in coverage:
+ _sort_by_gid(font.getGlyphID, coverage_entry.glyphs, None)
+
+
+@dataclass(frozen=True)
+class ReorderList(ReorderRule):
+ """Reorder the items within a list to match the updated glyph order.
+
+ Useful when a list ordered by coverage itself contains something ordered by a gid.
+ For example, the PairSet table of https://docs.microsoft.com/en-us/typography/opentype/spec/gpos#lookup-type-2-pair-adjustment-positioning-subtable.
+ """
+
+ list_attr: str
+ key: str
+
+ def apply(self, font: ttLib.TTFont, value: otBase.BaseTable) -> None:
+ lst = _get_dotted_attr(value, self.list_attr)
+ assert isinstance(lst, list), f"{self.list_attr} should be a list"
+ lst.sort(key=lambda v: font.getGlyphID(getattr(v, self.key)))
+
+
+# (Type, Optional Format) => List[ReorderRule]
+# Encodes the relationships Cosimo identified
+_REORDER_RULES = {
+ # GPOS
+ (ot.SinglePos, 1): [ReorderCoverage()],
+ (ot.SinglePos, 2): [ReorderCoverage(parallel_list_attr="Value")],
+ (ot.PairPos, 1): [ReorderCoverage(parallel_list_attr="PairSet")],
+ (ot.PairSet, None): [ReorderList("PairValueRecord", key="SecondGlyph")],
+ (ot.PairPos, 2): [ReorderCoverage()],
+ (ot.CursivePos, 1): [ReorderCoverage(parallel_list_attr="EntryExitRecord")],
+ (ot.MarkBasePos, 1): [
+ ReorderCoverage(
+ coverage_attr="MarkCoverage", parallel_list_attr="MarkArray.MarkRecord"
+ ),
+ ReorderCoverage(
+ coverage_attr="BaseCoverage", parallel_list_attr="BaseArray.BaseRecord"
+ ),
+ ],
+ (ot.MarkLigPos, 1): [
+ ReorderCoverage(
+ coverage_attr="MarkCoverage", parallel_list_attr="MarkArray.MarkRecord"
+ ),
+ ReorderCoverage(
+ coverage_attr="LigatureCoverage",
+ parallel_list_attr="LigatureArray.LigatureAttach",
+ ),
+ ],
+ (ot.MarkMarkPos, 1): [
+ ReorderCoverage(
+ coverage_attr="Mark1Coverage", parallel_list_attr="Mark1Array.MarkRecord"
+ ),
+ ReorderCoverage(
+ coverage_attr="Mark2Coverage", parallel_list_attr="Mark2Array.Mark2Record"
+ ),
+ ],
+ (ot.ContextPos, 1): [ReorderCoverage(parallel_list_attr="PosRuleSet")],
+ (ot.ContextPos, 2): [ReorderCoverage()],
+ (ot.ContextPos, 3): [ReorderCoverage()],
+ (ot.ChainContextPos, 1): [ReorderCoverage(parallel_list_attr="ChainPosRuleSet")],
+ (ot.ChainContextPos, 2): [ReorderCoverage()],
+ (ot.ChainContextPos, 3): [
+ ReorderCoverage(coverage_attr="BacktrackCoverage"),
+ ReorderCoverage(coverage_attr="InputCoverage"),
+ ReorderCoverage(coverage_attr="LookAheadCoverage"),
+ ],
+ # GSUB
+ (ot.ContextSubst, 1): [ReorderCoverage(parallel_list_attr="SubRuleSet")],
+ (ot.ContextSubst, 2): [ReorderCoverage()],
+ (ot.ContextSubst, 3): [ReorderCoverage()],
+ (ot.ChainContextSubst, 1): [ReorderCoverage(parallel_list_attr="ChainSubRuleSet")],
+ (ot.ChainContextSubst, 2): [ReorderCoverage()],
+ (ot.ChainContextSubst, 3): [
+ ReorderCoverage(coverage_attr="BacktrackCoverage"),
+ ReorderCoverage(coverage_attr="InputCoverage"),
+ ReorderCoverage(coverage_attr="LookAheadCoverage"),
+ ],
+ (ot.ReverseChainSingleSubst, 1): [
+ ReorderCoverage(parallel_list_attr="Substitute"),
+ ReorderCoverage(coverage_attr="BacktrackCoverage"),
+ ReorderCoverage(coverage_attr="LookAheadCoverage"),
+ ],
+ # GDEF
+ (ot.AttachList, None): [ReorderCoverage(parallel_list_attr="AttachPoint")],
+ (ot.LigCaretList, None): [ReorderCoverage(parallel_list_attr="LigGlyph")],
+ (ot.MarkGlyphSetsDef, None): [ReorderCoverage()],
+ # MATH
+ (ot.MathGlyphInfo, None): [ReorderCoverage(coverage_attr="ExtendedShapeCoverage")],
+ (ot.MathItalicsCorrectionInfo, None): [
+ ReorderCoverage(parallel_list_attr="ItalicsCorrection")
+ ],
+ (ot.MathTopAccentAttachment, None): [
+ ReorderCoverage(
+ coverage_attr="TopAccentCoverage", parallel_list_attr="TopAccentAttachment"
+ )
+ ],
+ (ot.MathKernInfo, None): [
+ ReorderCoverage(
+ coverage_attr="MathKernCoverage", parallel_list_attr="MathKernInfoRecords"
+ )
+ ],
+ (ot.MathVariants, None): [
+ ReorderCoverage(
+ coverage_attr="VertGlyphCoverage",
+ parallel_list_attr="VertGlyphConstruction",
+ ),
+ ReorderCoverage(
+ coverage_attr="HorizGlyphCoverage",
+ parallel_list_attr="HorizGlyphConstruction",
+ ),
+ ],
+}
+
+
+# TODO Port to otTraverse
+
+SubTablePath = Tuple[otBase.BaseTable.SubTableEntry, ...]
+
+
+def _bfs_base_table(
+ root: otBase.BaseTable, root_accessor: str
+) -> Iterable[SubTablePath]:
+ yield from _traverse_ot_data(
+ root, root_accessor, lambda frontier, new: frontier.extend(new)
+ )
+
+
+# Given f(current frontier, new entries) add new entries to frontier
+AddToFrontierFn = Callable[[Deque[SubTablePath], List[SubTablePath]], None]
+
+
+def _traverse_ot_data(
+ root: otBase.BaseTable, root_accessor: str, add_to_frontier_fn: AddToFrontierFn
+) -> Iterable[SubTablePath]:
+ # no visited because general otData is forward-offset only and thus cannot cycle
+
+ frontier: Deque[SubTablePath] = deque()
+ frontier.append((otBase.BaseTable.SubTableEntry(root_accessor, root),))
+ while frontier:
+ # path is (value, attr_name) tuples. attr_name is attr of parent to get value
+ path = frontier.popleft()
+ current = path[-1].value
+
+ yield path
+
+ new_entries = []
+ for subtable_entry in current.iterSubTables():
+ new_entries.append(path + (subtable_entry,))
+
+ add_to_frontier_fn(frontier, new_entries)
+
+
+def reorderGlyphs(font: ttLib.TTFont, new_glyph_order: List[str]):
+ old_glyph_order = font.getGlyphOrder()
+ if len(new_glyph_order) != len(old_glyph_order):
+ raise ValueError(
+ f"New glyph order contains {len(new_glyph_order)} glyphs, "
+ f"but font has {len(old_glyph_order)} glyphs"
+ )
+
+ if set(old_glyph_order) != set(new_glyph_order):
+ raise ValueError(
+ "New glyph order does not contain the same set of glyphs as the font:\n"
+ f"* only in new: {set(new_glyph_order) - set(old_glyph_order)}\n"
+ f"* only in old: {set(old_glyph_order) - set(new_glyph_order)}"
+ )
+
+ # Changing the order of glyphs in a TTFont requires that all tables that use
+ # glyph indexes have been fully.
+ # Cf. https://github.com/fonttools/fonttools/issues/2060
+ font.ensureDecompiled()
+ not_loaded = sorted(t for t in font.keys() if not font.isLoaded(t))
+ if not_loaded:
+ raise ValueError(f"Everything should be loaded, following aren't: {not_loaded}")
+
+ font.setGlyphOrder(new_glyph_order)
+
+ coverage_containers = {"GDEF", "GPOS", "GSUB", "MATH"}
+ for tag in coverage_containers:
+ if tag in font.keys():
+ for path in _bfs_base_table(font[tag].table, f'font["{tag}"]'):
+ value = path[-1].value
+ reorder_key = (type(value), getattr(value, "Format", None))
+ for reorder in _REORDER_RULES.get(reorder_key, []):
+ reorder.apply(font, value)
diff --git a/contrib/python/fonttools/fontTools/ttLib/tables/otConverters.py b/contrib/python/fonttools/fontTools/ttLib/tables/otConverters.py
index afe4e538f43..a2f672567ec 100644
--- a/contrib/python/fonttools/fontTools/ttLib/tables/otConverters.py
+++ b/contrib/python/fonttools/fontTools/ttLib/tables/otConverters.py
@@ -153,6 +153,8 @@ class BaseConverter(object):
self.name = name
self.repeat = repeat
self.aux = aux
+ if self.aux and not self.repeat:
+ self.aux = compile(self.aux, "<string>", "eval")
self.tableClass = tableClass
self.isCount = name.endswith("Count") or name in [
"DesignAxisRecordSize",
diff --git a/contrib/python/fonttools/fontTools/ttLib/ttFont.py b/contrib/python/fonttools/fontTools/ttLib/ttFont.py
index ad62a187de1..52e048b5f1b 100644
--- a/contrib/python/fonttools/fontTools/ttLib/ttFont.py
+++ b/contrib/python/fonttools/fontTools/ttLib/ttFont.py
@@ -840,6 +840,11 @@ class TTFont(object):
"""
return self["cmap"].getBestCmap(cmapPreferences=cmapPreferences)
+ def reorderGlyphs(self, new_glyph_order):
+ from .reorderGlyphs import reorderGlyphs
+
+ reorderGlyphs(self, new_glyph_order)
+
class GlyphOrder(object):
"""A pseudo table. The glyph order isn't in the font as a separate
diff --git a/contrib/python/fonttools/ya.make b/contrib/python/fonttools/ya.make
index a6872287a98..91be3261dfb 100644
--- a/contrib/python/fonttools/ya.make
+++ b/contrib/python/fonttools/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(4.50.0)
+VERSION(4.51.0)
LICENSE(MIT)
@@ -163,6 +163,7 @@ PY_SRCS(
fontTools/ttLib/__main__.py
fontTools/ttLib/macUtils.py
fontTools/ttLib/removeOverlaps.py
+ fontTools/ttLib/reorderGlyphs.py
fontTools/ttLib/scaleUpem.py
fontTools/ttLib/sfnt.py
fontTools/ttLib/standardGlyphOrder.py
diff --git a/contrib/python/future/py2/.dist-info/METADATA b/contrib/python/future/py2/.dist-info/METADATA
index 124bf500d0b..3d0e5512c62 100644
--- a/contrib/python/future/py2/.dist-info/METADATA
+++ b/contrib/python/future/py2/.dist-info/METADATA
@@ -1,13 +1,13 @@
Metadata-Version: 2.1
Name: future
-Version: 0.18.3
+Version: 1.0.0
Summary: Clean single-source support for Python 3 and 2
Home-page: https://python-future.org
Author: Ed Schofield
Author-email: ed@pythoncharmers.com
License: MIT
+Project-URL: Source, https://github.com/PythonCharmers/python-future
Keywords: future past python3 migration futurize backport six 2to3 modernize pasteurize 3to2
-Platform: UNKNOWN
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
@@ -18,11 +18,17 @@ Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
Classifier: License :: OSI Approved
Classifier: License :: OSI Approved :: MIT License
-Classifier: Development Status :: 4 - Beta
+Classifier: Development Status :: 6 - Mature
Classifier: Intended Audience :: Developers
Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.*
+License-File: LICENSE.txt
future: Easy, safe support for Python 2/3 compatibility
@@ -78,7 +84,7 @@ Automatic conversion
--------------------
An included script called `futurize
-<http://python-future.org/automatic_conversion.html>`_ aids in converting
+<https://python-future.org/automatic_conversion.html>`_ aids in converting
code (from either Python 2 or Python 3) to code compatible with both
platforms. It is similar to ``python-modernize`` but goes further in
providing Python 3 compatibility through the use of the backported types
@@ -88,22 +94,19 @@ and builtin functions in ``future``.
Documentation
-------------
-See: http://python-future.org
+See: https://python-future.org
Credits
-------
:Author: Ed Schofield, Jordan M. Adler, et al
-:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte
- Ltd, Singapore. http://pythoncharmers.com
-:Others: See docs/credits.rst or http://python-future.org/credits.html
+:Sponsor: Python Charmers: https://pythoncharmers.com
+:Others: See docs/credits.rst or https://python-future.org/credits.html
Licensing
---------
-Copyright 2013-2019 Python Charmers Pty Ltd, Australia.
+Copyright 2013-2024 Python Charmers, Australia.
The software is distributed under an MIT licence. See LICENSE.txt.
-
-
diff --git a/contrib/python/future/py2/.dist-info/entry_points.txt b/contrib/python/future/py2/.dist-info/entry_points.txt
index 45d1a880fbd..74aec276c83 100644
--- a/contrib/python/future/py2/.dist-info/entry_points.txt
+++ b/contrib/python/future/py2/.dist-info/entry_points.txt
@@ -1,4 +1,3 @@
[console_scripts]
futurize = libfuturize.main:main
pasteurize = libpasteurize.main:main
-
diff --git a/contrib/python/future/py2/LICENSE.txt b/contrib/python/future/py2/LICENSE.txt
index 4c904dba8fe..275cafd3036 100644
--- a/contrib/python/future/py2/LICENSE.txt
+++ b/contrib/python/future/py2/LICENSE.txt
@@ -1,4 +1,4 @@
-Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia
+Copyright (c) 2013-2024 Python Charmers, Australia
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/contrib/python/future/py2/README.rst b/contrib/python/future/py2/README.rst
index 1ab43e53d7a..a3aceb7d4a0 100644
--- a/contrib/python/future/py2/README.rst
+++ b/contrib/python/future/py2/README.rst
@@ -3,11 +3,8 @@
Overview: Easy, clean, reliable Python 2/3 compatibility
========================================================
-.. image:: https://travis-ci.org/PythonCharmers/python-future.svg?branch=master
- :target: https://travis-ci.org/PythonCharmers/python-future
-
-.. image:: https://readthedocs.org/projects/python-future/badge/?version=latest
- :target: https://python-future.readthedocs.io/en/latest/?badge=latest
+.. image:: https://github.com/PythonCharmers/python-future/actions/workflows/ci.yml/badge.svg?branch=master
+ :target: https://github.com/PythonCharmers/python-future/actions/workflows/ci.yml?query=branch%3Amaster
``python-future`` is the missing compatibility layer between Python 2 and
Python 3. It allows you to use a single, clean Python 3.x-compatible
@@ -19,9 +16,21 @@ ports of features from Python 3 and 2. It also comes with ``futurize`` and
either Py2 or Py3 code easily to support both Python 2 and 3 in a single
clean Py3-style codebase, module by module.
-Notable projects that use ``python-future`` for Python 2/3 compatibility
-are `Mezzanine <http://mezzanine.jupo.org/>`_ and `ObsPy
-<http://obspy.org>`_.
+The ``python-future`` project has been downloaded over 1.7 billion times.
+
+.. _status:
+
+Status
+------
+
+The ``python-future`` project was created in 2013 to attempt to save Python from
+the schism of version incompatibility that was threatening to tear apart the
+language (as Perl 6 contributed to the death of Perl).
+
+That time is now past. Thanks to a huge porting effort across the Python
+community, Python 3 eventually thrived. Python 2 reached its end of life in
+2020 and the ``python-future`` package should no longer be necessary. Use it to
+help with porting legacy code to Python 3 but don't depend on it for new code.
.. _features:
@@ -223,11 +232,14 @@ into this code which runs on both Py2 and Py3:
name = input()
greet(name)
+The first four lines have no effect under Python 3 and can be removed from
+the codebase when Python 2 compatibility is no longer required.
+
See :ref:`forwards-conversion` and :ref:`backwards-conversion` for more details.
Automatic translation
----------------------
+~~~~~~~~~~~~~~~~~~~~~
The ``past`` package can automatically translate some simple Python 2
modules to Python 3 upon import. The goal is to support the "long tail" of
@@ -264,10 +276,9 @@ properly to a Python 2/3 compatible codebase using a tool like
Note: the auto-translation feature is still in alpha; it needs more testing and
development, and will likely never be perfect.
-For more info, see :ref:`translation`.
Pre-commit hooks
-----------------
+~~~~~~~~~~~~~~~~
`Pre-commit <https://pre-commit.com/>`_ is a framework for managing and maintaining
multi-language pre-commit hooks.
@@ -304,23 +315,25 @@ Licensing
:Author: Ed Schofield, Jordan M. Adler, et al
-:Copyright: 2013-2019 Python Charmers Pty Ltd, Australia.
+:Copyright: 2013-2024 Python Charmers, Australia.
-:Sponsors: Python Charmers Pty Ltd, Australia, and Python Charmers Pte
- Ltd, Singapore. http://pythoncharmers.com
+:Sponsors: Python Charmers: https://pythoncharmers.com
- Pinterest https://opensource.pinterest.com/
+ Pinterest https://opensource.pinterest.com
-:Licence: MIT. See ``LICENSE.txt`` or `here <http://python-future.org/credits.html>`_.
+:Licence: MIT. See ``LICENSE.txt`` or `here <https://python-future.org/credits.html>`_.
-:Other credits: See `here <http://python-future.org/credits.html>`_.
+:Other credits: See `here <https://python-future.org/credits.html>`_.
+Docs
+----
+See the docs `here <https://python-future.org>`_.
Next steps
----------
If you are new to Python-Future, check out the `Quickstart Guide
-<http://python-future.org/quickstart.html>`_.
+<https://python-future.org/quickstart.html>`_.
For an update on changes in the latest version, see the `What's New
-<http://python-future.org/whatsnew.html>`_ page.
+<https://python-future.org/whatsnew.html>`_ page.
diff --git a/contrib/python/future/py2/future/__init__.py b/contrib/python/future/py2/future/__init__.py
index b609299a7a2..b097fd81eb6 100644
--- a/contrib/python/future/py2/future/__init__.py
+++ b/contrib/python/future/py2/future/__init__.py
@@ -52,7 +52,7 @@ Automatic conversion
--------------------
An included script called `futurize
-<http://python-future.org/automatic_conversion.html>`_ aids in converting
+<https://python-future.org/automatic_conversion.html>`_ aids in converting
code (from either Python 2 or Python 3) to code compatible with both
platforms. It is similar to ``python-modernize`` but goes further in
providing Python 3 compatibility through the use of the backported types
@@ -62,21 +62,20 @@ and builtin functions in ``future``.
Documentation
-------------
-See: http://python-future.org
+See: https://python-future.org
Credits
-------
:Author: Ed Schofield, Jordan M. Adler, et al
-:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte
- Ltd, Singapore. http://pythoncharmers.com
-:Others: See docs/credits.rst or http://python-future.org/credits.html
+:Sponsor: Python Charmers: https://pythoncharmers.com
+:Others: See docs/credits.rst or https://python-future.org/credits.html
Licensing
---------
-Copyright 2013-2019 Python Charmers Pty Ltd, Australia.
+Copyright 2013-2024 Python Charmers, Australia.
The software is distributed under an MIT licence. See LICENSE.txt.
"""
@@ -84,10 +83,10 @@ The software is distributed under an MIT licence. See LICENSE.txt.
__title__ = 'future'
__author__ = 'Ed Schofield'
__license__ = 'MIT'
-__copyright__ = 'Copyright 2013-2019 Python Charmers Pty Ltd'
-__ver_major__ = 0
-__ver_minor__ = 18
-__ver_patch__ = 3
+__copyright__ = 'Copyright 2013-2024 Python Charmers (https://pythoncharmers.com)'
+__ver_major__ = 1
+__ver_minor__ = 0
+__ver_patch__ = 0
__ver_sub__ = ''
__version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__,
__ver_patch__, __ver_sub__)
diff --git a/contrib/python/future/py2/future/backports/datetime.py b/contrib/python/future/py2/future/backports/datetime.py
index 3261014e056..8cd62ddfa5c 100644
--- a/contrib/python/future/py2/future/backports/datetime.py
+++ b/contrib/python/future/py2/future/backports/datetime.py
@@ -689,7 +689,7 @@ class date(object):
@classmethod
def fromordinal(cls, n):
- """Contruct a date from a proleptic Gregorian ordinal.
+ """Construct a date from a proleptic Gregorian ordinal.
January 1 of year 1 is day 1. Only the year, month and day are
non-zero in the result.
diff --git a/contrib/python/future/py2/future/backports/email/_header_value_parser.py b/contrib/python/future/py2/future/backports/email/_header_value_parser.py
index 43957edc12f..59b1b318f38 100644
--- a/contrib/python/future/py2/future/backports/email/_header_value_parser.py
+++ b/contrib/python/future/py2/future/backports/email/_header_value_parser.py
@@ -2867,7 +2867,7 @@ def parse_content_type_header(value):
_find_mime_parameters(ctype, value)
return ctype
ctype.append(token)
- # XXX: If we really want to follow the formal grammer we should make
+ # XXX: If we really want to follow the formal grammar we should make
# mantype and subtype specialized TokenLists here. Probably not worth it.
if not value or value[0] != '/':
ctype.defects.append(errors.InvalidHeaderDefect(
diff --git a/contrib/python/future/py2/future/backports/email/parser.py b/contrib/python/future/py2/future/backports/email/parser.py
index df1c6e28689..79f0e5a33eb 100644
--- a/contrib/python/future/py2/future/backports/email/parser.py
+++ b/contrib/python/future/py2/future/backports/email/parser.py
@@ -26,7 +26,7 @@ class Parser(object):
textual representation of the message.
The string must be formatted as a block of RFC 2822 headers and header
- continuation lines, optionally preceeded by a `Unix-from' header. The
+ continuation lines, optionally preceded by a `Unix-from' header. The
header block is terminated either by the end of the string or by a
blank line.
@@ -92,7 +92,7 @@ class BytesParser(object):
textual representation of the message.
The input must be formatted as a block of RFC 2822 headers and header
- continuation lines, optionally preceeded by a `Unix-from' header. The
+ continuation lines, optionally preceded by a `Unix-from' header. The
header block is terminated either by the end of the input or by a
blank line.
diff --git a/contrib/python/future/py2/future/backports/http/cookiejar.py b/contrib/python/future/py2/future/backports/http/cookiejar.py
index 0ad80a0258a..a39242c0827 100644
--- a/contrib/python/future/py2/future/backports/http/cookiejar.py
+++ b/contrib/python/future/py2/future/backports/http/cookiejar.py
@@ -1851,7 +1851,7 @@ def lwp_cookie_str(cookie):
class LWPCookieJar(FileCookieJar):
"""
The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
- "Set-Cookie3" is the format used by the libwww-perl libary, not known
+ "Set-Cookie3" is the format used by the libwww-perl library, not known
to be compatible with any browser, but which is easy to read and
doesn't lose information about RFC 2965 cookies.
diff --git a/contrib/python/future/py2/future/backports/xmlrpc/client.py b/contrib/python/future/py2/future/backports/xmlrpc/client.py
index b0b8f5e19ef..5c2cee0958c 100644
--- a/contrib/python/future/py2/future/backports/xmlrpc/client.py
+++ b/contrib/python/future/py2/future/backports/xmlrpc/client.py
@@ -134,10 +134,11 @@ from __future__ import (absolute_import, division, print_function,
from future.builtins import bytes, dict, int, range, str
import base64
-# Py2.7 compatibility hack
-base64.encodebytes = base64.encodestring
-base64.decodebytes = base64.decodestring
import sys
+if sys.version_info < (3, 9):
+ # Py2.7 compatibility hack
+ base64.encodebytes = base64.encodestring
+ base64.decodebytes = base64.decodestring
import time
from datetime import datetime
from future.backports.http import client as http_client
@@ -1254,7 +1255,7 @@ class Transport(object):
# Send HTTP request.
#
# @param host Host descriptor (URL or (URL, x509 info) tuple).
- # @param handler Targer RPC handler (a path relative to host)
+ # @param handler Target RPC handler (a path relative to host)
# @param request_body The XML-RPC request body
# @param debug Enable debugging if debug is true.
# @return An HTTPConnection.
diff --git a/contrib/python/future/py2/future/builtins/__init__.py b/contrib/python/future/py2/future/builtins/__init__.py
index 8bc1649d2fd..1734cd45fe3 100644
--- a/contrib/python/future/py2/future/builtins/__init__.py
+++ b/contrib/python/future/py2/future/builtins/__init__.py
@@ -2,7 +2,7 @@
A module that brings in equivalents of the new and modified Python 3
builtins into Py2. Has no effect on Py3.
-See the docs `here <http://python-future.org/what-else.html>`_
+See the docs `here <https://python-future.org/what-else.html>`_
(``docs/what-else.rst``) for more information.
"""
diff --git a/contrib/python/future/py2/future/moves/_dummy_thread.py b/contrib/python/future/py2/future/moves/_dummy_thread.py
index 688d249bbe7..6633f42e0cb 100644
--- a/contrib/python/future/py2/future/moves/_dummy_thread.py
+++ b/contrib/python/future/py2/future/moves/_dummy_thread.py
@@ -1,8 +1,13 @@
from __future__ import absolute_import
-from future.utils import PY3
+from future.utils import PY3, PY39_PLUS
-if PY3:
- from _dummy_thread import *
+
+if PY39_PLUS:
+ # _dummy_thread and dummy_threading modules were both deprecated in
+ # Python 3.7 and removed in Python 3.9
+ from _thread import *
+elif PY3:
+ from _dummy_thread import *
else:
__future_module__ = True
from dummy_thread import *
diff --git a/contrib/python/future/py2/future/moves/multiprocessing.py b/contrib/python/future/py2/future/moves/multiprocessing.py
new file mode 100644
index 00000000000..a871b676f46
--- /dev/null
+++ b/contrib/python/future/py2/future/moves/multiprocessing.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+from future.utils import PY3
+
+from multiprocessing import *
+if not PY3:
+ __future_module__ = True
+ from multiprocessing.queues import SimpleQueue
diff --git a/contrib/python/future/py2/future/standard_library/__init__.py b/contrib/python/future/py2/future/standard_library/__init__.py
index cff02f95943..d467aaf492c 100644
--- a/contrib/python/future/py2/future/standard_library/__init__.py
+++ b/contrib/python/future/py2/future/standard_library/__init__.py
@@ -17,7 +17,7 @@ And then these normal Py3 imports work on both Py3 and Py2::
import socketserver
import winreg # on Windows only
import test.support
- import html, html.parser, html.entites
+ import html, html.parser, html.entities
import http, http.client, http.server
import http.cookies, http.cookiejar
import urllib.parse, urllib.request, urllib.response, urllib.error, urllib.robotparser
@@ -33,6 +33,7 @@ And then these normal Py3 imports work on both Py3 and Py2::
from collections import OrderedDict, Counter, ChainMap # even on Py2.6
from subprocess import getoutput, getstatusoutput
from subprocess import check_output # even on Py2.6
+ from multiprocessing import SimpleQueue
(The renamed modules and functions are still available under their old
names on Python 2.)
@@ -62,9 +63,12 @@ from __future__ import absolute_import, division, print_function
import sys
import logging
-import imp
+# imp was deprecated in python 3.6
+if sys.version_info >= (3, 6):
+ import importlib as imp
+else:
+ import imp
import contextlib
-import types
import copy
import os
@@ -108,6 +112,7 @@ RENAMES = {
'future.moves.socketserver': 'socketserver',
'ConfigParser': 'configparser',
'repr': 'reprlib',
+ 'multiprocessing.queues': 'multiprocessing',
# 'FileDialog': 'tkinter.filedialog',
# 'tkFileDialog': 'tkinter.filedialog',
# 'SimpleDialog': 'tkinter.simpledialog',
@@ -125,7 +130,7 @@ RENAMES = {
# 'Tkinter': 'tkinter',
'_winreg': 'winreg',
'thread': '_thread',
- 'dummy_thread': '_dummy_thread',
+ 'dummy_thread': '_dummy_thread' if sys.version_info < (3, 9) else '_thread',
# 'anydbm': 'dbm', # causes infinite import loop
# 'whichdb': 'dbm', # causes infinite import loop
# anydbm and whichdb are handled by fix_imports2
@@ -184,6 +189,7 @@ MOVES = [('collections', 'UserList', 'UserList', 'UserList'),
('itertools', 'filterfalse','itertools', 'ifilterfalse'),
('itertools', 'zip_longest','itertools', 'izip_longest'),
('sys', 'intern','__builtin__', 'intern'),
+ ('multiprocessing', 'SimpleQueue', 'multiprocessing.queues', 'SimpleQueue'),
# The re module has no ASCII flag in Py2, but this is the default.
# Set re.ASCII to a zero constant. stat.ST_MODE just happens to be one
# (and it exists on Py2.6+).
diff --git a/contrib/python/future/py2/future/types/newint.py b/contrib/python/future/py2/future/types/newint.py
index 04a411e9331..ebc5715e2b6 100644
--- a/contrib/python/future/py2/future/types/newint.py
+++ b/contrib/python/future/py2/future/types/newint.py
@@ -223,9 +223,11 @@ class newint(with_metaclass(BaseNewInt, long)):
def __rpow__(self, other):
value = super(newint, self).__rpow__(other)
- if value is NotImplemented:
+ if isint(value):
+ return newint(value)
+ elif value is NotImplemented:
return other ** long(self)
- return newint(value)
+ return value
def __lshift__(self, other):
if not isint(other):
@@ -318,7 +320,7 @@ class newint(with_metaclass(BaseNewInt, long)):
bits = length * 8
num = (2**bits) + self
if num <= 0:
- raise OverflowError("int too smal to convert")
+ raise OverflowError("int too small to convert")
else:
if self < 0:
raise OverflowError("can't convert negative int to unsigned")
diff --git a/contrib/python/future/py2/future/types/newrange.py b/contrib/python/future/py2/future/types/newrange.py
index 6d4ebe2f8f6..dc5eb80222b 100644
--- a/contrib/python/future/py2/future/types/newrange.py
+++ b/contrib/python/future/py2/future/types/newrange.py
@@ -105,7 +105,7 @@ class newrange(Sequence):
raise ValueError('%r is not in range' % value)
def count(self, value):
- """Return the number of ocurrences of integer `value`
+ """Return the number of occurrences of integer `value`
in the sequence this range represents."""
# a value can occur exactly zero or one times
return int(value in self)
diff --git a/contrib/python/future/py2/past/__init__.py b/contrib/python/future/py2/past/__init__.py
index 14713039332..54619e0a608 100644
--- a/contrib/python/future/py2/past/__init__.py
+++ b/contrib/python/future/py2/past/__init__.py
@@ -75,12 +75,12 @@ Credits
-------
:Author: Ed Schofield, Jordan M. Adler, et al
-:Sponsor: Python Charmers Pty Ltd, Australia: http://pythoncharmers.com
+:Sponsor: Python Charmers: https://pythoncharmers.com
Licensing
---------
-Copyright 2013-2019 Python Charmers Pty Ltd, Australia.
+Copyright 2013-2024 Python Charmers, Australia.
The software is distributed under an MIT licence. See LICENSE.txt.
"""
diff --git a/contrib/python/future/py2/past/builtins/misc.py b/contrib/python/future/py2/past/builtins/misc.py
index 3600695c0aa..0b8e6a986c7 100644
--- a/contrib/python/future/py2/past/builtins/misc.py
+++ b/contrib/python/future/py2/past/builtins/misc.py
@@ -1,11 +1,13 @@
from __future__ import unicode_literals
import inspect
+import sys
import math
import numbers
from future.utils import PY2, PY3, exec_
+
if PY2:
from collections import Mapping
else:
@@ -103,13 +105,12 @@ if PY3:
return '0' + builtins.oct(number)[2:]
raw_input = input
-
- try:
+ # imp was deprecated in python 3.6
+ if sys.version_info >= (3, 6):
from importlib import reload
- except ImportError:
+ else:
# for python2, python3 <= 3.4
from imp import reload
-
unicode = str
unichr = chr
xrange = range
diff --git a/contrib/python/future/py2/ya.make b/contrib/python/future/py2/ya.make
index f537724340b..3761f698744 100644
--- a/contrib/python/future/py2/ya.make
+++ b/contrib/python/future/py2/ya.make
@@ -2,7 +2,7 @@
PY2_LIBRARY()
-VERSION(0.18.3)
+VERSION(1.0.0)
LICENSE(MIT)
@@ -105,6 +105,7 @@ PY_SRCS(
future/moves/http/cookies.py
future/moves/http/server.py
future/moves/itertools.py
+ future/moves/multiprocessing.py
future/moves/pickle.py
future/moves/queue.py
future/moves/reprlib.py
diff --git a/contrib/python/hypothesis/py3/.dist-info/METADATA b/contrib/python/hypothesis/py3/.dist-info/METADATA
index a4cff64dd92..5c622375cd1 100644
--- a/contrib/python/hypothesis/py3/.dist-info/METADATA
+++ b/contrib/python/hypothesis/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: hypothesis
-Version: 6.100.0
+Version: 6.100.1
Summary: A library for property-based testing
Home-page: https://hypothesis.works
Author: David R. MacIver and Zac Hatfield-Dodds
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
index 701105bd5e2..93f7758ba2e 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/data.py
@@ -91,6 +91,9 @@ DRAW_FLOAT_LABEL = calc_label_from_name("drawing a float")
FLOAT_STRATEGY_DO_DRAW_LABEL = calc_label_from_name(
"getting another float in FloatStrategy"
)
+INTEGER_WEIGHTED_DISTRIBUTION = calc_label_from_name(
+ "drawing from a weighted distribution in integers"
+)
InterestingOrigin = Tuple[
Type[BaseException], str, int, Tuple[Any, ...], Tuple[Tuple[Any, ...], ...]
@@ -1673,6 +1676,7 @@ class HypothesisProvider(PrimitiveProvider):
center: Optional[int] = None,
forced: Optional[int] = None,
fake_forced: bool = False,
+ _vary_effective_size: bool = True,
) -> int:
assert lower <= upper
assert forced is None or lower <= forced <= upper
@@ -1709,14 +1713,27 @@ class HypothesisProvider(PrimitiveProvider):
bits = gap.bit_length()
probe = gap + 1
- if bits > 24 and self.draw_boolean(
- 7 / 8, forced=None if forced is None else False, fake_forced=fake_forced
+ if (
+ bits > 24
+ and _vary_effective_size
+ and self.draw_boolean(
+ 7 / 8, forced=None if forced is None else False, fake_forced=fake_forced
+ )
):
+ self._cd.start_example(INTEGER_WEIGHTED_DISTRIBUTION)
# For large ranges, we combine the uniform random distribution from draw_bits
# with a weighting scheme with moderate chance. Cutoff at 2 ** 24 so that our
# choice of unicode characters is uniform but the 32bit distribution is not.
idx = INT_SIZES_SAMPLER.sample(self._cd)
- bits = min(bits, INT_SIZES[idx])
+ force_bits = min(bits, INT_SIZES[idx])
+ forced = self._draw_bounded_integer(
+ lower=center if above else max(lower, center - 2**force_bits - 1),
+ upper=center if not above else min(upper, center + 2**force_bits - 1),
+ _vary_effective_size=False,
+ )
+ self._cd.stop_example()
+
+ assert lower <= forced <= upper
while probe > gap:
self._cd.start_example(INTEGER_RANGE_DRAW_LABEL)
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
index 6ab4c2783fe..c22cd0b2946 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/datatree.py
@@ -30,6 +30,7 @@ from hypothesis.internal.conjecture.data import (
Status,
StringKWargs,
)
+from hypothesis.internal.escalation import InterestingOrigin
from hypothesis.internal.floats import (
count_between_floats,
float_to_int,
@@ -83,8 +84,8 @@ class Branch:
class Conclusion:
"""Represents a transition to a finished state."""
- status = attr.ib()
- interesting_origin = attr.ib()
+ status: Status = attr.ib()
+ interesting_origin: Optional[InterestingOrigin] = attr.ib()
# The number of max children where, beyond this, it is practically impossible
@@ -1043,8 +1044,9 @@ class TreeRecordingObserver(DataObserver):
or new_transition.status != Status.VALID
):
raise Flaky(
- f"Inconsistent test results! Test case was {node.transition!r} "
- f"on first run but {new_transition!r} on second"
+ f"Inconsistent results from replaying a test case!\n"
+ f" last: {node.transition.status.name} from {node.transition.interesting_origin}\n"
+ f" this: {new_transition.status.name} from {new_transition.interesting_origin}"
)
else:
node.transition = new_transition
diff --git a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/pareto.py b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/pareto.py
index 146b1b56f4f..b43e6df5bc5 100644
--- a/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/pareto.py
+++ b/contrib/python/hypothesis/py3/hypothesis/internal/conjecture/pareto.py
@@ -133,10 +133,11 @@ class ParetoFront:
"""Attempts to add ``data`` to the pareto front. Returns True if
``data`` is now in the front, including if data is already in the
collection, and False otherwise"""
- data = data.as_result()
if data.status < Status.VALID:
return False
+ data = data.as_result()
+
if not self.front:
self.front.add(data)
return True
diff --git a/contrib/python/hypothesis/py3/hypothesis/version.py b/contrib/python/hypothesis/py3/hypothesis/version.py
index 158a440f0a2..be228256343 100644
--- a/contrib/python/hypothesis/py3/hypothesis/version.py
+++ b/contrib/python/hypothesis/py3/hypothesis/version.py
@@ -8,5 +8,5 @@
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
-__version_info__ = (6, 100, 0)
+__version_info__ = (6, 100, 1)
__version__ = ".".join(map(str, __version_info__))
diff --git a/contrib/python/hypothesis/py3/ya.make b/contrib/python/hypothesis/py3/ya.make
index 986fc7ae788..c899d543fd3 100644
--- a/contrib/python/hypothesis/py3/ya.make
+++ b/contrib/python/hypothesis/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(6.100.0)
+VERSION(6.100.1)
LICENSE(MPL-2.0)
diff --git a/contrib/python/parso/py3/.dist-info/METADATA b/contrib/python/parso/py3/.dist-info/METADATA
index 331fef3a494..10f9cb843e8 100644
--- a/contrib/python/parso/py3/.dist-info/METADATA
+++ b/contrib/python/parso/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: parso
-Version: 0.8.3
+Version: 0.8.4
Summary: A Python Parser
Home-page: https://github.com/davidhalter/parso
Author: David Halter
@@ -26,11 +26,12 @@ Classifier: Topic :: Utilities
Classifier: Typing :: Typed
Requires-Python: >=3.6
Provides-Extra: qa
-Requires-Dist: flake8 (==3.8.3) ; extra == 'qa'
-Requires-Dist: mypy (==0.782) ; extra == 'qa'
+Requires-Dist: flake8 (==5.0.4) ; extra == 'qa'
+Requires-Dist: mypy (==0.971) ; extra == 'qa'
+Requires-Dist: types-setuptools (==67.2.0.1) ; extra == 'qa'
Provides-Extra: testing
Requires-Dist: docopt ; extra == 'testing'
-Requires-Dist: pytest (<6.0.0) ; extra == 'testing'
+Requires-Dist: pytest ; extra == 'testing'
###################################################################
parso - A Python Parser
@@ -137,6 +138,11 @@ Changelog
Unreleased
++++++++++
+0.8.4 (2024-04-05)
+++++++++++++++++++
+
+- Add basic support for Python 3.13
+
0.8.3 (2021-11-30)
++++++++++++++++++
diff --git a/contrib/python/parso/py3/parso/__init__.py b/contrib/python/parso/py3/parso/__init__.py
index 0cceabedca5..354aff5c252 100644
--- a/contrib/python/parso/py3/parso/__init__.py
+++ b/contrib/python/parso/py3/parso/__init__.py
@@ -43,7 +43,7 @@ from parso.grammar import Grammar, load_grammar
from parso.utils import split_lines, python_bytes_to_unicode
-__version__ = '0.8.3'
+__version__ = '0.8.4'
def parse(code=None, **kwargs):
diff --git a/contrib/python/parso/py3/parso/grammar.py b/contrib/python/parso/py3/parso/grammar.py
index 1f81148682e..9d6f1a1ea09 100644
--- a/contrib/python/parso/py3/parso/grammar.py
+++ b/contrib/python/parso/py3/parso/grammar.py
@@ -107,14 +107,14 @@ class Grammar(Generic[_NodeT]):
if file_io is None:
if code is None:
- file_io = FileIO(path) # type: ignore
+ file_io = FileIO(path) # type: ignore[arg-type]
else:
file_io = KnownContentFileIO(path, code)
if cache and file_io.path is not None:
module_node = load_module(self._hashed, file_io, cache_path=cache_path)
if module_node is not None:
- return module_node # type: ignore
+ return module_node # type: ignore[no-any-return]
if code is None:
code = file_io.read()
@@ -133,7 +133,7 @@ class Grammar(Generic[_NodeT]):
module_node = module_cache_item.node
old_lines = module_cache_item.lines
if old_lines == lines:
- return module_node # type: ignore
+ return module_node # type: ignore[no-any-return]
new_node = self._diff_parser(
self._pgen_grammar, self._tokenizer, module_node
@@ -145,7 +145,7 @@ class Grammar(Generic[_NodeT]):
# Never pickle in pypy, it's slow as hell.
pickling=cache and not is_pypy,
cache_path=cache_path)
- return new_node # type: ignore
+ return new_node # type: ignore[no-any-return]
tokens = self._tokenizer(lines)
@@ -161,7 +161,7 @@ class Grammar(Generic[_NodeT]):
# Never pickle in pypy, it's slow as hell.
pickling=cache and not is_pypy,
cache_path=cache_path)
- return root_node # type: ignore
+ return root_node # type: ignore[no-any-return]
def _get_token_namespace(self):
ns = self._token_namespace
diff --git a/contrib/python/parso/py3/parso/pgen2/generator.py b/contrib/python/parso/py3/parso/pgen2/generator.py
index db6e1cb3261..30f0b546b8c 100644
--- a/contrib/python/parso/py3/parso/pgen2/generator.py
+++ b/contrib/python/parso/py3/parso/pgen2/generator.py
@@ -276,7 +276,7 @@ def generate_grammar(bnf_grammar: str, token_namespace) -> Grammar:
dfa_state.transitions[transition] = DFAPlan(next_dfa)
_calculate_tree_traversal(rule_to_dfas)
- return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) # type: ignore
+ return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) # type: ignore[arg-type]
def _make_transition(token_namespace, reserved_syntax_strings, label):
diff --git a/contrib/python/parso/py3/parso/python/errors.py b/contrib/python/parso/py3/parso/python/errors.py
index 5da046ab01f..09c5047b612 100644
--- a/contrib/python/parso/py3/parso/python/errors.py
+++ b/contrib/python/parso/py3/parso/python/errors.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import codecs
+import sys
import warnings
import re
from contextlib import contextmanager
@@ -33,7 +34,10 @@ def _get_rhs_name(node, version):
return "literal"
else:
if second.children[1] == ":" or second.children[0] == "**":
- return "dict display"
+ if version < (3, 10):
+ return "dict display"
+ else:
+ return "dict literal"
else:
return "set display"
elif (
@@ -47,7 +51,10 @@ def _get_rhs_name(node, version):
elif first == "[":
return "list"
elif first == "{" and second == "}":
- return "dict display"
+ if version < (3, 10):
+ return "dict display"
+ else:
+ return "dict literal"
elif first == "{" and len(node.children) > 2:
return "set display"
elif type_ == "keyword":
@@ -58,7 +65,10 @@ def _get_rhs_name(node, version):
else:
return str(node.value)
elif type_ == "operator" and node.value == "...":
- return "Ellipsis"
+ if version < (3, 10):
+ return "Ellipsis"
+ else:
+ return "ellipsis"
elif type_ == "comparison":
return "comparison"
elif type_ in ("string", "number", "strings"):
@@ -83,7 +93,10 @@ def _get_rhs_name(node, version):
or "_test" in type_
or type_ in ("term", "factor")
):
- return "operator"
+ if version < (3, 10):
+ return "operator"
+ else:
+ return "expression"
elif type_ == "star_expr":
return "starred"
elif type_ == "testlist_star_expr":
@@ -610,7 +623,10 @@ class _NameChecks(SyntaxRule):
@ErrorFinder.register_rule(type='string')
class _StringChecks(SyntaxRule):
- message = "bytes can only contain ASCII literal characters."
+ if sys.version_info < (3, 10):
+ message = "bytes can only contain ASCII literal characters."
+ else:
+ message = "bytes can only contain ASCII literal characters"
def is_issue(self, leaf):
string_prefix = leaf.string_prefix.lower()
@@ -1043,14 +1059,20 @@ class _CheckAssignmentRule(SyntaxRule):
error = 'literal'
else:
if second.children[1] == ':':
- error = 'dict display'
+ if self._normalizer.version < (3, 10):
+ error = 'dict display'
+ else:
+ error = 'dict literal'
else:
error = 'set display'
elif first == "{" and second == "}":
if self._normalizer.version < (3, 8):
error = 'literal'
else:
- error = "dict display"
+ if self._normalizer.version < (3, 10):
+ error = "dict display"
+ else:
+ error = "dict literal"
elif first == "{" and len(node.children) > 2:
if self._normalizer.version < (3, 8):
error = 'literal'
@@ -1083,7 +1105,10 @@ class _CheckAssignmentRule(SyntaxRule):
error = str(node.value)
elif type_ == 'operator':
if node.value == '...':
- error = 'Ellipsis'
+ if self._normalizer.version < (3, 10):
+ error = 'Ellipsis'
+ else:
+ error = 'ellipsis'
elif type_ == 'comparison':
error = 'comparison'
elif type_ in ('string', 'number', 'strings'):
@@ -1098,7 +1123,10 @@ class _CheckAssignmentRule(SyntaxRule):
if node.children[0] == 'await':
error = 'await expression'
elif node.children[-2] == '**':
- error = 'operator'
+ if self._normalizer.version < (3, 10):
+ error = 'operator'
+ else:
+ error = 'expression'
else:
# Has a trailer
trailer = node.children[-1]
@@ -1120,7 +1148,10 @@ class _CheckAssignmentRule(SyntaxRule):
elif ('expr' in type_ and type_ != 'star_expr' # is a substring
or '_test' in type_
or type_ in ('term', 'factor')):
- error = 'operator'
+ if self._normalizer.version < (3, 10):
+ error = 'operator'
+ else:
+ error = 'expression'
elif type_ == "star_expr":
if is_deletion:
if self._normalizer.version >= (3, 9):
diff --git a/contrib/python/parso/py3/parso/python/grammar313.txt b/contrib/python/parso/py3/parso/python/grammar313.txt
new file mode 100644
index 00000000000..f092050d881
--- /dev/null
+++ b/contrib/python/parso/py3/parso/python/grammar313.txt
@@ -0,0 +1,169 @@
+# Grammar for Python
+
+# NOTE WELL: You should also follow all the steps listed at
+# https://devguide.python.org/grammar/
+
+# Start symbols for the grammar:
+# single_input is a single interactive statement;
+# file_input is a module or sequence of commands read from an input file;
+# eval_input is the input for the eval() functions.
+# NB: compound_stmt in single_input is followed by extra NEWLINE!
+single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
+file_input: stmt* ENDMARKER
+eval_input: testlist NEWLINE* ENDMARKER
+
+decorator: '@' namedexpr_test NEWLINE
+decorators: decorator+
+decorated: decorators (classdef | funcdef | async_funcdef)
+
+async_funcdef: 'async' funcdef
+funcdef: 'def' NAME parameters ['->' test] ':' suite
+
+parameters: '(' [typedargslist] ')'
+typedargslist: (
+ (tfpdef ['=' test] (',' tfpdef ['=' test])* ',' '/' [',' [ tfpdef ['=' test] (
+ ',' tfpdef ['=' test])* ([',' [
+ '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]]
+ | '**' tfpdef [',']]])
+ | '*' [tfpdef] (',' tfpdef ['=' test])* ([',' ['**' tfpdef [',']]])
+ | '**' tfpdef [',']]] )
+| (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' [
+ '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]]
+ | '**' tfpdef [',']]]
+ | '*' [tfpdef] (',' tfpdef ['=' test])* [',' ['**' tfpdef [',']]]
+ | '**' tfpdef [','])
+)
+tfpdef: NAME [':' test]
+varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
+ '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']]]
+ | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' [
+ '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']]]
+ | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]]
+ | '**' vfpdef [',']
+)
+vfpdef: NAME
+
+stmt: simple_stmt | compound_stmt | NEWLINE
+simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
+small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
+ import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
+expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
+ ('=' (yield_expr|testlist_star_expr))*)
+annassign: ':' test ['=' (yield_expr|testlist_star_expr)]
+testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
+augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
+ '<<=' | '>>=' | '**=' | '//=')
+# For normal and annotated assignments, additional restrictions enforced by the interpreter
+del_stmt: 'del' exprlist
+pass_stmt: 'pass'
+flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
+break_stmt: 'break'
+continue_stmt: 'continue'
+return_stmt: 'return' [testlist_star_expr]
+yield_stmt: yield_expr
+raise_stmt: 'raise' [test ['from' test]]
+import_stmt: import_name | import_from
+import_name: 'import' dotted_as_names
+# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
+import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+)
+ 'import' ('*' | '(' import_as_names ')' | import_as_names))
+import_as_name: NAME ['as' NAME]
+dotted_as_name: dotted_name ['as' NAME]
+import_as_names: import_as_name (',' import_as_name)* [',']
+dotted_as_names: dotted_as_name (',' dotted_as_name)*
+dotted_name: NAME ('.' NAME)*
+global_stmt: 'global' NAME (',' NAME)*
+nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
+assert_stmt: 'assert' test [',' test]
+
+compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
+async_stmt: 'async' (funcdef | with_stmt | for_stmt)
+if_stmt: 'if' namedexpr_test ':' suite ('elif' namedexpr_test ':' suite)* ['else' ':' suite]
+while_stmt: 'while' namedexpr_test ':' suite ['else' ':' suite]
+for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
+try_stmt: ('try' ':' suite
+ ((except_clause ':' suite)+
+ ['else' ':' suite]
+ ['finally' ':' suite] |
+ 'finally' ':' suite))
+with_stmt: 'with' with_item (',' with_item)* ':' suite
+with_item: test ['as' expr]
+# NB compile.c makes sure that the default except clause is last
+except_clause: 'except' [test ['as' NAME]]
+suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
+
+namedexpr_test: test [':=' test]
+test: or_test ['if' or_test 'else' test] | lambdef
+lambdef: 'lambda' [varargslist] ':' test
+or_test: and_test ('or' and_test)*
+and_test: not_test ('and' not_test)*
+not_test: 'not' not_test | comparison
+comparison: expr (comp_op expr)*
+# <> isn't actually a valid comparison operator in Python. It's here for the
+# sake of a __future__ import described in PEP 401 (which really works :-)
+comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+star_expr: '*' expr
+expr: xor_expr ('|' xor_expr)*
+xor_expr: and_expr ('^' and_expr)*
+and_expr: shift_expr ('&' shift_expr)*
+shift_expr: arith_expr (('<<'|'>>') arith_expr)*
+arith_expr: term (('+'|'-') term)*
+term: factor (('*'|'@'|'/'|'%'|'//') factor)*
+factor: ('+'|'-'|'~') factor | power
+power: atom_expr ['**' factor]
+atom_expr: ['await'] atom trailer*
+atom: ('(' [yield_expr|testlist_comp] ')' |
+ '[' [testlist_comp] ']' |
+ '{' [dictorsetmaker] '}' |
+ NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False')
+testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
+trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
+subscriptlist: subscript (',' subscript)* [',']
+subscript: test [':=' test] | [test] ':' [test] [sliceop]
+sliceop: ':' [test]
+exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
+testlist: test (',' test)* [',']
+dictorsetmaker: ( ((test ':' test | '**' expr)
+ (comp_for | (',' (test ':' test | '**' expr))* [','])) |
+ ((test [':=' test] | star_expr)
+ (comp_for | (',' (test [':=' test] | star_expr))* [','])) )
+
+classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
+
+arglist: argument (',' argument)* [',']
+
+# The reason that keywords are test nodes instead of NAME is that using NAME
+# results in an ambiguity. ast.c makes sure it's a NAME.
+# "test '=' test" is really "keyword '=' test", but we have no such token.
+# These need to be in a single rule to avoid grammar that is ambiguous
+# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
+# we explicitly match '*' here, too, to give it proper precedence.
+# Illegal combinations and orderings are blocked in ast.c:
+# multiple (test comp_for) arguments are blocked; keyword unpackings
+# that precede iterable unpackings are blocked; etc.
+argument: ( test [comp_for] |
+ test ':=' test |
+ test '=' test |
+ '**' test |
+ '*' test )
+
+comp_iter: comp_for | comp_if
+sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
+comp_for: ['async'] sync_comp_for
+comp_if: 'if' or_test [comp_iter]
+
+# not used in grammar, but may appear in "node" passed from Parser to Compiler
+encoding_decl: NAME
+
+yield_expr: 'yield' [yield_arg]
+yield_arg: 'from' test | testlist_star_expr
+
+strings: (STRING | fstring)+
+fstring: FSTRING_START fstring_content* FSTRING_END
+fstring_content: FSTRING_STRING | fstring_expr
+fstring_conversion: '!' NAME
+fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_format_spec: ':' fstring_content*
diff --git a/contrib/python/parso/py3/parso/python/tree.py b/contrib/python/parso/py3/parso/python/tree.py
index ebb4087030d..0624e6755d6 100644
--- a/contrib/python/parso/py3/parso/python/tree.py
+++ b/contrib/python/parso/py3/parso/python/tree.py
@@ -295,6 +295,8 @@ class FStringEnd(PythonLeaf):
class _StringComparisonMixin:
+ __slots__ = ()
+
def __eq__(self, other):
"""
Make comparisons with strings easy.
@@ -544,6 +546,7 @@ class Function(ClassOrFunc):
4. annotation (if present)
"""
type = 'funcdef'
+ __slots__ = ()
def __init__(self, children):
super().__init__(children)
diff --git a/contrib/python/parso/py3/tests/test_python_errors.py b/contrib/python/parso/py3/tests/test_python_errors.py
index fe43a301ad0..b4986d33f6b 100644
--- a/contrib/python/parso/py3/tests/test_python_errors.py
+++ b/contrib/python/parso/py3/tests/test_python_errors.py
@@ -1,6 +1,7 @@
"""
Testing if parso finds syntax errors and indentation errors.
"""
+import re
import sys
import warnings
@@ -136,6 +137,28 @@ def _get_actual_exception(code):
wanted = 'SyntaxError: invalid syntax'
elif wanted == "SyntaxError: f-string: single '}' is not allowed":
wanted = 'SyntaxError: invalid syntax'
+ elif "Maybe you meant '==' instead of '='?" in wanted:
+ wanted = wanted.removesuffix(" here. Maybe you meant '==' instead of '='?")
+ elif re.match(
+ r"SyntaxError: unterminated string literal \(detected at line \d+\)", wanted
+ ):
+ wanted = "SyntaxError: EOL while scanning string literal"
+ elif re.match(
+ r"SyntaxError: unterminated triple-quoted string literal \(detected at line \d+\)",
+ wanted,
+ ):
+ wanted = 'SyntaxError: EOF while scanning triple-quoted string literal'
+ elif wanted == 'SyntaxError: cannot use starred expression here':
+ wanted = "SyntaxError: can't use starred expression here"
+ elif wanted == 'SyntaxError: f-string: cannot use starred expression here':
+ wanted = "SyntaxError: f-string: can't use starred expression here"
+ elif re.match(
+ r"IndentationError: expected an indented block after '[^']*' statement on line \d",
+ wanted,
+ ):
+ wanted = 'IndentationError: expected an indented block'
+ elif wanted == 'SyntaxError: unterminated string literal':
+ wanted = 'SyntaxError: EOL while scanning string literal'
return [wanted], line_nr
diff --git a/contrib/python/parso/py3/ya.make b/contrib/python/parso/py3/ya.make
index fa4210f7c5d..4a388e26b2a 100644
--- a/contrib/python/parso/py3/ya.make
+++ b/contrib/python/parso/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(0.8.3)
+VERSION(0.8.4)
LICENSE(PSF-2.0)
@@ -41,6 +41,7 @@ RESOURCE_FILES(
parso/python/grammar310.txt
parso/python/grammar311.txt
parso/python/grammar312.txt
+ parso/python/grammar313.txt
parso/python/grammar36.txt
parso/python/grammar37.txt
parso/python/grammar38.txt
diff --git a/contrib/python/typing-extensions/py3/.dist-info/METADATA b/contrib/python/typing-extensions/py3/.dist-info/METADATA
index 13d06e24b78..5089b4ddde4 100644
--- a/contrib/python/typing-extensions/py3/.dist-info/METADATA
+++ b/contrib/python/typing-extensions/py3/.dist-info/METADATA
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: typing_extensions
-Version: 4.10.0
+Version: 4.11.0
Summary: Backported and Experimental Type Hints for Python 3.8+
Keywords: annotations,backport,checker,checking,function,hinting,hints,type,typechecking,typehinting,typehints,typing
Author-email: "Guido van Rossum, Jukka Lehtosalo, Ɓukasz Langa, Michael Lee" <levkivskyi@gmail.com>
diff --git a/contrib/python/typing-extensions/py3/typing_extensions.py b/contrib/python/typing-extensions/py3/typing_extensions.py
index f3132ea4ae1..9ccd519ce4f 100644
--- a/contrib/python/typing-extensions/py3/typing_extensions.py
+++ b/contrib/python/typing-extensions/py3/typing_extensions.py
@@ -147,27 +147,6 @@ class _Sentinel:
_marker = _Sentinel()
-def _check_generic(cls, parameters, elen=_marker):
- """Check correct count for parameters of a generic cls (internal helper).
- This gives a nice error message in case of count mismatch.
- """
- if not elen:
- raise TypeError(f"{cls} is not a generic class")
- if elen is _marker:
- if not hasattr(cls, "__parameters__") or not cls.__parameters__:
- raise TypeError(f"{cls} is not a generic class")
- elen = len(cls.__parameters__)
- alen = len(parameters)
- if alen != elen:
- if hasattr(cls, "__parameters__"):
- parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
- num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
- if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
- return
- raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
- f" actual {alen}, expected {elen}")
-
-
if sys.version_info >= (3, 10):
def _should_collect_from_parameters(t):
return isinstance(
@@ -181,27 +160,6 @@ else:
return isinstance(t, typing._GenericAlias) and not t._special
-def _collect_type_vars(types, typevar_types=None):
- """Collect all type variable contained in types in order of
- first appearance (lexicographic order). For example::
-
- _collect_type_vars((T, List[S, T])) == (T, S)
- """
- if typevar_types is None:
- typevar_types = typing.TypeVar
- tvars = []
- for t in types:
- if (
- isinstance(t, typevar_types) and
- t not in tvars and
- not _is_unpack(t)
- ):
- tvars.append(t)
- if _should_collect_from_parameters(t):
- tvars.extend([t for t in t.__parameters__ if t not in tvars])
- return tuple(tvars)
-
-
NoReturn = typing.NoReturn
# Some unconstrained type variables. These are used by the container types.
@@ -834,7 +792,11 @@ def _ensure_subclassable(mro_entries):
return inner
-if hasattr(typing, "ReadOnly"):
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
# The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
@@ -845,7 +807,8 @@ if hasattr(typing, "ReadOnly"):
# Aaaand on 3.12 we add __orig_bases__ to TypedDict
# to enable better runtime introspection.
# On 3.13 we deprecate some odd ways of creating TypedDicts.
- # PEP 705 proposes adding the ReadOnly[] qualifier.
+ # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+ # PEP 728 (still pending) makes more changes.
TypedDict = typing.TypedDict
_TypedDictMeta = typing._TypedDictMeta
is_typeddict = typing.is_typeddict
@@ -1122,15 +1085,15 @@ else:
return val
-if hasattr(typing, "Required"): # 3.11+
+if hasattr(typing, "ReadOnly"): # 3.13+
get_type_hints = typing.get_type_hints
-else: # <=3.10
+else: # <=3.13
# replaces _strip_annotations()
def _strip_extras(t):
"""Strips Annotated, Required and NotRequired from a given type."""
if isinstance(t, _AnnotatedAlias):
return _strip_extras(t.__origin__)
- if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
+ if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
return _strip_extras(t.__args__[0])
if isinstance(t, typing._GenericAlias):
stripped_args = tuple(_strip_extras(a) for a in t.__args__)
@@ -2689,9 +2652,151 @@ else:
# counting generic parameters, so that when we subscript a generic,
# the runtime doesn't try to substitute the Unpack with the subscripted type.
if not hasattr(typing, "TypeVarTuple"):
+ def _check_generic(cls, parameters, elen=_marker):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ if elen is _marker:
+ if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+ raise TypeError(f"{cls} is not a generic class")
+ elen = len(cls.__parameters__)
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+ num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+ if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+ return
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters we can safely check parameters[alen]
+ if getattr(parameters[alen], '__default__', None) is not None:
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', None)
+ is not None for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+ # Python 3.11+
+
+ def _check_generic(cls, parameters, elen):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters we can safely check parameters[alen]
+ if getattr(parameters[alen], '__default__', None) is not None:
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', None)
+ is not None for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+
+typing._check_generic = _check_generic
+
+# Python 3.11+ _collect_type_vars was renamed to _collect_parameters
+if hasattr(typing, '_collect_type_vars'):
+ def _collect_type_vars(types, typevar_types=None):
+ """Collect all type variable contained in types in order of
+ first appearance (lexicographic order). For example::
+
+ _collect_type_vars((T, List[S, T])) == (T, S)
+ """
+ if typevar_types is None:
+ typevar_types = typing.TypeVar
+ tvars = []
+ # required TypeVarLike cannot appear after TypeVarLike with default
+ default_encountered = False
+ for t in types:
+ if (
+ isinstance(t, typevar_types) and
+ t not in tvars and
+ not _is_unpack(t)
+ ):
+ if getattr(t, '__default__', None) is not None:
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ tvars.append(t)
+ if _should_collect_from_parameters(t):
+ tvars.extend([t for t in t.__parameters__ if t not in tvars])
+ return tuple(tvars)
+
typing._collect_type_vars = _collect_type_vars
- typing._check_generic = _check_generic
+else:
+ def _collect_parameters(args):
+ """Collect all type variables and parameter specifications in args
+ in order of first appearance (lexicographic order).
+
+ For example::
+
+ assert _collect_parameters((T, Callable[P, T])) == (T, P)
+ """
+ parameters = []
+ # required TypeVarLike cannot appear after TypeVarLike with default
+ default_encountered = False
+ for t in args:
+ if isinstance(t, type):
+ # We don't want __parameters__ descriptor of a bare Python class.
+ pass
+ elif isinstance(t, tuple):
+ # `t` might be a tuple, when `ParamSpec` is substituted with
+ # `[T, int]`, or `[int, *Ts]`, etc.
+ for x in t:
+ for collected in _collect_parameters([x]):
+ if collected not in parameters:
+ parameters.append(collected)
+ elif hasattr(t, '__typing_subst__'):
+ if t not in parameters:
+ if getattr(t, '__default__', None) is not None:
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ parameters.append(t)
+ else:
+ for x in getattr(t, '__parameters__', ()):
+ if x not in parameters:
+ parameters.append(x)
+
+ return tuple(parameters)
+ typing._collect_parameters = _collect_parameters
# Backport typing.NamedTuple as it exists in Python 3.13.
# In 3.11, the ability to define generic `NamedTuple`s was supported.
diff --git a/contrib/python/typing-extensions/py3/ya.make b/contrib/python/typing-extensions/py3/ya.make
index 6a099000e49..293ccb585ea 100644
--- a/contrib/python/typing-extensions/py3/ya.make
+++ b/contrib/python/typing-extensions/py3/ya.make
@@ -2,7 +2,7 @@
PY3_LIBRARY()
-VERSION(4.10.0)
+VERSION(4.11.0)
LICENSE(PSF-2.0)