author     shadchin <[email protected]>  2025-02-16 15:28:01 +0300
committer  shadchin <[email protected]>  2025-02-16 16:03:50 +0300
commit     a27b6a96fdc5ca444428ddef4823d0486dcdccb9 (patch)
tree       adde5c24d9ea37e1634e7972e27e682a820ab941 /contrib/tools/python3/Lib
parent     7a3958c3c6de324baab9dba4bd4eb808c1b839a6 (diff)
Update Python 3 to 3.12.9
commit_hash:c8651982d81e18f18e037fb247cc6ae53c4fa7f1
Diffstat (limited to 'contrib/tools/python3/Lib')
-rw-r--r--  contrib/tools/python3/Lib/_pydatetime.py | 1
-rw-r--r--  contrib/tools/python3/Lib/_pydecimal.py | 4
-rw-r--r--  contrib/tools/python3/Lib/_strptime.py | 2
-rw-r--r--  contrib/tools/python3/Lib/ast.py | 11
-rw-r--r--  contrib/tools/python3/Lib/asyncio/base_events.py | 11
-rw-r--r--  contrib/tools/python3/Lib/asyncio/locks.py | 2
-rw-r--r--  contrib/tools/python3/Lib/asyncio/runners.py | 1
-rw-r--r--  contrib/tools/python3/Lib/asyncio/selector_events.py | 13
-rw-r--r--  contrib/tools/python3/Lib/asyncio/staggered.py | 72
-rw-r--r--  contrib/tools/python3/Lib/asyncio/taskgroups.py | 21
-rw-r--r--  contrib/tools/python3/Lib/bdb.py | 64
-rw-r--r--  contrib/tools/python3/Lib/dis.py | 4
-rw-r--r--  contrib/tools/python3/Lib/email/_header_value_parser.py | 19
-rw-r--r--  contrib/tools/python3/Lib/email/message.py | 8
-rw-r--r--  contrib/tools/python3/Lib/functools.py | 3
-rw-r--r--  contrib/tools/python3/Lib/http/client.py | 4
-rw-r--r--  contrib/tools/python3/Lib/http/cookies.py | 6
-rw-r--r--  contrib/tools/python3/Lib/imaplib.py | 11
-rw-r--r--  contrib/tools/python3/Lib/importlib/resources/_common.py | 5
-rw-r--r--  contrib/tools/python3/Lib/linecache.py | 13
-rw-r--r--  contrib/tools/python3/Lib/multiprocessing/connection.py | 2
-rw-r--r--  contrib/tools/python3/Lib/multiprocessing/resource_tracker.py | 7
-rw-r--r--  contrib/tools/python3/Lib/multiprocessing/synchronize.py | 2
-rwxr-xr-x  contrib/tools/python3/Lib/pdb.py | 1
-rwxr-xr-x  contrib/tools/python3/Lib/platform.py | 3
-rw-r--r--  contrib/tools/python3/Lib/poplib.py | 2
-rwxr-xr-x  contrib/tools/python3/Lib/pydoc.py | 39
-rw-r--r--  contrib/tools/python3/Lib/pydoc_data/topics.py | 38
-rw-r--r--  contrib/tools/python3/Lib/socket.py | 4
-rw-r--r--  contrib/tools/python3/Lib/socketserver.py | 7
-rw-r--r--  contrib/tools/python3/Lib/subprocess.py | 22
-rw-r--r--  contrib/tools/python3/Lib/sysconfig.py | 3
-rw-r--r--  contrib/tools/python3/Lib/threading.py | 2
-rw-r--r--  contrib/tools/python3/Lib/tokenize.py | 14
-rw-r--r--  contrib/tools/python3/Lib/typing.py | 6
-rw-r--r--  contrib/tools/python3/Lib/urllib/parse.py | 20
-rw-r--r--  contrib/tools/python3/Lib/urllib/request.py | 4
-rw-r--r--  contrib/tools/python3/Lib/urllib/robotparser.py | 2
-rw-r--r--  contrib/tools/python3/Lib/xml/dom/xmlbuilder.py | 12
-rw-r--r--  contrib/tools/python3/Lib/ya.make | 4
-rw-r--r--  contrib/tools/python3/Lib/zipfile/__init__.py | 9
41 files changed, 326 insertions, 152 deletions
diff --git a/contrib/tools/python3/Lib/_pydatetime.py b/contrib/tools/python3/Lib/_pydatetime.py
index ad6292e1e41..fc43cf0bba3 100644
--- a/contrib/tools/python3/Lib/_pydatetime.py
+++ b/contrib/tools/python3/Lib/_pydatetime.py
@@ -2313,7 +2313,6 @@ datetime.resolution = timedelta(microseconds=1)
def _isoweek1monday(year):
# Helper to calculate the day number of the Monday starting week 1
- # XXX This could be done more efficiently
THURSDAY = 3
firstday = _ymd2ord(year, 1, 1)
firstweekday = (firstday + 6) % 7 # See weekday() above
diff --git a/contrib/tools/python3/Lib/_pydecimal.py b/contrib/tools/python3/Lib/_pydecimal.py
index 75df3db2624..ff80180a79e 100644
--- a/contrib/tools/python3/Lib/_pydecimal.py
+++ b/contrib/tools/python3/Lib/_pydecimal.py
@@ -97,7 +97,7 @@ class DecimalException(ArithmeticError):
Used exceptions derive from this.
If an exception derives from another exception besides this (such as
- Underflow (Inexact, Rounded, Subnormal) that indicates that it is only
+ Underflow (Inexact, Rounded, Subnormal)) that indicates that it is only
called if the others are present. This isn't actually used for
anything, though.
@@ -145,7 +145,7 @@ class InvalidOperation(DecimalException):
x ** (+-)INF
An operand is invalid
- The result of the operation after these is a quiet positive NaN,
+ The result of the operation after this is a quiet positive NaN,
except when the cause is a signaling NaN, in which case the result is
also a quiet NaN, but with the original sign, and an optional
diagnostic information.
diff --git a/contrib/tools/python3/Lib/_strptime.py b/contrib/tools/python3/Lib/_strptime.py
index dfd2bc5d8b4..5d9df2b12f8 100644
--- a/contrib/tools/python3/Lib/_strptime.py
+++ b/contrib/tools/python3/Lib/_strptime.py
@@ -300,8 +300,6 @@ class TimeRE(dict):
'V': r"(?P<V>5[0-3]|0[1-9]|[1-4]\d|\d)",
# W is set below by using 'U'
'y': r"(?P<y>\d\d)",
- #XXX: Does 'Y' need to worry about having less or more than
- # 4 digits?
'Y': r"(?P<Y>\d\d\d\d)",
'z': r"(?P<z>[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
diff --git a/contrib/tools/python3/Lib/ast.py b/contrib/tools/python3/Lib/ast.py
index b0995fa7f10..6d9785cc48e 100644
--- a/contrib/tools/python3/Lib/ast.py
+++ b/contrib/tools/python3/Lib/ast.py
@@ -1246,9 +1246,14 @@ class _Unparser(NodeVisitor):
fallback_to_repr = True
break
quote_types = new_quote_types
- elif "\n" in value:
- quote_types = [q for q in quote_types if q in _MULTI_QUOTES]
- assert quote_types
+ else:
+ if "\n" in value:
+ quote_types = [q for q in quote_types if q in _MULTI_QUOTES]
+ assert quote_types
+
+ new_quote_types = [q for q in quote_types if q not in value]
+ if new_quote_types:
+ quote_types = new_quote_types
new_fstring_parts.append(value)
if fallback_to_repr:
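
Note (not part of the diff): the hunk above changes how the unparser picks quote characters for f-string literal parts. A minimal round-trip sketch of the post-3.12.9 behaviour; the source string is made up for illustration:

    import ast

    # Round-trip check: the regenerated source must still parse even when the
    # f-string's literal text contains a quote character (3.12 cannot reuse the
    # enclosing quote inside replacement fields).
    src = 'f"it\'s a {value!r} test"'
    regenerated = ast.unparse(ast.parse(src))
    ast.parse(regenerated)  # would raise SyntaxError if the chosen quotes clashed
    print(regenerated)
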
diff --git a/contrib/tools/python3/Lib/asyncio/base_events.py b/contrib/tools/python3/Lib/asyncio/base_events.py
index 3146f7f3f65..136c1631822 100644
--- a/contrib/tools/python3/Lib/asyncio/base_events.py
+++ b/contrib/tools/python3/Lib/asyncio/base_events.py
@@ -466,7 +466,12 @@ class BaseEventLoop(events.AbstractEventLoop):
tasks._set_task_name(task, name)
- return task
+ try:
+ return task
+ finally:
+ # gh-128552: prevent a refcycle of
+ # task.exception().__traceback__->BaseEventLoop.create_task->task
+ del task
def set_task_factory(self, factory):
"""Set a task factory that will be used by loop.create_task().
@@ -1550,7 +1555,9 @@ class BaseEventLoop(events.AbstractEventLoop):
if reuse_address:
sock.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
- if reuse_port:
+ # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+ # on other address families than AF_INET/AF_INET6.
+ if reuse_port and af in (socket.AF_INET, socket.AF_INET6):
_set_reuseport(sock)
# Disable IPv4/IPv6 dual stack support (enabled by
# default on Linux) which makes a single socket
diff --git a/contrib/tools/python3/Lib/asyncio/locks.py b/contrib/tools/python3/Lib/asyncio/locks.py
index ce5d8d5bfb2..588dca6c0e1 100644
--- a/contrib/tools/python3/Lib/asyncio/locks.py
+++ b/contrib/tools/python3/Lib/asyncio/locks.py
@@ -454,7 +454,7 @@ class Barrier(mixins._LoopBoundMixin):
def __init__(self, parties):
"""Create a barrier, initialised to 'parties' tasks."""
if parties < 1:
- raise ValueError('parties must be > 0')
+ raise ValueError('parties must be >= 1')
self._cond = Condition() # notify all tasks when state changes
diff --git a/contrib/tools/python3/Lib/asyncio/runners.py b/contrib/tools/python3/Lib/asyncio/runners.py
index 1b89236599a..102ae78021b 100644
--- a/contrib/tools/python3/Lib/asyncio/runners.py
+++ b/contrib/tools/python3/Lib/asyncio/runners.py
@@ -168,6 +168,7 @@ def run(main, *, debug=None, loop_factory=None):
running in the same thread.
If debug is True, the event loop will be run in debug mode.
+ If loop_factory is passed, it is used for new event loop creation.
This function always creates a new event loop and closes it at the end.
It should be used as a main entry point for asyncio programs, and should
diff --git a/contrib/tools/python3/Lib/asyncio/selector_events.py b/contrib/tools/python3/Lib/asyncio/selector_events.py
index 790711f8340..160ed6ca13e 100644
--- a/contrib/tools/python3/Lib/asyncio/selector_events.py
+++ b/contrib/tools/python3/Lib/asyncio/selector_events.py
@@ -1183,15 +1183,19 @@ class _SelectorSocketTransport(_SelectorTransport):
# If the entire buffer couldn't be written, register a write handler
if self._buffer:
self._loop._add_writer(self._sock_fd, self._write_ready)
+ self._maybe_pause_protocol()
def can_write_eof(self):
return True
def _call_connection_lost(self, exc):
- super()._call_connection_lost(exc)
- if self._empty_waiter is not None:
- self._empty_waiter.set_exception(
- ConnectionError("Connection is closed by peer"))
+ try:
+ super()._call_connection_lost(exc)
+ finally:
+ self._write_ready = None
+ if self._empty_waiter is not None:
+ self._empty_waiter.set_exception(
+ ConnectionError("Connection is closed by peer"))
def _make_empty_waiter(self):
if self._empty_waiter is not None:
@@ -1206,7 +1210,6 @@ class _SelectorSocketTransport(_SelectorTransport):
def close(self):
self._read_ready_cb = None
- self._write_ready = None
super().close()
diff --git a/contrib/tools/python3/Lib/asyncio/staggered.py b/contrib/tools/python3/Lib/asyncio/staggered.py
index 0f4df8855a8..0afed64fdf9 100644
--- a/contrib/tools/python3/Lib/asyncio/staggered.py
+++ b/contrib/tools/python3/Lib/asyncio/staggered.py
@@ -66,8 +66,27 @@ async def staggered_race(coro_fns, delay, *, loop=None):
enum_coro_fns = enumerate(coro_fns)
winner_result = None
winner_index = None
+ unhandled_exceptions = []
exceptions = []
- running_tasks = []
+ running_tasks = set()
+ on_completed_fut = None
+
+ def task_done(task):
+ running_tasks.discard(task)
+ if (
+ on_completed_fut is not None
+ and not on_completed_fut.done()
+ and not running_tasks
+ ):
+ on_completed_fut.set_result(None)
+
+ if task.cancelled():
+ return
+
+ exc = task.exception()
+ if exc is None:
+ return
+ unhandled_exceptions.append(exc)
async def run_one_coro(ok_to_start, previous_failed) -> None:
# in eager tasks this waits for the calling task to append this task
@@ -91,11 +110,11 @@ async def staggered_race(coro_fns, delay, *, loop=None):
this_failed = locks.Event()
next_ok_to_start = locks.Event()
next_task = loop.create_task(run_one_coro(next_ok_to_start, this_failed))
- running_tasks.append(next_task)
+ running_tasks.add(next_task)
+ next_task.add_done_callback(task_done)
# next_task has been appended to running_tasks so next_task is ok to
# start.
next_ok_to_start.set()
- assert len(running_tasks) == this_index + 2
# Prepare place to put this coroutine's exceptions if not won
exceptions.append(None)
assert len(exceptions) == this_index + 1
@@ -120,31 +139,36 @@ async def staggered_race(coro_fns, delay, *, loop=None):
# up as done() == True, cancelled() == False, exception() ==
# asyncio.CancelledError. This behavior is specified in
# https://bugs.python.org/issue30048
- for i, t in enumerate(running_tasks):
- if i != this_index:
+ current_task = tasks.current_task(loop)
+ for t in running_tasks:
+ if t is not current_task:
t.cancel()
- ok_to_start = locks.Event()
- first_task = loop.create_task(run_one_coro(ok_to_start, None))
- running_tasks.append(first_task)
- # first_task has been appended to running_tasks so first_task is ok to start.
- ok_to_start.set()
+ propagate_cancellation_error = None
try:
- # Wait for a growing list of tasks to all finish: poor man's version of
- # curio's TaskGroup or trio's nursery
- done_count = 0
- while done_count != len(running_tasks):
- done, _ = await tasks.wait(running_tasks)
- done_count = len(done)
+ ok_to_start = locks.Event()
+ first_task = loop.create_task(run_one_coro(ok_to_start, None))
+ running_tasks.add(first_task)
+ first_task.add_done_callback(task_done)
+ # first_task has been appended to running_tasks so first_task is ok to start.
+ ok_to_start.set()
+ propagate_cancellation_error = None
+ # Make sure no tasks are left running if we leave this function
+ while running_tasks:
+ on_completed_fut = loop.create_future()
+ try:
+ await on_completed_fut
+ except exceptions_mod.CancelledError as ex:
+ propagate_cancellation_error = ex
+ for task in running_tasks:
+ task.cancel(*ex.args)
+ on_completed_fut = None
+ if __debug__ and unhandled_exceptions:
# If run_one_coro raises an unhandled exception, it's probably a
# programming error, and I want to see it.
- if __debug__:
- for d in done:
- if d.done() and not d.cancelled() and d.exception():
- raise d.exception()
+ raise ExceptionGroup("staggered race failed", unhandled_exceptions)
+ if propagate_cancellation_error is not None:
+ raise propagate_cancellation_error
return winner_result, winner_index, exceptions
finally:
- del exceptions
- # Make sure no tasks are left running if we leave this function
- for t in running_tasks:
- t.cancel()
+ del exceptions, propagate_cancellation_error, unhandled_exceptions
diff --git a/contrib/tools/python3/Lib/asyncio/taskgroups.py b/contrib/tools/python3/Lib/asyncio/taskgroups.py
index aada3ffa8e0..b2b953b0938 100644
--- a/contrib/tools/python3/Lib/asyncio/taskgroups.py
+++ b/contrib/tools/python3/Lib/asyncio/taskgroups.py
@@ -185,15 +185,20 @@ class TaskGroup:
else:
task = self._loop.create_task(coro, context=context)
tasks._set_task_name(task, name)
- # optimization: Immediately call the done callback if the task is
+
+ # Always schedule the done callback even if the task is
# already done (e.g. if the coro was able to complete eagerly),
- # and skip scheduling a done callback
- if task.done():
- self._on_task_done(task)
- else:
- self._tasks.add(task)
- task.add_done_callback(self._on_task_done)
- return task
+ # otherwise if the task completes with an exception then it will cancel
+ # the current task too early. gh-128550, gh-128588
+
+ self._tasks.add(task)
+ task.add_done_callback(self._on_task_done)
+ try:
+ return task
+ finally:
+ # gh-128552: prevent a refcycle of
+ # task.exception().__traceback__->TaskGroup.create_task->task
+ del task
# Since Python 3.8 Tasks propagate all exceptions correctly,
# except for KeyboardInterrupt and SystemExit which are
diff --git a/contrib/tools/python3/Lib/bdb.py b/contrib/tools/python3/Lib/bdb.py
index 196e6b178cb..085c17ce05d 100644
--- a/contrib/tools/python3/Lib/bdb.py
+++ b/contrib/tools/python3/Lib/bdb.py
@@ -3,6 +3,7 @@
import fnmatch
import sys
import os
+from contextlib import contextmanager
from inspect import CO_GENERATOR, CO_COROUTINE, CO_ASYNC_GENERATOR
__all__ = ["BdbQuit", "Bdb", "Breakpoint"]
@@ -33,6 +34,7 @@ class Bdb:
self.breaks = {}
self.fncache = {}
self.frame_returning = None
+ self.enterframe = None
self._load_breaks()
@@ -60,6 +62,12 @@ class Bdb:
self.botframe = None
self._set_stopinfo(None, None)
+ @contextmanager
+ def set_enterframe(self, frame):
+ self.enterframe = frame
+ yield
+ self.enterframe = None
+
def trace_dispatch(self, frame, event, arg):
"""Dispatch a trace function for debugged frames based on the event.
@@ -84,24 +92,26 @@ class Bdb:
The arg parameter depends on the previous event.
"""
- if self.quitting:
- return # None
- if event == 'line':
- return self.dispatch_line(frame)
- if event == 'call':
- return self.dispatch_call(frame, arg)
- if event == 'return':
- return self.dispatch_return(frame, arg)
- if event == 'exception':
- return self.dispatch_exception(frame, arg)
- if event == 'c_call':
- return self.trace_dispatch
- if event == 'c_exception':
- return self.trace_dispatch
- if event == 'c_return':
+
+ with self.set_enterframe(frame):
+ if self.quitting:
+ return # None
+ if event == 'line':
+ return self.dispatch_line(frame)
+ if event == 'call':
+ return self.dispatch_call(frame, arg)
+ if event == 'return':
+ return self.dispatch_return(frame, arg)
+ if event == 'exception':
+ return self.dispatch_exception(frame, arg)
+ if event == 'c_call':
+ return self.trace_dispatch
+ if event == 'c_exception':
+ return self.trace_dispatch
+ if event == 'c_return':
+ return self.trace_dispatch
+ print('bdb.Bdb.dispatch: unknown debugging event:', repr(event))
return self.trace_dispatch
- print('bdb.Bdb.dispatch: unknown debugging event:', repr(event))
- return self.trace_dispatch
def dispatch_line(self, frame):
"""Invoke user function and return trace function for line event.
@@ -332,14 +342,16 @@ class Bdb:
If frame is not specified, debugging starts from caller's frame.
"""
+ sys.settrace(None)
if frame is None:
frame = sys._getframe().f_back
self.reset()
- while frame:
- frame.f_trace = self.trace_dispatch
- self.botframe = frame
- frame = frame.f_back
- self.set_step()
+ with self.set_enterframe(frame):
+ while frame:
+ frame.f_trace = self.trace_dispatch
+ self.botframe = frame
+ frame = frame.f_back
+ self.set_step()
sys.settrace(self.trace_dispatch)
def set_continue(self):
@@ -394,6 +406,14 @@ class Bdb:
return 'Line %s:%d does not exist' % (filename, lineno)
self._add_to_breaks(filename, lineno)
bp = Breakpoint(filename, lineno, temporary, cond, funcname)
+ # After we set a new breakpoint, we need to search through all frames
+ # and set f_trace to trace_dispatch if there could be a breakpoint in
+ # that frame.
+ frame = self.enterframe
+ while frame:
+ if self.break_anywhere(frame):
+ frame.f_trace = self.trace_dispatch
+ frame = frame.f_back
return None
def _load_breaks(self):
diff --git a/contrib/tools/python3/Lib/dis.py b/contrib/tools/python3/Lib/dis.py
index 320dec03d25..b1069c82528 100644
--- a/contrib/tools/python3/Lib/dis.py
+++ b/contrib/tools/python3/Lib/dis.py
@@ -790,12 +790,12 @@ class Bytecode:
return output.getvalue()
-def main():
+def main(args=None):
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
- args = parser.parse_args()
+ args = parser.parse_args(args=args)
with args.infile as infile:
source = infile.read()
code = compile(source, args.infile.name, "exec")
diff --git a/contrib/tools/python3/Lib/email/_header_value_parser.py b/contrib/tools/python3/Lib/email/_header_value_parser.py
index ec2215a5e5f..3d845c09d41 100644
--- a/contrib/tools/python3/Lib/email/_header_value_parser.py
+++ b/contrib/tools/python3/Lib/email/_header_value_parser.py
@@ -95,8 +95,16 @@ EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
NLSET = {'\n', '\r'}
SPECIALSNL = SPECIALS | NLSET
+
+def make_quoted_pairs(value):
+ """Escape dquote and backslash for use within a quoted-string."""
+ return str(value).replace('\\', '\\\\').replace('"', '\\"')
+
+
def quote_string(value):
- return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
+ escaped = make_quoted_pairs(value)
+ return f'"{escaped}"'
+
# Match a RFC 2047 word, looks like =?utf-8?q?someword?=
rfc2047_matcher = re.compile(r'''
@@ -2905,6 +2913,15 @@ def _refold_parse_tree(parse_tree, *, policy):
if not hasattr(part, 'encode'):
# It's not a terminal, try folding the subparts.
newparts = list(part)
+ if part.token_type == 'bare-quoted-string':
+ # To fold a quoted string we need to create a list of terminal
+ # tokens that will render the leading and trailing quotes
+ # and use quoted pairs in the value as appropriate.
+ newparts = (
+ [ValueTerminal('"', 'ptext')] +
+ [ValueTerminal(make_quoted_pairs(p), 'ptext')
+ for p in newparts] +
+ [ValueTerminal('"', 'ptext')])
if not part.as_ew_allowed:
wrap_as_ew_blocked += 1
newparts.append(end_ew_not_allowed)
diff --git a/contrib/tools/python3/Lib/email/message.py b/contrib/tools/python3/Lib/email/message.py
index 46bb8c21942..6b7c3a23777 100644
--- a/contrib/tools/python3/Lib/email/message.py
+++ b/contrib/tools/python3/Lib/email/message.py
@@ -286,8 +286,12 @@ class Message:
if i is not None and not isinstance(self._payload, list):
raise TypeError('Expected list, got %s' % type(self._payload))
payload = self._payload
- # cte might be a Header, so for now stringify it.
- cte = str(self.get('content-transfer-encoding', '')).lower()
+ cte = self.get('content-transfer-encoding', '')
+ if hasattr(cte, 'cte'):
+ cte = cte.cte
+ else:
+ # cte might be a Header, so for now stringify it.
+ cte = str(cte).strip().lower()
# payload may be bytes here.
if not decode:
if isinstance(payload, str) and utils._has_surrogates(payload):
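
Note (not part of the diff): one effect of the get_payload() change above is that stray whitespace around the Content-Transfer-Encoding value no longer defeats decoding. A small hedged illustration with an invented message:

    from email import message_from_string

    # The trailing space after "base64" is now stripped before the CTE is
    # compared, so decode=True still base64-decodes the body.
    msg = message_from_string('Content-Transfer-Encoding: base64 \n\naGVsbG8=\n')
    print(msg.get_payload(decode=True))  # b'hello'
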
diff --git a/contrib/tools/python3/Lib/functools.py b/contrib/tools/python3/Lib/functools.py
index f6849899e75..6025032bfaa 100644
--- a/contrib/tools/python3/Lib/functools.py
+++ b/contrib/tools/python3/Lib/functools.py
@@ -340,6 +340,9 @@ class partial:
self.args = args
self.keywords = kwds
+ __class_getitem__ = classmethod(GenericAlias)
+
+
try:
from _functools import partial
except ImportError:
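
Note (not part of the diff): the pure-Python fallback for functools.partial gains the same __class_getitem__ hook the C accelerator already has, so subscripting works whichever implementation is imported. A minimal check:

    import functools

    # partial is usable as a generic alias in annotations regardless of whether
    # the C accelerator or the pure-Python class ends up being used.
    alias = functools.partial[int]
    print(alias)             # functools.partial[int]
    print(alias.__origin__)  # <class 'functools.partial'>
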
diff --git a/contrib/tools/python3/Lib/http/client.py b/contrib/tools/python3/Lib/http/client.py
index a353716a850..fb29923d942 100644
--- a/contrib/tools/python3/Lib/http/client.py
+++ b/contrib/tools/python3/Lib/http/client.py
@@ -472,7 +472,7 @@ class HTTPResponse(io.BufferedIOBase):
if self.chunked:
return self._read_chunked(amt)
- if amt is not None:
+ if amt is not None and amt >= 0:
if self.length is not None and amt > self.length:
# clip the read to the "end of response"
amt = self.length
@@ -590,6 +590,8 @@ class HTTPResponse(io.BufferedIOBase):
def _read_chunked(self, amt=None):
assert self.chunked != _UNKNOWN
+ if amt is not None and amt < 0:
+ amt = None
value = []
try:
while (chunk_left := self._get_chunk_left()) is not None:
diff --git a/contrib/tools/python3/Lib/http/cookies.py b/contrib/tools/python3/Lib/http/cookies.py
index 6b9ed24ad8e..57791c6ab08 100644
--- a/contrib/tools/python3/Lib/http/cookies.py
+++ b/contrib/tools/python3/Lib/http/cookies.py
@@ -424,9 +424,11 @@ _CookiePattern = re.compile(r"""
( # Optional group: there may not be a value.
\s*=\s* # Equal Sign
(?P<val> # Start of group 'val'
- "(?:[^\\"]|\\.)*" # Any doublequoted string
+ "(?:[^\\"]|\\.)*" # Any double-quoted string
| # or
- \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr
+ # Special case for "expires" attr
+ (\w{3,6}day|\w{3}),\s # Day of the week or abbreviated day
+ [\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Date and time in specific format
| # or
[""" + _LegalValueChars + r"""]* # Any word or empty string
) # End of group 'val'
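
Note (not part of the diff): the widened pattern accepts full weekday names as well as the three-letter abbreviations in an unquoted expires date. A hedged illustration with invented cookie values:

    from http.cookies import SimpleCookie

    # "Wednesday" now matches the expires special case, so the full date value
    # survives parsing.
    c = SimpleCookie()
    c.load('sessionid=abc123; expires=Wednesday, 12-Feb-2025 10:00:00 GMT')
    print(c['sessionid']['expires'])  # Wednesday, 12-Feb-2025 10:00:00 GMT
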
diff --git a/contrib/tools/python3/Lib/imaplib.py b/contrib/tools/python3/Lib/imaplib.py
index 577b4b9b03a..e337fe64710 100644
--- a/contrib/tools/python3/Lib/imaplib.py
+++ b/contrib/tools/python3/Lib/imaplib.py
@@ -52,6 +52,9 @@ AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first
# search command can be quite large, so we now use 1M.
_MAXLINE = 1000000
+# Data larger than this will be read in chunks, to prevent extreme
+# overallocation.
+_SAFE_BUF_SIZE = 1 << 20
# Commands
@@ -315,7 +318,13 @@ class IMAP4:
def read(self, size):
"""Read 'size' bytes from remote."""
- return self.file.read(size)
+ cursize = min(size, _SAFE_BUF_SIZE)
+ data = self.file.read(cursize)
+ while cursize < size and len(data) == cursize:
+ delta = min(cursize, size - cursize)
+ data += self.file.read(delta)
+ cursize += delta
+ return data
def readline(self):
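
Note (not part of the diff): the new IMAP4.read() pulls large literals in capped chunks instead of asking the buffered reader for the full size up front, which avoids extreme allocations for hostile or bogus sizes. A standalone sketch that mirrors the patched loop (it does not call imaplib itself):

    import io

    _SAFE_BUF_SIZE = 1 << 20  # same 1 MiB cap as the patched imaplib

    def read_capped(file, size):
        # Mirror of IMAP4.read() above: never request more than the cap at once,
        # and grow the request geometrically until `size` bytes or EOF.
        cursize = min(size, _SAFE_BUF_SIZE)
        data = file.read(cursize)
        while cursize < size and len(data) == cursize:
            delta = min(cursize, size - cursize)
            data += file.read(delta)
            cursize += delta
        return data

    payload = io.BytesIO(b'x' * (3 * 1024 * 1024))
    print(len(read_capped(payload, 5 * 1024 * 1024)))  # 3145728; EOF stops the loop
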
diff --git a/contrib/tools/python3/Lib/importlib/resources/_common.py b/contrib/tools/python3/Lib/importlib/resources/_common.py
index a3902535342..a85df4b399f 100644
--- a/contrib/tools/python3/Lib/importlib/resources/_common.py
+++ b/contrib/tools/python3/Lib/importlib/resources/_common.py
@@ -93,12 +93,13 @@ def _infer_caller():
"""
def is_this_file(frame_info):
- return frame_info.filename == __file__
+ return frame_info.filename == stack[0].filename
def is_wrapper(frame_info):
return frame_info.function == 'wrapper'
- not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
+ stack = inspect.stack()
+ not_this_file = itertools.filterfalse(is_this_file, stack)
# also exclude 'wrapper' due to singledispatch in the call stack
callers = itertools.filterfalse(is_wrapper, not_this_file)
return next(callers).frame
diff --git a/contrib/tools/python3/Lib/linecache.py b/contrib/tools/python3/Lib/linecache.py
index 05eb49d3b05..06eea3c94f3 100644
--- a/contrib/tools/python3/Lib/linecache.py
+++ b/contrib/tools/python3/Lib/linecache.py
@@ -54,14 +54,17 @@ def checkcache(filename=None):
(This is not checked upon each call!)"""
if filename is None:
- filenames = list(cache.keys())
- elif filename in cache:
- filenames = [filename]
+ # get keys atomically
+ filenames = cache.copy().keys()
else:
- return
+ filenames = [filename]
for filename in filenames:
- entry = cache[filename]
+ try:
+ entry = cache[filename]
+ except KeyError:
+ continue
+
if len(entry) == 1:
# lazy cache entry, leave it lazy.
continue
diff --git a/contrib/tools/python3/Lib/multiprocessing/connection.py b/contrib/tools/python3/Lib/multiprocessing/connection.py
index fdbc3bda7db..81ed2ae51d1 100644
--- a/contrib/tools/python3/Lib/multiprocessing/connection.py
+++ b/contrib/tools/python3/Lib/multiprocessing/connection.py
@@ -846,7 +846,7 @@ _MD5_DIGEST_LEN = 16
_LEGACY_LENGTHS = (_MD5ONLY_MESSAGE_LENGTH, _MD5_DIGEST_LEN)
-def _get_digest_name_and_payload(message: bytes) -> (str, bytes):
+def _get_digest_name_and_payload(message): # type: (bytes) -> tuple[str, bytes]
"""Returns a digest name and the payload for a response hash.
If a legacy protocol is detected based on the message length
diff --git a/contrib/tools/python3/Lib/multiprocessing/resource_tracker.py b/contrib/tools/python3/Lib/multiprocessing/resource_tracker.py
index 79e96ecf324..23fea295c35 100644
--- a/contrib/tools/python3/Lib/multiprocessing/resource_tracker.py
+++ b/contrib/tools/python3/Lib/multiprocessing/resource_tracker.py
@@ -142,13 +142,14 @@ class ResourceTracker(object):
# that can make the child die before it registers signal handlers
# for SIGINT and SIGTERM. The mask is unregistered after spawning
# the child.
+ prev_sigmask = None
try:
if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
+ prev_sigmask = signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
pid = util.spawnv_passfds(exe, args, fds_to_pass)
finally:
- if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
+ if prev_sigmask is not None:
+ signal.pthread_sigmask(signal.SIG_SETMASK, prev_sigmask)
except:
os.close(w)
raise
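
Note (not part of the diff): the sigmask change restores the mask saved by SIG_BLOCK instead of blindly unblocking, so signals the caller had already blocked stay blocked. A POSIX-only standalone sketch of the difference (no multiprocessing involved):

    import signal

    # Save-and-restore keeps SIGTERM blocked for the caller; a blanket
    # SIG_UNBLOCK of the ignored-signals set would have unblocked it too.
    outer = signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGTERM})
    saved = signal.pthread_sigmask(signal.SIG_BLOCK, {signal.SIGINT})
    signal.pthread_sigmask(signal.SIG_SETMASK, saved)   # what the fix does
    current = signal.pthread_sigmask(signal.SIG_BLOCK, set())
    print(signal.SIGTERM in current)                    # True: caller's block survived
    signal.pthread_sigmask(signal.SIG_SETMASK, outer)   # clean up
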
diff --git a/contrib/tools/python3/Lib/multiprocessing/synchronize.py b/contrib/tools/python3/Lib/multiprocessing/synchronize.py
index 0f682b9a094..870c91349b9 100644
--- a/contrib/tools/python3/Lib/multiprocessing/synchronize.py
+++ b/contrib/tools/python3/Lib/multiprocessing/synchronize.py
@@ -360,7 +360,7 @@ class Event(object):
return True
return False
- def __repr__(self) -> str:
+ def __repr__(self):
set_status = 'set' if self.is_set() else 'unset'
return f"<{type(self).__qualname__} at {id(self):#x} {set_status}>"
#
diff --git a/contrib/tools/python3/Lib/pdb.py b/contrib/tools/python3/Lib/pdb.py
index 1e1b5ea4f0a..2a6e994dac1 100755
--- a/contrib/tools/python3/Lib/pdb.py
+++ b/contrib/tools/python3/Lib/pdb.py
@@ -281,6 +281,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
if hasattr(self, 'curframe') and self.curframe:
self.curframe.f_globals.pop('__pdb_convenience_variables', None)
self.curframe = None
+ self.curframe_locals = {}
self.tb_lineno.clear()
def setup(self, f, tb):
diff --git a/contrib/tools/python3/Lib/platform.py b/contrib/tools/python3/Lib/platform.py
index c5b60480369..b86e6834911 100755
--- a/contrib/tools/python3/Lib/platform.py
+++ b/contrib/tools/python3/Lib/platform.py
@@ -348,7 +348,8 @@ _WIN32_CLIENT_RELEASES = [
]
_WIN32_SERVER_RELEASES = [
- ((10, 1, 0), "post2022Server"),
+ ((10, 1, 0), "post2025Server"),
+ ((10, 0, 26100), "2025Server"),
((10, 0, 20348), "2022Server"),
((10, 0, 17763), "2019Server"),
((6, 4, 0), "2016Server"),
diff --git a/contrib/tools/python3/Lib/poplib.py b/contrib/tools/python3/Lib/poplib.py
index 9a5ef03c983..81b01385987 100644
--- a/contrib/tools/python3/Lib/poplib.py
+++ b/contrib/tools/python3/Lib/poplib.py
@@ -309,7 +309,7 @@ class POP3:
# optional commands:
def rpop(self, user):
- """Not sure what this does."""
+ """Send RPOP command to access the mailbox with an alternate user."""
return self._shortcmd('RPOP %s' % user)
diff --git a/contrib/tools/python3/Lib/pydoc.py b/contrib/tools/python3/Lib/pydoc.py
index e3745e5453b..9dfa87b2a82 100755
--- a/contrib/tools/python3/Lib/pydoc.py
+++ b/contrib/tools/python3/Lib/pydoc.py
@@ -54,6 +54,7 @@ Richard Chamberlain, for the first implementation of textdoc.
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
+import ast
import __future__
import builtins
import importlib._bootstrap
@@ -346,21 +347,29 @@ def ispackage(path):
return False
def source_synopsis(file):
- line = file.readline()
- while line[:1] == '#' or not line.strip():
- line = file.readline()
- if not line: break
- line = line.strip()
- if line[:4] == 'r"""': line = line[1:]
- if line[:3] == '"""':
- line = line[3:]
- if line[-1:] == '\\': line = line[:-1]
- while not line.strip():
- line = file.readline()
- if not line: break
- result = line.split('"""')[0].strip()
- else: result = None
- return result
+ """Return the one-line summary of a file object, if present"""
+
+ string = ''
+ try:
+ tokens = tokenize.generate_tokens(file.readline)
+ for tok_type, tok_string, _, _, _ in tokens:
+ if tok_type == tokenize.STRING:
+ string += tok_string
+ elif tok_type == tokenize.NEWLINE:
+ with warnings.catch_warnings():
+ # Ignore the "invalid escape sequence" warning.
+ warnings.simplefilter("ignore", SyntaxWarning)
+ docstring = ast.literal_eval(string)
+ if not isinstance(docstring, str):
+ return None
+ return docstring.strip().split('\n')[0].strip()
+ elif tok_type == tokenize.OP and tok_string in ('(', ')'):
+ string += tok_string
+ elif tok_type not in (tokenize.COMMENT, tokenize.NL, tokenize.ENCODING):
+ return None
+ except (tokenize.TokenError, UnicodeDecodeError, SyntaxError):
+ return None
+ return None
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
diff --git a/contrib/tools/python3/Lib/pydoc_data/topics.py b/contrib/tools/python3/Lib/pydoc_data/topics.py
index 12523999ca8..66204433c0e 100644
--- a/contrib/tools/python3/Lib/pydoc_data/topics.py
+++ b/contrib/tools/python3/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Tue Dec 3 19:41:14 2024
+# Autogenerated by Sphinx on Tue Feb 4 15:37:58 2025
# as part of the release process.
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
@@ -2769,15 +2769,18 @@ topics = {'assert': 'The "assert" statement\n'
' enter = type(manager).__enter__\n'
' exit = type(manager).__exit__\n'
' value = enter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
' TARGET = value\n'
' SUITE\n'
' except:\n'
+ ' hit_except = True\n'
' if not exit(manager, *sys.exc_info()):\n'
' raise\n'
- ' else:\n'
- ' exit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
'\n'
'With more than one item, the context managers are processed as '
'if\n'
@@ -3578,8 +3581,11 @@ topics = {'assert': 'The "assert" statement\n'
'defparameter)* ["," [parameter_list_starargs]]\n'
' | parameter_list_starargs\n'
' parameter_list_starargs ::= "*" [star_parameter] ("," '
- 'defparameter)* ["," ["**" parameter [","]]]\n'
- ' | "**" parameter [","]\n'
+ 'defparameter)* ["," [parameter_star_kwargs]]\n'
+ ' "*" ("," defparameter)+ ["," '
+ '[parameter_star_kwargs]]\n'
+ ' | parameter_star_kwargs\n'
+ ' parameter_star_kwargs ::= "**" parameter [","]\n'
' parameter ::= identifier [":" expression]\n'
' star_parameter ::= identifier [":" ["*"] '
'expression]\n'
@@ -6908,8 +6914,12 @@ topics = {'assert': 'The "assert" statement\n'
'trailing zeros are not removed from the result.\n'
'\n'
'The "\',\'" option signals the use of a comma for a '
- 'thousands separator.\n'
- 'For a locale aware separator, use the "\'n\'" integer '
+ 'thousands separator\n'
+ 'for floating-point presentation types and for integer '
+ 'presentation\n'
+ 'type "\'d\'". For other presentation types, this option is '
+ 'an error. For\n'
+ 'a locale aware separator, use the "\'n\'" integer '
'presentation type\n'
'instead.\n'
'\n'
@@ -7417,8 +7427,11 @@ topics = {'assert': 'The "assert" statement\n'
'defparameter)* ["," [parameter_list_starargs]]\n'
' | parameter_list_starargs\n'
' parameter_list_starargs ::= "*" [star_parameter] ("," '
- 'defparameter)* ["," ["**" parameter [","]]]\n'
- ' | "**" parameter [","]\n'
+ 'defparameter)* ["," [parameter_star_kwargs]]\n'
+ ' "*" ("," defparameter)+ ["," '
+ '[parameter_star_kwargs]]\n'
+ ' | parameter_star_kwargs\n'
+ ' parameter_star_kwargs ::= "**" parameter [","]\n'
' parameter ::= identifier [":" expression]\n'
' star_parameter ::= identifier [":" ["*"] '
'expression]\n'
@@ -16976,15 +16989,18 @@ topics = {'assert': 'The "assert" statement\n'
' enter = type(manager).__enter__\n'
' exit = type(manager).__exit__\n'
' value = enter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
' TARGET = value\n'
' SUITE\n'
' except:\n'
+ ' hit_except = True\n'
' if not exit(manager, *sys.exc_info()):\n'
' raise\n'
- ' else:\n'
- ' exit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
'\n'
'With more than one item, the context managers are processed as if\n'
'multiple "with" statements were nested:\n'
diff --git a/contrib/tools/python3/Lib/socket.py b/contrib/tools/python3/Lib/socket.py
index c1880c4ea51..91782b30ae8 100644
--- a/contrib/tools/python3/Lib/socket.py
+++ b/contrib/tools/python3/Lib/socket.py
@@ -932,7 +932,9 @@ def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
# Fail later on bind(), for platforms which may not
# support this option.
pass
- if reuse_port:
+ # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+ # on other address families than AF_INET/AF_INET6.
+ if reuse_port and family in (AF_INET, AF_INET6):
sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
if has_ipv6 and family == AF_INET6:
if dualstack_ipv6:
diff --git a/contrib/tools/python3/Lib/socketserver.py b/contrib/tools/python3/Lib/socketserver.py
index cd028ef1c63..35b2723de3b 100644
--- a/contrib/tools/python3/Lib/socketserver.py
+++ b/contrib/tools/python3/Lib/socketserver.py
@@ -468,7 +468,12 @@ class TCPServer(BaseServer):
"""
if self.allow_reuse_address and hasattr(socket, "SO_REUSEADDR"):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- if self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT"):
+ # Since Linux 6.12.9, SO_REUSEPORT is not allowed
+ # on other address families than AF_INET/AF_INET6.
+ if (
+ self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT")
+ and self.address_family in (socket.AF_INET, socket.AF_INET6)
+ ):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
diff --git a/contrib/tools/python3/Lib/subprocess.py b/contrib/tools/python3/Lib/subprocess.py
index 1d17ae3608a..3ec39ca3e61 100644
--- a/contrib/tools/python3/Lib/subprocess.py
+++ b/contrib/tools/python3/Lib/subprocess.py
@@ -43,10 +43,8 @@ getstatusoutput(...): Runs a command in the shell, waits for it to complete,
import builtins
import errno
import io
-import locale
import os
import time
-import signal
import sys
import threading
import warnings
@@ -138,6 +136,8 @@ class CalledProcessError(SubprocessError):
def __str__(self):
if self.returncode and self.returncode < 0:
+ # Lazy import to improve module import time
+ import signal
try:
return "Command '%s' died with %r." % (
self.cmd, signal.Signals(-self.returncode))
@@ -375,12 +375,14 @@ def _text_encoding():
if sys.flags.utf8_mode:
return "utf-8"
else:
+ # Lazy import to improve module import time
+ import locale
return locale.getencoding()
def call(*popenargs, timeout=None, **kwargs):
"""Run command with arguments. Wait for command to complete or
- timeout, then return the returncode attribute.
+ for timeout seconds, then return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
@@ -517,8 +519,8 @@ def run(*popenargs,
in the returncode attribute, and output & stderr attributes if those streams
were captured.
- If timeout is given, and the process takes too long, a TimeoutExpired
- exception will be raised.
+ If timeout (seconds) is given and the process takes too long,
+ a TimeoutExpired exception will be raised.
There is an optional argument "input", allowing you to
pass bytes or a string to the subprocess's stdin. If you use this argument
@@ -1655,6 +1657,9 @@ class Popen:
# Don't signal a process that we know has already died.
if self.returncode is not None:
return
+
+ # Lazy import to improve module import time
+ import signal
if sig == signal.SIGTERM:
self.terminate()
elif sig == signal.CTRL_C_EVENT:
@@ -1759,6 +1764,9 @@ class Popen:
kwargs = {}
if restore_signals:
+ # Lazy import to improve module import time
+ import signal
+
# See _Py_RestoreSignals() in Python/pylifecycle.c
sigset = []
for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
@@ -2208,9 +2216,13 @@ class Popen:
def terminate(self):
"""Terminate the process with SIGTERM
"""
+ # Lazy import to improve module import time
+ import signal
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
+ # Lazy import to improve module import time
+ import signal
self.send_signal(signal.SIGKILL)
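
Note (not part of the diff): the signal and locale imports are deferred to the few paths that need them, trimming interpreter start-up cost; behaviour is unchanged. A small check of the signal-death message path with made-up values:

    import signal
    import subprocess

    # __str__ still names the signal when the return code is negative; the only
    # difference is that "import signal" now happens lazily inside it.
    err = subprocess.CalledProcessError(-signal.SIGTERM, ['sleep', '100'])
    print(err)  # Command '['sleep', '100']' died with <Signals.SIGTERM: 15>.
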
diff --git a/contrib/tools/python3/Lib/sysconfig.py b/contrib/tools/python3/Lib/sysconfig.py
index 9bb81e7842c..168f42871b9 100644
--- a/contrib/tools/python3/Lib/sysconfig.py
+++ b/contrib/tools/python3/Lib/sysconfig.py
@@ -765,7 +765,8 @@ def get_platform():
solaris-2.6-sun4u
Windows will return one of:
- win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+ win-amd64 (64-bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
+ win-arm64 (64-bit Windows on ARM64 (aka AArch64)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
diff --git a/contrib/tools/python3/Lib/threading.py b/contrib/tools/python3/Lib/threading.py
index 0bba85d08a0..064c74d40f3 100644
--- a/contrib/tools/python3/Lib/threading.py
+++ b/contrib/tools/python3/Lib/threading.py
@@ -686,7 +686,7 @@ class Barrier:
"""
if parties < 1:
- raise ValueError("parties must be > 0")
+ raise ValueError("parties must be >= 1")
self._cond = Condition(Lock())
self._action = action
self._timeout = timeout
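
Note (not part of the diff): both threading.Barrier and asyncio.Barrier already rejected parties < 1; only the error message changes to match the actual check. For example:

    import threading

    try:
        threading.Barrier(0)
    except ValueError as exc:
        print(exc)  # parties must be >= 1 (wording as of this update)
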
diff --git a/contrib/tools/python3/Lib/tokenize.py b/contrib/tools/python3/Lib/tokenize.py
index b2dff8e6967..553c1ca4388 100644
--- a/contrib/tools/python3/Lib/tokenize.py
+++ b/contrib/tools/python3/Lib/tokenize.py
@@ -320,16 +320,10 @@ def untokenize(iterable):
with at least two elements, a token number and token value. If
only two tokens are passed, the resulting output is poor.
- Round-trip invariant for full input:
- Untokenized source will match input source exactly
-
- Round-trip invariant for limited input:
- # Output bytes will tokenize back to the input
- t1 = [tok[:2] for tok in tokenize(f.readline)]
- newcode = untokenize(t1)
- readline = BytesIO(newcode).readline
- t2 = [tok[:2] for tok in tokenize(readline)]
- assert t1 == t2
+ The result is guaranteed to tokenize back to match the input so
+ that the conversion is lossless and round-trips are assured.
+ The guarantee applies only to the token type and token string as
+ the spacing between tokens (column positions) may change.
"""
ut = Untokenizer()
out = ut.untokenize(iterable)
diff --git a/contrib/tools/python3/Lib/typing.py b/contrib/tools/python3/Lib/typing.py
index a271416d46c..d7f96b60f03 100644
--- a/contrib/tools/python3/Lib/typing.py
+++ b/contrib/tools/python3/Lib/typing.py
@@ -1610,12 +1610,16 @@ class _UnionGenericAlias(_NotIterable, _GenericAlias, _root=True):
return super().__repr__()
def __instancecheck__(self, obj):
- return self.__subclasscheck__(type(obj))
+ for arg in self.__args__:
+ if isinstance(obj, arg):
+ return True
+ return False
def __subclasscheck__(self, cls):
for arg in self.__args__:
if issubclass(cls, arg):
return True
+ return False
def __reduce__(self):
func, (origin, args) = super().__reduce__()
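
Note (not part of the diff): the Union alias now answers isinstance() by checking the object against each member directly, and __subclasscheck__ returns an explicit False when nothing matches. A few spot checks:

    from typing import Union

    print(isinstance(3, Union[int, str]))      # True
    print(isinstance(3.0, Union[int, str]))    # False
    print(issubclass(bool, Union[int, str]))   # True
    print(issubclass(float, Union[int, str]))  # False (explicit, not None)
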
diff --git a/contrib/tools/python3/Lib/urllib/parse.py b/contrib/tools/python3/Lib/urllib/parse.py
index 24815952037..c72138a33ca 100644
--- a/contrib/tools/python3/Lib/urllib/parse.py
+++ b/contrib/tools/python3/Lib/urllib/parse.py
@@ -436,6 +436,23 @@ def _checknetloc(netloc):
raise ValueError("netloc '" + netloc + "' contains invalid " +
"characters under NFKC normalization")
+def _check_bracketed_netloc(netloc):
+ # Note that this function must mirror the splitting
+ # done in NetlocResultMixins._hostinfo().
+ hostname_and_port = netloc.rpartition('@')[2]
+ before_bracket, have_open_br, bracketed = hostname_and_port.partition('[')
+ if have_open_br:
+ # No data is allowed before a bracket.
+ if before_bracket:
+ raise ValueError("Invalid IPv6 URL")
+ hostname, _, port = bracketed.partition(']')
+ # No data is allowed after the bracket but before the port delimiter.
+ if port and not port.startswith(":"):
+ raise ValueError("Invalid IPv6 URL")
+ else:
+ hostname, _, port = hostname_and_port.partition(':')
+ _check_bracketed_host(hostname)
+
# Valid bracketed hosts are defined in
# https://www.rfc-editor.org/rfc/rfc3986#page-49 and https://url.spec.whatwg.org/
def _check_bracketed_host(hostname):
@@ -496,8 +513,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if '[' in netloc and ']' in netloc:
- bracketed_host = netloc.partition('[')[2].partition(']')[0]
- _check_bracketed_host(bracketed_host)
+ _check_bracketed_netloc(netloc)
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
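
Note (not part of the diff): _check_bracketed_netloc() validates the bracketed authority the same way _hostinfo() later splits it, so stray text before '[' or between ']' and the port is rejected at parse time. A hedged illustration with made-up URLs:

    from urllib.parse import urlsplit

    # A well-formed bracketed IPv6 authority still parses as before.
    parts = urlsplit('http://[::1]:8080/index.html')
    print(parts.hostname, parts.port)  # ::1 8080

    # Junk around the brackets now raises up front.
    for url in ('http://x[::1]/', 'http://user@[::1]x:80/'):
        try:
            urlsplit(url)
        except ValueError as exc:
            print(url, '->', exc)  # Invalid IPv6 URL
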
diff --git a/contrib/tools/python3/Lib/urllib/request.py b/contrib/tools/python3/Lib/urllib/request.py
index 9a559f44152..c7ded0f67fc 100644
--- a/contrib/tools/python3/Lib/urllib/request.py
+++ b/contrib/tools/python3/Lib/urllib/request.py
@@ -903,9 +903,9 @@ class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm):
- def __init__(self, *args, **kwargs):
+ def __init__(self):
self.authenticated = {}
- super().__init__(*args, **kwargs)
+ super().__init__()
def add_password(self, realm, uri, user, passwd, is_authenticated=False):
self.update_authenticated(uri, is_authenticated)
diff --git a/contrib/tools/python3/Lib/urllib/robotparser.py b/contrib/tools/python3/Lib/urllib/robotparser.py
index c58565e3945..409f2b2e48d 100644
--- a/contrib/tools/python3/Lib/urllib/robotparser.py
+++ b/contrib/tools/python3/Lib/urllib/robotparser.py
@@ -11,6 +11,7 @@
"""
import collections
+import urllib.error
import urllib.parse
import urllib.request
@@ -65,6 +66,7 @@ class RobotFileParser:
self.disallow_all = True
elif err.code >= 400 and err.code < 500:
self.allow_all = True
+ err.close()
else:
raw = f.read()
self.parse(raw.decode("utf-8").splitlines())
diff --git a/contrib/tools/python3/Lib/xml/dom/xmlbuilder.py b/contrib/tools/python3/Lib/xml/dom/xmlbuilder.py
index 8a200263497..a8852625a2f 100644
--- a/contrib/tools/python3/Lib/xml/dom/xmlbuilder.py
+++ b/contrib/tools/python3/Lib/xml/dom/xmlbuilder.py
@@ -189,7 +189,7 @@ class DOMBuilder:
options.filter = self.filter
options.errorHandler = self.errorHandler
fp = input.byteStream
- if fp is None and options.systemId:
+ if fp is None and input.systemId:
import urllib.request
fp = urllib.request.urlopen(input.systemId)
return self._parse_bytestream(fp, options)
@@ -247,10 +247,12 @@ class DOMEntityResolver(object):
def _guess_media_encoding(self, source):
info = source.byteStream.info()
- if "Content-Type" in info:
- for param in info.getplist():
- if param.startswith("charset="):
- return param.split("=", 1)[1].lower()
+ # import email.message
+ # assert isinstance(info, email.message.Message)
+ charset = info.get_param('charset')
+ if charset is not None:
+ return charset.lower()
+ return None
class DOMInputSource(object):
diff --git a/contrib/tools/python3/Lib/ya.make b/contrib/tools/python3/Lib/ya.make
index ba53fefab39..2310e5fa992 100644
--- a/contrib/tools/python3/Lib/ya.make
+++ b/contrib/tools/python3/Lib/ya.make
@@ -4,9 +4,9 @@ ENABLE(PYBUILD_NO_PY)
PY3_LIBRARY()
-VERSION(3.12.8)
+VERSION(3.12.9)
-ORIGINAL_SOURCE(https://github.com/python/cpython/archive/v3.12.8.tar.gz)
+ORIGINAL_SOURCE(https://github.com/python/cpython/archive/v3.12.9.tar.gz)
LICENSE(Python-2.0)
diff --git a/contrib/tools/python3/Lib/zipfile/__init__.py b/contrib/tools/python3/Lib/zipfile/__init__.py
index cf71c6dba2b..91b2e032e5f 100644
--- a/contrib/tools/python3/Lib/zipfile/__init__.py
+++ b/contrib/tools/python3/Lib/zipfile/__init__.py
@@ -794,7 +794,10 @@ class _SharedFile:
raise ValueError("Can't reposition in the ZIP file while "
"there is an open writing handle on it. "
"Close the writing handle before trying to read.")
- self._file.seek(offset, whence)
+ if whence == os.SEEK_CUR:
+ self._file.seek(self._pos + offset)
+ else:
+ self._file.seek(offset, whence)
self._pos = self._file.tell()
return self._pos
@@ -1137,13 +1140,15 @@ class ZipExtFile(io.BufferedIOBase):
self._offset = buff_offset
read_offset = 0
# Fast seek uncompressed unencrypted file
- elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset > 0:
+ elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset != 0:
# disable CRC checking after first seeking - it would be invalid
self._expected_crc = None
# seek actual file taking already buffered data into account
read_offset -= len(self._readbuffer) - self._offset
self._fileobj.seek(read_offset, os.SEEK_CUR)
self._left -= read_offset
+ self._compress_left -= read_offset
+ self._eof = self._left <= 0
read_offset = 0
# flush read buffer
self._readbuffer = b''