| field | value | date |
|---|---|---|
| author | robot-piglet <[email protected]> | 2024-11-12 07:54:50 +0300 |
| committer | robot-piglet <[email protected]> | 2024-11-12 08:05:59 +0300 |
| commit | 55cec9f6b0618fb3570fc8ef66aad151f4932591 (patch) | |
| tree | 9198c2ca0b0305269062c3674ce79f19c4990e65 /contrib/python | |
| parent | b77b1fbf262ea4f40e33a60ce32c4db4e5e49015 (diff) | |
Intermediate changes
commit_hash:c229701a8b4f4d9ee57ce1ed763099d862d53fa6
Diffstat (limited to 'contrib/python')
32 files changed, 1458 insertions, 1428 deletions
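The METADATA hunks that follow bump Twisted from 24.7.0 to 24.10.0, raise the attrs, automat, and h2 floors, and add httpx[http2]>=0.27 to the test-oriented extras. A small sanity-check sketch (not part of the commit; the expected strings are copied from the diff) using only the standard library:

```python
# Verify an installed Twisted against the metadata recorded in this commit.
# Expected values are taken from the METADATA diff below; this check is
# illustrative only and not part of the change itself.
from importlib.metadata import requires, version

assert version("Twisted") == "24.10.0"

reqs = requires("Twisted") or []
# Base dependency floors raised in 24.10.0:
assert any(r.startswith("attrs>=22.2.0") for r in reqs)
assert any(r.startswith("automat>=24.8.0") for r in reqs)
# httpx[http2]>=0.27 is only pulled in via extras such as 'test' and 'dev':
assert any(r.startswith("httpx[http2]>=0.27") and "extra" in r for r in reqs)
```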
diff --git a/contrib/python/Twisted/py3/.dist-info/METADATA b/contrib/python/Twisted/py3/.dist-info/METADATA index 22f3987e612..838ec650c53 100644 --- a/contrib/python/Twisted/py3/.dist-info/METADATA +++ b/contrib/python/Twisted/py3/.dist-info/METADATA @@ -1,14 +1,16 @@  Metadata-Version: 2.3  Name: Twisted -Version: 24.7.0 +Version: 24.10.0  Summary: An asynchronous networking framework written in Python  Project-URL: Changelog, https://github.com/twisted/twisted/blob/HEAD/NEWS.rst -Project-URL: Documentation, https://docs.twistedmatrix.com/ -Project-URL: Homepage, https://twistedmatrix.com/ -Project-URL: Issues, https://twistedmatrix.com/trac/report +Project-URL: Documentation, https://docs.twisted.org/ +Project-URL: Homepage, https://twisted.org/ +Project-URL: Issues, https://github.com/twisted/twisted/issues  Project-URL: Source, https://github.com/twisted/twisted  Project-URL: Twitter, https://twitter.com/twistedmatrix -Author-email: Twisted Matrix Laboratories <[email protected]> +Project-URL: Funding-PSF, https://psfmember.org/civicrm/contribute/transact/?reset=1&id=44 +Project-URL: Funding-GitHub, https://github.com/sponsors/twisted +Author-email: Twisted Matrix Community <[email protected]>  License: MIT License  License-File: LICENSE  Classifier: Programming Language :: Python :: 3 @@ -18,9 +20,10 @@ Classifier: Programming Language :: Python :: 3.9  Classifier: Programming Language :: Python :: 3.10  Classifier: Programming Language :: Python :: 3.11  Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13  Requires-Python: >=3.8.0 -Requires-Dist: attrs>=21.3.0 -Requires-Dist: automat>=0.8.0 +Requires-Dist: attrs>=22.2.0 +Requires-Dist: automat>=24.8.0  Requires-Dist: constantly>=15.1  Requires-Dist: hyperlink>=17.1.1  Requires-Dist: incremental>=24.7.0 @@ -31,7 +34,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'all-non-platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'all-non-platform'  Requires-Dist: cryptography>=3.3; extra == 'all-non-platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'all-non-platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'all-non-platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'all-non-platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'all-non-platform'  Requires-Dist: hypothesis>=6.56; extra == 'all-non-platform'  Requires-Dist: idna>=2.4; extra == 'all-non-platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'all-non-platform' @@ -45,7 +49,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'all_non_platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'all_non_platform'  Requires-Dist: cryptography>=3.3; extra == 'all_non_platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'all_non_platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'all_non_platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'all_non_platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'all_non_platform'  Requires-Dist: hypothesis>=6.56; extra == 'all_non_platform'  Requires-Dist: idna>=2.4; extra == 'all_non_platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'all_non_platform' @@ -61,6 +66,7 @@ Requires-Dist: cryptography>=3.3; extra == 'conch'  Provides-Extra: dev  Requires-Dist: coverage~=7.5; extra == 'dev'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'dev' +Requires-Dist: httpx[http2]>=0.27; extra == 'dev'  Requires-Dist: hypothesis>=6.56; extra == 'dev'  Requires-Dist: pydoctor~=23.9.0; extra == 'dev'  Requires-Dist: pyflakes~=2.2; extra == 'dev' @@ -85,7 +91,8 @@ 
Requires-Dist: appdirs>=1.4.0; extra == 'gtk-platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'gtk-platform'  Requires-Dist: cryptography>=3.3; extra == 'gtk-platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'gtk-platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'gtk-platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'gtk-platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'gtk-platform'  Requires-Dist: hypothesis>=6.56; extra == 'gtk-platform'  Requires-Dist: idna>=2.4; extra == 'gtk-platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'gtk-platform' @@ -100,7 +107,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'gtk_platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'gtk_platform'  Requires-Dist: cryptography>=3.3; extra == 'gtk_platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'gtk_platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'gtk_platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'gtk_platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'gtk_platform'  Requires-Dist: hypothesis>=6.56; extra == 'gtk_platform'  Requires-Dist: idna>=2.4; extra == 'gtk_platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'gtk_platform' @@ -111,14 +119,15 @@ Requires-Dist: pyserial>=3.0; extra == 'gtk_platform'  Requires-Dist: pywin32!=226; (platform_system == 'Windows') and extra == 'gtk_platform'  Requires-Dist: service-identity>=18.1.0; extra == 'gtk_platform'  Provides-Extra: http2 -Requires-Dist: h2<5.0,>=3.0; extra == 'http2' +Requires-Dist: h2<5.0,>=3.2; extra == 'http2'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'http2'  Provides-Extra: macos-platform  Requires-Dist: appdirs>=1.4.0; extra == 'macos-platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'macos-platform'  Requires-Dist: cryptography>=3.3; extra == 'macos-platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'macos-platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'macos-platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'macos-platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'macos-platform'  Requires-Dist: hypothesis>=6.56; extra == 'macos-platform'  Requires-Dist: idna>=2.4; extra == 'macos-platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'macos-platform' @@ -135,7 +144,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'macos_platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'macos_platform'  Requires-Dist: cryptography>=3.3; extra == 'macos_platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'macos_platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'macos_platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'macos_platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'macos_platform'  Requires-Dist: hypothesis>=6.56; extra == 'macos_platform'  Requires-Dist: idna>=2.4; extra == 'macos_platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'macos_platform' @@ -153,11 +163,12 @@ Requires-Dist: bcrypt>=3.1.3; extra == 'mypy'  Requires-Dist: coverage~=7.5; extra == 'mypy'  Requires-Dist: cryptography>=3.3; extra == 'mypy'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'mypy' -Requires-Dist: h2<5.0,>=3.0; extra == 'mypy' +Requires-Dist: h2<5.0,>=3.2; extra == 'mypy' +Requires-Dist: httpx[http2]>=0.27; extra == 'mypy'  Requires-Dist: hypothesis>=6.56; extra == 'mypy'  Requires-Dist: idna>=2.4; extra == 'mypy' -Requires-Dist: mypy-zope~=1.0.3; extra == 'mypy' -Requires-Dist: mypy~=1.8; extra == 'mypy' +Requires-Dist: mypy-zope==1.0.6; extra == 'mypy' +Requires-Dist: mypy==1.10.1; extra == 'mypy'  Requires-Dist: 
priority<2.0,>=1.1.0; extra == 'mypy'  Requires-Dist: pydoctor~=23.9.0; extra == 'mypy'  Requires-Dist: pyflakes~=2.2; extra == 'mypy' @@ -178,7 +189,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'osx-platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'osx-platform'  Requires-Dist: cryptography>=3.3; extra == 'osx-platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'osx-platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'osx-platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'osx-platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'osx-platform'  Requires-Dist: hypothesis>=6.56; extra == 'osx-platform'  Requires-Dist: idna>=2.4; extra == 'osx-platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'osx-platform' @@ -195,7 +207,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'osx_platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'osx_platform'  Requires-Dist: cryptography>=3.3; extra == 'osx_platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'osx_platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'osx_platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'osx_platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'osx_platform'  Requires-Dist: hypothesis>=6.56; extra == 'osx_platform'  Requires-Dist: idna>=2.4; extra == 'osx_platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'osx_platform' @@ -212,6 +225,7 @@ Requires-Dist: pyserial>=3.0; extra == 'serial'  Requires-Dist: pywin32!=226; (platform_system == 'Windows') and extra == 'serial'  Provides-Extra: test  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'test' +Requires-Dist: httpx[http2]>=0.27; extra == 'test'  Requires-Dist: hypothesis>=6.56; extra == 'test'  Requires-Dist: pyhamcrest>=2; extra == 'test'  Provides-Extra: tls @@ -223,7 +237,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'windows-platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'windows-platform'  Requires-Dist: cryptography>=3.3; extra == 'windows-platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'windows-platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'windows-platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'windows-platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'windows-platform'  Requires-Dist: hypothesis>=6.56; extra == 'windows-platform'  Requires-Dist: idna>=2.4; extra == 'windows-platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'windows-platform' @@ -239,7 +254,8 @@ Requires-Dist: appdirs>=1.4.0; extra == 'windows_platform'  Requires-Dist: bcrypt>=3.1.3; extra == 'windows_platform'  Requires-Dist: cryptography>=3.3; extra == 'windows_platform'  Requires-Dist: cython-test-exception-raiser<2,>=1.0.2; extra == 'windows_platform' -Requires-Dist: h2<5.0,>=3.0; extra == 'windows_platform' +Requires-Dist: h2<5.0,>=3.2; extra == 'windows_platform' +Requires-Dist: httpx[http2]>=0.27; extra == 'windows_platform'  Requires-Dist: hypothesis>=6.56; extra == 'windows_platform'  Requires-Dist: idna>=2.4; extra == 'windows_platform'  Requires-Dist: priority<2.0,>=1.1.0; extra == 'windows_platform' @@ -263,6 +279,17 @@ Twisted  For information on changes in this release, see the `NEWS <https://github.com/twisted/twisted/blob/trunk/NEWS.rst>`_ file. +Sponsors +-------- + +Twisted is an MIT-licensed open source project with its ongoing development made possible entirely by the support of community and these awesome sponsors. +If you'd like to join them, please consider `sponsoring Twisted's <https://docs.twisted.org/en/latest/development/sponsorship.html>`_ development. 
+ +|thinkst|_ + +|sftpplus|_ + +  What is this?  ------------- @@ -288,13 +315,13 @@ To install the latest version of Twisted using pip::    $ pip install twisted -Additional instructions for installing this software are in `the installation instructions <https://docs.twisted.org/en/latest/installations.rst>`_. +Additional instructions for installing this software are in `the installation instructions <https://docs.twisted.org/en/latest/installation.html>`_.  Documentation and Support  ------------------------- -Twisted's documentation is available from the `Twisted Matrix website <https://twistedmatrix.com/documents/current/>`_. +Twisted's documentation is available from the `Twisted Matrix Read The Docs website <https://docs.twisted.org/>`_.  This documentation contains how-tos, code examples, and an API reference.  Help is also available on the `Twisted mailing list <https://mail.python.org/mailman3/lists/twisted.python.org/>`_. @@ -329,7 +356,7 @@ Some of these tests may fail if you:  Static Code Checkers  -------------------- -You can ensure that code complies to Twisted `coding standards <https://twistedmatrix.com/documents/current/core/development/policy/coding-standard.html>`_:: +You can ensure that code complies to Twisted `coding standards <https://docs.twisted.org/en/latest/development/coding-standard.html>`_::    $ tox -e lint   # run pre-commit to check coding stanards    $ tox -e mypy   # run MyPy static type checker to check for type errors @@ -375,3 +402,11 @@ Again, see the included `LICENSE <https://github.com/twisted/twisted/blob/trunk/  .. |rtd| image:: https://readthedocs.org/projects/twisted/badge/?version=latest&style=flat  .. _rtd: https://docs.twistedmatrix.com + +.. |thinkst| image:: https://github.com/user-attachments/assets/a5b52432-2d18-4d91-a3c9-772fb2e02781 +    :alt: Thinkst Canary +.. _thinkst: https://thinkst.com/ + +.. |sftpplus| image:: https://github.com/user-attachments/assets/5f585316-c7e8-4ef1-8fbb-923f0756ceed +    :alt: SFTPPlus +.. _sftpplus: https://www.sftpplus.com/ diff --git a/contrib/python/Twisted/py3/README.rst b/contrib/python/Twisted/py3/README.rst index 1d2f85648cc..a30c266eb37 100644 --- a/contrib/python/Twisted/py3/README.rst +++ b/contrib/python/Twisted/py3/README.rst @@ -9,6 +9,17 @@ Twisted  For information on changes in this release, see the `NEWS <NEWS.rst>`_ file. +Sponsors +-------- + +Twisted is an MIT-licensed open source project with its ongoing development made possible entirely by the support of community and these awesome sponsors. +If you'd like to join them, please consider `sponsoring Twisted's <https://docs.twisted.org/en/latest/development/sponsorship.html>`_ development. + +|thinkst|_ + +|sftpplus|_ + +  What is this?  ------------- @@ -34,13 +45,13 @@ To install the latest version of Twisted using pip::    $ pip install twisted -Additional instructions for installing this software are in `the installation instructions <https://docs.twisted.org/en/latest/installations.rst>`_. +Additional instructions for installing this software are in `the installation instructions <https://docs.twisted.org/en/latest/installation.html>`_.  Documentation and Support  ------------------------- -Twisted's documentation is available from the `Twisted Matrix website <https://twistedmatrix.com/documents/current/>`_. +Twisted's documentation is available from the `Twisted Matrix Read The Docs website <https://docs.twisted.org/>`_.  This documentation contains how-tos, code examples, and an API reference.  
Help is also available on the `Twisted mailing list <https://mail.python.org/mailman3/lists/twisted.python.org/>`_. @@ -75,7 +86,7 @@ Some of these tests may fail if you:  Static Code Checkers  -------------------- -You can ensure that code complies to Twisted `coding standards <https://twistedmatrix.com/documents/current/core/development/policy/coding-standard.html>`_:: +You can ensure that code complies to Twisted `coding standards <https://docs.twisted.org/en/latest/development/coding-standard.html>`_::    $ tox -e lint   # run pre-commit to check coding stanards    $ tox -e mypy   # run MyPy static type checker to check for type errors @@ -121,3 +132,11 @@ Again, see the included `LICENSE <LICENSE>`_ file for specific legal details.  .. |rtd| image:: https://readthedocs.org/projects/twisted/badge/?version=latest&style=flat  .. _rtd: https://docs.twistedmatrix.com + +.. |thinkst| image:: https://github.com/user-attachments/assets/a5b52432-2d18-4d91-a3c9-772fb2e02781 +    :alt: Thinkst Canary +.. _thinkst: https://thinkst.com/ + +.. |sftpplus| image:: https://github.com/user-attachments/assets/5f585316-c7e8-4ef1-8fbb-923f0756ceed +    :alt: SFTPPlus +.. _sftpplus: https://www.sftpplus.com/ diff --git a/contrib/python/Twisted/py3/twisted/_threads/_convenience.py b/contrib/python/Twisted/py3/twisted/_threads/_convenience.py index deff5764624..3d8c6682f01 100644 --- a/contrib/python/Twisted/py3/twisted/_threads/_convenience.py +++ b/contrib/python/Twisted/py3/twisted/_threads/_convenience.py @@ -18,13 +18,13 @@ class Quit:      @type isSet: L{bool}      """ -    def __init__(self): +    def __init__(self) -> None:          """          Create a L{Quit} un-set.          """          self.isSet = False -    def set(self): +    def set(self) -> None:          """          Set the flag if it has not been set. @@ -33,7 +33,7 @@ class Quit:          self.check()          self.isSet = True -    def check(self): +    def check(self) -> None:          """          Check if the flag has been set. diff --git a/contrib/python/Twisted/py3/twisted/_threads/_ithreads.py b/contrib/python/Twisted/py3/twisted/_threads/_ithreads.py index cab9135f874..b2e01752e52 100644 --- a/contrib/python/Twisted/py3/twisted/_threads/_ithreads.py +++ b/contrib/python/Twisted/py3/twisted/_threads/_ithreads.py @@ -45,7 +45,7 @@ class IWorker(Interface):          @raise AlreadyQuit: if C{quit} has been called.          """ -    def quit(): +    def quit() -> None:          """          Free any resources associated with this L{IWorker} and cause it to          reject all future work. diff --git a/contrib/python/Twisted/py3/twisted/_threads/_memory.py b/contrib/python/Twisted/py3/twisted/_threads/_memory.py index 4c56db02ae9..5483b8e5770 100644 --- a/contrib/python/Twisted/py3/twisted/_threads/_memory.py +++ b/contrib/python/Twisted/py3/twisted/_threads/_memory.py @@ -6,16 +6,25 @@  Implementation of an in-memory worker that defers execution.  """ +from __future__ import annotations + +from enum import Enum, auto +from typing import Callable, Literal  from zope.interface import implementer -from . import IWorker  from ._convenience import Quit +from ._ithreads import IExclusiveWorker + + +class NoMore(Enum): +    Work = auto() + -NoMoreWork = object() +NoMoreWork = NoMore.Work -@implementer(IWorker) +@implementer(IExclusiveWorker)  class MemoryWorker:      """      An L{IWorker} that queues work for later performance. 
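The twisted/_threads/_memory.py hunks starting here (and continuing below) are typing-focused: an IExclusiveWorker declaration, a NoMore enum sentinel, and annotations on do/quit/createMemoryWorker, with unchanged runtime behaviour. For orientation, a minimal usage sketch of this private helper (my illustration, not part of the diff):

```python
# Usage sketch for the private in-memory worker whose annotations change here.
# Signatures are taken from the _memory.py hunks; illustrative only.
from twisted._threads._memory import createMemoryWorker

worker, perform = createMemoryWorker()  # perform() runs one queued item

results = []
worker.do(lambda: results.append("a"))
worker.do(lambda: results.append("b"))

assert perform() is True      # executes the first queued callable
assert perform() is True      # executes the second
assert perform() is False     # queue is empty

worker.quit()                 # appends the NoMoreWork sentinel
assert perform() is False     # further work is refused
```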
@@ -24,14 +33,17 @@ class MemoryWorker:      @type _quit: L{Quit}      """ -    def __init__(self, pending=list): +    def __init__( +        self, +        pending: Callable[[], list[Callable[[], object] | Literal[NoMore.Work]]] = list, +    ) -> None:          """          Create a L{MemoryWorker}.          """          self._quit = Quit()          self._pending = pending() -    def do(self, work): +    def do(self, work: Callable[[], object]) -> None:          """          Queue some work for to perform later; see L{createMemoryWorker}. @@ -40,7 +52,7 @@ class MemoryWorker:          self._quit.check()          self._pending.append(work) -    def quit(self): +    def quit(self) -> None:          """          Quit this worker.          """ @@ -48,22 +60,23 @@ class MemoryWorker:          self._pending.append(NoMoreWork) -def createMemoryWorker(): +def createMemoryWorker() -> tuple[MemoryWorker, Callable[[], bool]]:      """      Create an L{IWorker} that does nothing but defer work, to be performed      later.      @return: a worker that will enqueue work to perform later, and a callable          that will perform one element of that work. -    @rtype: 2-L{tuple} of (L{IWorker}, L{callable})      """ -    def perform(): +    def perform() -> bool:          if not worker._pending:              return False -        if worker._pending[0] is NoMoreWork: +        peek = worker._pending[0] +        if peek is NoMoreWork:              return False -        worker._pending.pop(0)() +        worker._pending.pop(0) +        peek()          return True      worker = MemoryWorker() diff --git a/contrib/python/Twisted/py3/twisted/_threads/_team.py b/contrib/python/Twisted/py3/twisted/_threads/_team.py index d15ae04242d..95e40cffa95 100644 --- a/contrib/python/Twisted/py3/twisted/_threads/_team.py +++ b/contrib/python/Twisted/py3/twisted/_threads/_team.py @@ -158,7 +158,7 @@ class Team:          if self._shouldQuitCoordinator and self._busyCount == 0:              self._coordinator.quit() -    def do(self, task: Callable[[], None]) -> None: +    def do(self, task: Callable[[], object]) -> None:          """          Perform some work in a worker created by C{createWorker}. diff --git a/contrib/python/Twisted/py3/twisted/_threads/_threadworker.py b/contrib/python/Twisted/py3/twisted/_threads/_threadworker.py index e7ffc097580..a4617a1974c 100644 --- a/contrib/python/Twisted/py3/twisted/_threads/_threadworker.py +++ b/contrib/python/Twisted/py3/twisted/_threads/_threadworker.py @@ -5,16 +5,41 @@  """  Implementation of an L{IWorker} based on native threads and queues.  """ +from __future__ import annotations +from enum import Enum, auto +from typing import TYPE_CHECKING, Callable, Iterator, Literal, Protocol, TypeVar -from typing import Callable +if TYPE_CHECKING: +    import threading  from zope.interface import implementer  from ._convenience import Quit  from ._ithreads import IExclusiveWorker -_stop = object() + +class Stop(Enum): +    Thread = auto() + + +StopThread = Stop.Thread + +T = TypeVar("T") +U = TypeVar("U") + + +class SimpleQueue(Protocol[T]): +    def put(self, item: T) -> None: +        ... + +    def get(self) -> T: +        ... + + +# when the sentinel value is a literal in a union, this is how iter works +smartiter: Callable[[Callable[[], T | U], U], Iterator[T]] +smartiter = iter  # type:ignore[assignment]  @implementer(IExclusiveWorker) @@ -27,25 +52,26 @@ class ThreadWorker:      thread.      
""" -    def __init__(self, startThread, queue): +    def __init__( +        self, +        startThread: Callable[[Callable[[], object]], object], +        queue: SimpleQueue[Callable[[], object] | Literal[Stop.Thread]], +    ):          """          Create a L{ThreadWorker} with a function to start a thread and a queue          to use to communicate with that thread.          @param startThread: a callable that takes a callable to run in another              thread. -        @type startThread: callable taking a 0-argument callable and returning -            nothing.          @param queue: A L{Queue} to use to give tasks to the thread created by              C{startThread}. -        @type queue: L{Queue}          """          self._q = queue          self._hasQuit = Quit() -        def work(): -            for task in iter(queue.get, _stop): +        def work() -> None: +            for task in smartiter(queue.get, StopThread):                  task()          startThread(work) @@ -59,14 +85,22 @@ class ThreadWorker:          self._hasQuit.check()          self._q.put(task) -    def quit(self): +    def quit(self) -> None:          """          Reject all future work and stop the thread started by C{__init__}.          """          # Reject all future work.  Set this _before_ enqueueing _stop, so          # that no work is ever enqueued _after_ _stop.          self._hasQuit.set() -        self._q.put(_stop) +        self._q.put(StopThread) + + +class SimpleLock(Protocol): +    def acquire(self) -> bool: +        ... + +    def release(self) -> None: +        ...  @implementer(IExclusiveWorker) @@ -75,7 +109,7 @@ class LockWorker:      An L{IWorker} implemented based on a mutual-exclusion lock.      """ -    def __init__(self, lock, local): +    def __init__(self, lock: SimpleLock, local: threading.local):          """          @param lock: A mutual-exclusion lock, with C{acquire} and C{release}              methods. @@ -85,7 +119,7 @@ class LockWorker:          @type local: L{threading.local}          """          self._quit = Quit() -        self._lock = lock +        self._lock: SimpleLock | None = lock          self._local = local      def do(self, work: Callable[[], None]) -> None: @@ -101,6 +135,7 @@ class LockWorker:          self._quit.check()          working = getattr(local, "working", None)          if working is None: +            assert lock is not None, "LockWorker used after quit()"              working = local.working = []              working.append(work)              lock.acquire() @@ -113,7 +148,7 @@ class LockWorker:          else:              working.append(work) -    def quit(self): +    def quit(self) -> None:          """          Quit this L{LockWorker}.          """ diff --git a/contrib/python/Twisted/py3/twisted/_version.py b/contrib/python/Twisted/py3/twisted/_version.py index f1f493452d5..b43dec8e331 100644 --- a/contrib/python/Twisted/py3/twisted/_version.py +++ b/contrib/python/Twisted/py3/twisted/_version.py @@ -7,5 +7,5 @@ Provides Twisted version information.  
from incremental import Version -__version__ = Version("Twisted", 24, 7, 0) +__version__ = Version("Twisted", 24, 10, 0)  __all__ = ["__version__"] diff --git a/contrib/python/Twisted/py3/twisted/application/_client_service.py b/contrib/python/Twisted/py3/twisted/application/_client_service.py new file mode 100644 index 00000000000..32b6c4c5234 --- /dev/null +++ b/contrib/python/Twisted/py3/twisted/application/_client_service.py @@ -0,0 +1,596 @@ +# -*- test-case-name: twisted.application.test.test_internet,twisted.test.test_application,twisted.test.test_cooperator -*- + +""" +Implementation of L{twisted.application.internet.ClientService}, particularly +its U{automat <https://automat.readthedocs.org/>} state machine. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from random import random as _goodEnoughRandom +from typing import Callable, Optional, Protocol as TypingProtocol, TypeVar, Union + +from zope.interface import implementer + +from automat import TypeMachineBuilder, pep614 + +from twisted.application.service import Service +from twisted.internet.defer import ( +    CancelledError, +    Deferred, +    fail, +    maybeDeferred, +    succeed, +) +from twisted.internet.interfaces import ( +    IAddress, +    IDelayedCall, +    IProtocol, +    IProtocolFactory, +    IReactorTime, +    IStreamClientEndpoint, +    ITransport, +) +from twisted.logger import Logger +from twisted.python.failure import Failure + +T = TypeVar("T") + + +def _maybeGlobalReactor(maybeReactor: Optional[T]) -> T: +    """ +    @return: the argument, or the global reactor if the argument is L{None}. +    """ +    if maybeReactor is None: +        from twisted.internet import reactor + +        return reactor  # type:ignore[return-value] +    else: +        return maybeReactor + + +class _Client(TypingProtocol): +    def start(self) -> None: +        """ +        Start this L{ClientService}, initiating the connection retry loop. +        """ + +    def stop(self) -> Deferred[None]: +        """ +        Stop trying to connect and disconnect any current connection. + +        @return: a L{Deferred} that fires when all outstanding connections are +            closed and all in-progress connection attempts halted. +        """ + +    def _connectionMade(self, protocol: _ReconnectingProtocolProxy) -> None: +        """ +        A connection has been made. + +        @param protocol: The protocol of the connection. +        """ + +    def _connectionFailed(self, failure: Failure) -> None: +        """ +        Deliver connection failures to any L{ClientService.whenConnected} +        L{Deferred}s that have met their failAfterFailures threshold. + +        @param failure: the Failure to fire the L{Deferred}s with. +        """ + +    def _reconnect(self, failure: Optional[Failure] = None) -> None: +        """ +        The wait between connection attempts is done. +        """ + +    def _clientDisconnected(self, failure: Optional[Failure] = None) -> None: +        """ +        The current connection has been disconnected. +        """ + +    def whenConnected( +        self, /, failAfterFailures: Optional[int] = None +    ) -> Deferred[IProtocol]: +        """ +        Retrieve the currently-connected L{Protocol}, or the next one to +        connect. + +        @param failAfterFailures: number of connection failures after which the +            Deferred will deliver a Failure (None means the Deferred will only +            fail if/when the service is stopped).  
Set this to 1 to make the +            very first connection failure signal an error.  Use 2 to allow one +            failure but signal an error if the subsequent retry then fails. + +        @return: a Deferred that fires with a protocol produced by the factory +            passed to C{__init__}.  It may: + +                - fire with L{IProtocol} + +                - fail with L{CancelledError} when the service is stopped + +                - fail with e.g. +                  L{DNSLookupError<twisted.internet.error.DNSLookupError>} or +                  L{ConnectionRefusedError<twisted.internet.error.ConnectionRefusedError>} +                  when the number of consecutive failed connection attempts +                  equals the value of "failAfterFailures" +        """ + + +@implementer(IProtocol) +class _ReconnectingProtocolProxy: +    """ +    A proxy for a Protocol to provide connectionLost notification to a client +    connection service, in support of reconnecting when connections are lost. +    """ + +    def __init__( +        self, protocol: IProtocol, lostNotification: Callable[[Failure], None] +    ) -> None: +        """ +        Create a L{_ReconnectingProtocolProxy}. + +        @param protocol: the application-provided L{interfaces.IProtocol} +            provider. +        @type protocol: provider of L{interfaces.IProtocol} which may +            additionally provide L{interfaces.IHalfCloseableProtocol} and +            L{interfaces.IFileDescriptorReceiver}. + +        @param lostNotification: a 1-argument callable to invoke with the +            C{reason} when the connection is lost. +        """ +        self._protocol = protocol +        self._lostNotification = lostNotification + +    def makeConnection(self, transport: ITransport) -> None: +        self._transport = transport +        self._protocol.makeConnection(transport) + +    def connectionLost(self, reason: Failure) -> None: +        """ +        The connection was lost.  Relay this information. + +        @param reason: The reason the connection was lost. + +        @return: the underlying protocol's result +        """ +        try: +            return self._protocol.connectionLost(reason) +        finally: +            self._lostNotification(reason) + +    def __getattr__(self, item: str) -> object: +        return getattr(self._protocol, item) + +    def __repr__(self) -> str: +        return f"<{self.__class__.__name__} wrapping {self._protocol!r}>" + + +@implementer(IProtocolFactory) +class _DisconnectFactory: +    """ +    A L{_DisconnectFactory} is a proxy for L{IProtocolFactory} that catches +    C{connectionLost} notifications and relays them. +    """ + +    def __init__( +        self, +        protocolFactory: IProtocolFactory, +        protocolDisconnected: Callable[[Failure], None], +    ) -> None: +        self._protocolFactory = protocolFactory +        self._protocolDisconnected = protocolDisconnected + +    def buildProtocol(self, addr: IAddress) -> Optional[IProtocol]: +        """ +        Create a L{_ReconnectingProtocolProxy} with the disconnect-notification +        callback we were called with. + +        @param addr: The address the connection is coming from. 
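The `whenConnected` docstring above defines the `failAfterFailures` contract; a short self-contained sketch of what callers see (host, port, and the trivial factory are placeholders of my own, not names from this diff):

```python
# Sketch of the whenConnected()/failAfterFailures behaviour documented above.
from twisted.application.internet import ClientService
from twisted.internet import reactor
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.internet.protocol import Factory, Protocol

endpoint = TCP4ClientEndpoint(reactor, "example.com", 4321)
service = ClientService(endpoint, Factory.forProtocol(Protocol))
service.startService()

# failAfterFailures=1: errbacks as soon as the first connection attempt fails.
# With the default (None) it only errbacks (CancelledError) when the service
# is stopped before a connection is made.
d = service.whenConnected(failAfterFailures=1)
d.addCallbacks(
    lambda protocol: print("connected:", protocol),
    lambda failure: print("giving up:", failure.value),
)
# (reactor.run() omitted from this sketch)
```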
+ +        @return: a L{_ReconnectingProtocolProxy} for a protocol produced by +            C{self._protocolFactory} +        """ +        built = self._protocolFactory.buildProtocol(addr) +        if built is None: +            return None +        return _ReconnectingProtocolProxy(built, self._protocolDisconnected) + +    def __getattr__(self, item: str) -> object: +        return getattr(self._protocolFactory, item) + +    def __repr__(self) -> str: +        return "<{} wrapping {!r}>".format( +            self.__class__.__name__, self._protocolFactory +        ) + + +def _deinterface(o: object) -> None: +    """ +    Remove the special runtime attributes set by L{implementer} so that a class +    can proxy through those attributes with C{__getattr__} and thereby forward +    optionally-provided interfaces by the delegated class. +    """ +    for zopeSpecial in ["__providedBy__", "__provides__", "__implemented__"]: +        delattr(o, zopeSpecial) + + +_deinterface(_DisconnectFactory) +_deinterface(_ReconnectingProtocolProxy) + + +@dataclass +class _Core: +    """ +    Shared core for ClientService state machine. +    """ + +    # required parameters +    endpoint: IStreamClientEndpoint +    factory: IProtocolFactory +    timeoutForAttempt: Callable[[int], float] +    clock: IReactorTime +    prepareConnection: Optional[Callable[[IProtocol], object]] + +    # internal state +    stopWaiters: list[Deferred[None]] = field(default_factory=list) +    awaitingConnected: list[tuple[Deferred[IProtocol], Optional[int]]] = field( +        default_factory=list +    ) +    failedAttempts: int = 0 +    log: Logger = Logger() + +    def waitForStop(self) -> Deferred[None]: +        self.stopWaiters.append(Deferred()) +        return self.stopWaiters[-1] + +    def unawait(self, value: Union[IProtocol, Failure]) -> None: +        self.awaitingConnected, waiting = [], self.awaitingConnected +        for w, remaining in waiting: +            w.callback(value) + +    def cancelConnectWaiters(self) -> None: +        self.unawait(Failure(CancelledError())) + +    def finishStopping(self) -> None: +        self.stopWaiters, waiting = [], self.stopWaiters +        for w in waiting: +            w.callback(None) + + +def makeMachine() -> Callable[[_Core], _Client]: +    machine = TypeMachineBuilder(_Client, _Core) + +    def waitForRetry( +        c: _Client, s: _Core, failure: Optional[Failure] = None +    ) -> IDelayedCall: +        s.failedAttempts += 1 +        delay = s.timeoutForAttempt(s.failedAttempts) +        s.log.info( +            "Scheduling retry {attempt} to connect {endpoint} in {delay} seconds.", +            attempt=s.failedAttempts, +            endpoint=s.endpoint, +            delay=delay, +        ) +        return s.clock.callLater(delay, c._reconnect) + +    def rememberConnection( +        c: _Client, s: _Core, protocol: _ReconnectingProtocolProxy +    ) -> _ReconnectingProtocolProxy: +        s.failedAttempts = 0 +        s.unawait(protocol._protocol) +        return protocol + +    def attemptConnection( +        c: _Client, s: _Core, failure: Optional[Failure] = None +    ) -> Deferred[_ReconnectingProtocolProxy]: +        factoryProxy = _DisconnectFactory(s.factory, c._clientDisconnected) +        connecting: Deferred[IProtocol] = s.endpoint.connect(factoryProxy) + +        def prepare( +            protocol: _ReconnectingProtocolProxy, +        ) -> Deferred[_ReconnectingProtocolProxy]: +            if s.prepareConnection is not None: +                return 
maybeDeferred(s.prepareConnection, protocol).addCallback( +                    lambda _: protocol +                ) +            return succeed(protocol) + +        # endpoint.connect() is actually generic on the type of the protocol, +        # but this is not expressible via zope.interface, so we have to cast +        # https://github.com/Shoobx/mypy-zope/issues/95 +        connectingProxy: Deferred[_ReconnectingProtocolProxy] +        connectingProxy = connecting  # type:ignore[assignment] +        ( +            connectingProxy.addCallback(prepare) +            .addCallback(c._connectionMade) +            .addErrback(c._connectionFailed) +        ) +        return connectingProxy + +    # States: +    Init = machine.state("Init") +    Connecting = machine.state("Connecting", attemptConnection) +    Stopped = machine.state("Stopped") +    Waiting = machine.state("Waiting", waitForRetry) +    Connected = machine.state("Connected", rememberConnection) +    Disconnecting = machine.state("Disconnecting") +    Restarting = machine.state("Restarting") +    Stopped = machine.state("Stopped") + +    # Behavior-less state transitions: +    Init.upon(_Client.start).to(Connecting).returns(None) +    Connecting.upon(_Client.start).loop().returns(None) +    Connecting.upon(_Client._connectionMade).to(Connected).returns(None) +    Waiting.upon(_Client.start).loop().returns(None) +    Waiting.upon(_Client._reconnect).to(Connecting).returns(None) +    Connected.upon(_Client._connectionFailed).to(Waiting).returns(None) +    Connected.upon(_Client.start).loop().returns(None) +    Connected.upon(_Client._clientDisconnected).to(Waiting).returns(None) +    Disconnecting.upon(_Client.start).to(Restarting).returns(None) +    Restarting.upon(_Client.start).to(Restarting).returns(None) +    Stopped.upon(_Client.start).to(Connecting).returns(None) + +    # Behavior-full state transitions: +    @pep614(Init.upon(_Client.stop).to(Stopped)) +    @pep614(Stopped.upon(_Client.stop).to(Stopped)) +    def immediateStop(c: _Client, s: _Core) -> Deferred[None]: +        return succeed(None) + +    @pep614(Connecting.upon(_Client.stop).to(Disconnecting)) +    def connectingStop( +        c: _Client, s: _Core, attempt: Deferred[_ReconnectingProtocolProxy] +    ) -> Deferred[None]: +        waited = s.waitForStop() +        attempt.cancel() +        return waited + +    @pep614(Connecting.upon(_Client._connectionFailed, nodata=True).to(Waiting)) +    def failedWhenConnecting(c: _Client, s: _Core, failure: Failure) -> None: +        ready = [] +        notReady: list[tuple[Deferred[IProtocol], Optional[int]]] = [] +        for w, remaining in s.awaitingConnected: +            if remaining is None: +                notReady.append((w, remaining)) +            elif remaining <= 1: +                ready.append(w) +            else: +                notReady.append((w, remaining - 1)) +        s.awaitingConnected = notReady +        for w in ready: +            w.callback(failure) + +    @pep614(Waiting.upon(_Client.stop).to(Stopped)) +    def stop(c: _Client, s: _Core, futureRetry: IDelayedCall) -> Deferred[None]: +        waited = s.waitForStop() +        s.cancelConnectWaiters() +        futureRetry.cancel() +        s.finishStopping() +        return waited + +    @pep614(Connected.upon(_Client.stop).to(Disconnecting)) +    def stopWhileConnected( +        c: _Client, s: _Core, protocol: _ReconnectingProtocolProxy +    ) -> Deferred[None]: +        waited = s.waitForStop() +        protocol._transport.loseConnection() +    
    return waited + +    @pep614(Connected.upon(_Client.whenConnected).loop()) +    def whenConnectedWhenConnected( +        c: _Client, +        s: _Core, +        protocol: _ReconnectingProtocolProxy, +        failAfterFailures: Optional[int] = None, +    ) -> Deferred[IProtocol]: +        return succeed(protocol._protocol) + +    @pep614(Disconnecting.upon(_Client.stop).loop()) +    @pep614(Restarting.upon(_Client.stop).to(Disconnecting)) +    def discoStop(c: _Client, s: _Core) -> Deferred[None]: +        return s.waitForStop() + +    @pep614(Disconnecting.upon(_Client._connectionFailed).to(Stopped)) +    @pep614(Disconnecting.upon(_Client._clientDisconnected).to(Stopped)) +    def disconnectingFinished( +        c: _Client, s: _Core, failure: Optional[Failure] = None +    ) -> None: +        s.cancelConnectWaiters() +        s.finishStopping() + +    @pep614(Connecting.upon(_Client.whenConnected, nodata=True).loop()) +    @pep614(Waiting.upon(_Client.whenConnected, nodata=True).loop()) +    @pep614(Init.upon(_Client.whenConnected).to(Init)) +    @pep614(Restarting.upon(_Client.whenConnected).to(Restarting)) +    @pep614(Disconnecting.upon(_Client.whenConnected).to(Disconnecting)) +    def awaitingConnection( +        c: _Client, s: _Core, failAfterFailures: Optional[int] = None +    ) -> Deferred[IProtocol]: +        result: Deferred[IProtocol] = Deferred() +        s.awaitingConnected.append((result, failAfterFailures)) +        return result + +    @pep614(Restarting.upon(_Client._clientDisconnected).to(Connecting)) +    def restartDone(c: _Client, s: _Core, failure: Optional[Failure] = None) -> None: +        s.finishStopping() + +    @pep614(Stopped.upon(_Client.whenConnected).to(Stopped)) +    def notGoingToConnect( +        c: _Client, s: _Core, failAfterFailures: Optional[int] = None +    ) -> Deferred[IProtocol]: +        return fail(CancelledError()) + +    return machine.build() + + +def backoffPolicy( +    initialDelay: float = 1.0, +    maxDelay: float = 60.0, +    factor: float = 1.5, +    jitter: Callable[[], float] = _goodEnoughRandom, +) -> Callable[[int], float]: +    """ +    A timeout policy for L{ClientService} which computes an exponential backoff +    interval with configurable parameters. + +    @since: 16.1.0 + +    @param initialDelay: Delay for the first reconnection attempt (default +        1.0s). +    @type initialDelay: L{float} + +    @param maxDelay: Maximum number of seconds between connection attempts +        (default 60 seconds, or one minute).  Note that this value is before +        jitter is applied, so the actual maximum possible delay is this value +        plus the maximum possible result of C{jitter()}. +    @type maxDelay: L{float} + +    @param factor: A multiplicative factor by which the delay grows on each +        failed reattempt.  Default: 1.5. +    @type factor: L{float} + +    @param jitter: A 0-argument callable that introduces noise into the delay. +        By default, C{random.random}, i.e. a pseudorandom floating-point value +        between zero and one. +    @type jitter: 0-argument callable returning L{float} + +    @return: a 1-argument callable that, given an attempt count, returns a +        floating point number; the number of seconds to delay. +    @rtype: see L{ClientService.__init__}'s C{retryPolicy} argument. 
+    """ + +    def policy(attempt: int) -> float: +        try: +            delay = min(initialDelay * (factor ** min(100, attempt)), maxDelay) +        except OverflowError: +            delay = maxDelay +        return delay + jitter() + +    return policy + + +_defaultPolicy = backoffPolicy() +ClientMachine = makeMachine() + + +class ClientService(Service): +    """ +    A L{ClientService} maintains a single outgoing connection to a client +    endpoint, reconnecting after a configurable timeout when a connection +    fails, either before or after connecting. + +    @since: 16.1.0 +    """ + +    _log = Logger() + +    def __init__( +        self, +        endpoint: IStreamClientEndpoint, +        factory: IProtocolFactory, +        retryPolicy: Optional[Callable[[int], float]] = None, +        clock: Optional[IReactorTime] = None, +        prepareConnection: Optional[Callable[[IProtocol], object]] = None, +    ): +        """ +        @param endpoint: A L{stream client endpoint +            <interfaces.IStreamClientEndpoint>} provider which will be used to +            connect when the service starts. + +        @param factory: A L{protocol factory <interfaces.IProtocolFactory>} +            which will be used to create clients for the endpoint. + +        @param retryPolicy: A policy configuring how long L{ClientService} will +            wait between attempts to connect to C{endpoint}; a callable taking +            (the number of failed connection attempts made in a row (L{int})) +            and returning the number of seconds to wait before making another +            attempt. + +        @param clock: The clock used to schedule reconnection.  It's mainly +            useful to be parametrized in tests.  If the factory is serialized, +            this attribute will not be serialized, and the default value (the +            reactor) will be restored when deserialized. + +        @param prepareConnection: A single argument L{callable} that may return +            a L{Deferred}.  It will be called once with the L{protocol +            <interfaces.IProtocol>} each time a new connection is made.  It may +            call methods on the protocol to prepare it for use (e.g. +            authenticate) or validate it (check its health). + +            The C{prepareConnection} callable may raise an exception or return +            a L{Deferred} which fails to reject the connection.  A rejected +            connection is not used to fire an L{Deferred} returned by +            L{whenConnected}.  Instead, L{ClientService} handles the failure +            and continues as if the connection attempt were a failure +            (incrementing the counter passed to C{retryPolicy}). + +            L{Deferred}s returned by L{whenConnected} will not fire until any +            L{Deferred} returned by the C{prepareConnection} callable fire. +            Otherwise its successful return value is consumed, but ignored. 
+ +            Present Since Twisted 18.7.0 +        """ +        clock = _maybeGlobalReactor(clock) +        retryPolicy = _defaultPolicy if retryPolicy is None else retryPolicy + +        self._machine: _Client = ClientMachine( +            _Core( +                endpoint, +                factory, +                retryPolicy, +                clock, +                prepareConnection=prepareConnection, +                log=self._log, +            ) +        ) + +    def whenConnected( +        self, failAfterFailures: Optional[int] = None +    ) -> Deferred[IProtocol]: +        """ +        Retrieve the currently-connected L{Protocol}, or the next one to +        connect. + +        @param failAfterFailures: number of connection failures after which +            the Deferred will deliver a Failure (None means the Deferred will +            only fail if/when the service is stopped).  Set this to 1 to make +            the very first connection failure signal an error.  Use 2 to +            allow one failure but signal an error if the subsequent retry +            then fails. +        @type failAfterFailures: L{int} or None + +        @return: a Deferred that fires with a protocol produced by the +            factory passed to C{__init__} +        @rtype: L{Deferred} that may: + +            - fire with L{IProtocol} + +            - fail with L{CancelledError} when the service is stopped + +            - fail with e.g. +              L{DNSLookupError<twisted.internet.error.DNSLookupError>} or +              L{ConnectionRefusedError<twisted.internet.error.ConnectionRefusedError>} +              when the number of consecutive failed connection attempts +              equals the value of "failAfterFailures" +        """ +        return self._machine.whenConnected(failAfterFailures) + +    def startService(self) -> None: +        """ +        Start this L{ClientService}, initiating the connection retry loop. +        """ +        if self.running: +            self._log.warn("Duplicate ClientService.startService {log_source}") +            return +        super().startService() +        self._machine.start() + +    def stopService(self) -> Deferred[None]: +        """ +        Stop attempting to reconnect and close any existing connections. + +        @return: a L{Deferred} that fires when all outstanding connections are +            closed and all in-progress connection attempts halted. +        """ +        super().stopService() +        return self._machine.stop() diff --git a/contrib/python/Twisted/py3/twisted/application/internet.py b/contrib/python/Twisted/py3/twisted/application/internet.py index 9e702f7e844..8bcc9722a0a 100644 --- a/contrib/python/Twisted/py3/twisted/application/internet.py +++ b/contrib/python/Twisted/py3/twisted/application/internet.py @@ -38,35 +38,13 @@ reactor.listen/connect* methods for more information.  """ -from random import random as _goodEnoughRandom  from typing import List -from automat import MethodicalMachine -  from twisted.application import service  from twisted.internet import task -from twisted.internet.defer import ( -    CancelledError, -    Deferred, -    fail, -    maybeDeferred, -    succeed, -) -from twisted.logger import Logger +from twisted.internet.defer import CancelledError  from twisted.python import log -from twisted.python.failure import Failure - - -def _maybeGlobalReactor(maybeReactor): -    """ -    @return: the argument, or the global reactor if the argument is L{None}. 
-    """ -    if maybeReactor is None: -        from twisted.internet import reactor - -        return reactor -    else: -        return maybeReactor +from ._client_service import ClientService, _maybeGlobalReactor, backoffPolicy  class _VolatileDataService(service.Service): @@ -429,764 +407,6 @@ class StreamServerEndpointService(service.Service):          return d -class _ReconnectingProtocolProxy: -    """ -    A proxy for a Protocol to provide connectionLost notification to a client -    connection service, in support of reconnecting when connections are lost. -    """ - -    def __init__(self, protocol, lostNotification): -        """ -        Create a L{_ReconnectingProtocolProxy}. - -        @param protocol: the application-provided L{interfaces.IProtocol} -            provider. -        @type protocol: provider of L{interfaces.IProtocol} which may -            additionally provide L{interfaces.IHalfCloseableProtocol} and -            L{interfaces.IFileDescriptorReceiver}. - -        @param lostNotification: a 1-argument callable to invoke with the -            C{reason} when the connection is lost. -        """ -        self._protocol = protocol -        self._lostNotification = lostNotification - -    def connectionLost(self, reason): -        """ -        The connection was lost.  Relay this information. - -        @param reason: The reason the connection was lost. - -        @return: the underlying protocol's result -        """ -        try: -            return self._protocol.connectionLost(reason) -        finally: -            self._lostNotification(reason) - -    def __getattr__(self, item): -        return getattr(self._protocol, item) - -    def __repr__(self) -> str: -        return f"<{self.__class__.__name__} wrapping {self._protocol!r}>" - - -class _DisconnectFactory: -    """ -    A L{_DisconnectFactory} is a proxy for L{IProtocolFactory} that catches -    C{connectionLost} notifications and relays them. -    """ - -    def __init__(self, protocolFactory, protocolDisconnected): -        self._protocolFactory = protocolFactory -        self._protocolDisconnected = protocolDisconnected - -    def buildProtocol(self, addr): -        """ -        Create a L{_ReconnectingProtocolProxy} with the disconnect-notification -        callback we were called with. - -        @param addr: The address the connection is coming from. - -        @return: a L{_ReconnectingProtocolProxy} for a protocol produced by -            C{self._protocolFactory} -        """ -        return _ReconnectingProtocolProxy( -            self._protocolFactory.buildProtocol(addr), self._protocolDisconnected -        ) - -    def __getattr__(self, item): -        return getattr(self._protocolFactory, item) - -    def __repr__(self) -> str: -        return "<{} wrapping {!r}>".format( -            self.__class__.__name__, self._protocolFactory -        ) - - -def backoffPolicy( -    initialDelay=1.0, maxDelay=60.0, factor=1.5, jitter=_goodEnoughRandom -): -    """ -    A timeout policy for L{ClientService} which computes an exponential backoff -    interval with configurable parameters. - -    @since: 16.1.0 - -    @param initialDelay: Delay for the first reconnection attempt (default -        1.0s). -    @type initialDelay: L{float} - -    @param maxDelay: Maximum number of seconds between connection attempts -        (default 60 seconds, or one minute).  
Note that this value is before -        jitter is applied, so the actual maximum possible delay is this value -        plus the maximum possible result of C{jitter()}. -    @type maxDelay: L{float} - -    @param factor: A multiplicative factor by which the delay grows on each -        failed reattempt.  Default: 1.5. -    @type factor: L{float} - -    @param jitter: A 0-argument callable that introduces noise into the delay. -        By default, C{random.random}, i.e. a pseudorandom floating-point value -        between zero and one. -    @type jitter: 0-argument callable returning L{float} - -    @return: a 1-argument callable that, given an attempt count, returns a -        floating point number; the number of seconds to delay. -    @rtype: see L{ClientService.__init__}'s C{retryPolicy} argument. -    """ - -    def policy(attempt): -        try: -            delay = min(initialDelay * (factor ** min(100, attempt)), maxDelay) -        except OverflowError: -            delay = maxDelay -        return delay + jitter() - -    return policy - - -_defaultPolicy = backoffPolicy() - - -def _firstResult(gen): -    """ -    Return the first element of a generator and exhaust it. - -    C{MethodicalMachine.upon}'s C{collector} argument takes a generator of -    output results. If the generator is exhausted, the later outputs aren't -    actually run. - -    @param gen: Generator to extract values from - -    @return: The first element of the generator. -    """ -    return list(gen)[0] - - -class _ClientMachine: -    """ -    State machine for maintaining a single outgoing connection to an endpoint. - -    @ivar _awaitingConnected: notifications to make when connection -        succeeds, fails, or is cancelled -    @type _awaitingConnected: list of (Deferred, count) tuples - -    @see: L{ClientService} -    """ - -    _machine = MethodicalMachine() - -    def __init__(self, endpoint, factory, retryPolicy, clock, prepareConnection, log): -        """ -        @see: L{ClientService.__init__} - -        @param log: The logger for the L{ClientService} instance this state -            machine is associated to. -        @type log: L{Logger} -        """ -        self._endpoint = endpoint -        self._failedAttempts = 0 -        self._stopped = False -        self._factory = factory -        self._timeoutForAttempt = retryPolicy -        self._clock = clock -        self._prepareConnection = prepareConnection -        self._connectionInProgress = succeed(None) - -        self._awaitingConnected = [] - -        self._stopWaiters = [] -        self._log = log - -    @_machine.state(initial=True) -    def _init(self): -        """ -        The service has not been started. -        """ - -    @_machine.state() -    def _connecting(self): -        """ -        The service has started connecting. -        """ - -    @_machine.state() -    def _waiting(self): -        """ -        The service is waiting for the reconnection period -        before reconnecting. -        """ - -    @_machine.state() -    def _connected(self): -        """ -        The service is connected. -        """ - -    @_machine.state() -    def _disconnecting(self): -        """ -        The service is disconnecting after being asked to shutdown. -        """ - -    @_machine.state() -    def _restarting(self): -        """ -        The service is disconnecting and has been asked to restart. -        """ - -    @_machine.state() -    def _stopped(self): -        """ -        The service has been stopped and is disconnected. 
-        """ - -    @_machine.input() -    def start(self): -        """ -        Start this L{ClientService}, initiating the connection retry loop. -        """ - -    @_machine.output() -    def _connect(self): -        """ -        Start a connection attempt. -        """ -        factoryProxy = _DisconnectFactory( -            self._factory, lambda _: self._clientDisconnected() -        ) - -        self._connectionInProgress = ( -            self._endpoint.connect(factoryProxy) -            .addCallback(self._runPrepareConnection) -            .addCallback(self._connectionMade) -            .addErrback(self._connectionFailed) -        ) - -    def _runPrepareConnection(self, protocol): -        """ -        Run any C{prepareConnection} callback with the connected protocol, -        ignoring its return value but propagating any failure. - -        @param protocol: The protocol of the connection. -        @type protocol: L{IProtocol} - -        @return: Either: - -            - A L{Deferred} that succeeds with the protocol when the -              C{prepareConnection} callback has executed successfully. - -            - A L{Deferred} that fails when the C{prepareConnection} callback -              throws or returns a failed L{Deferred}. - -            - The protocol, when no C{prepareConnection} callback is defined. -        """ -        if self._prepareConnection: -            return maybeDeferred(self._prepareConnection, protocol).addCallback( -                lambda _: protocol -            ) -        return protocol - -    @_machine.output() -    def _resetFailedAttempts(self): -        """ -        Reset the number of failed attempts. -        """ -        self._failedAttempts = 0 - -    @_machine.input() -    def stop(self): -        """ -        Stop trying to connect and disconnect any current connection. - -        @return: a L{Deferred} that fires when all outstanding connections are -            closed and all in-progress connection attempts halted. -        """ - -    @_machine.output() -    def _waitForStop(self): -        """ -        Return a deferred that will fire when the service has finished -        disconnecting. - -        @return: L{Deferred} that fires when the service has finished -            disconnecting. -        """ -        self._stopWaiters.append(Deferred()) -        return self._stopWaiters[-1] - -    @_machine.output() -    def _stopConnecting(self): -        """ -        Stop pending connection attempt. -        """ -        self._connectionInProgress.cancel() - -    @_machine.output() -    def _stopRetrying(self): -        """ -        Stop pending attempt to reconnect. -        """ -        self._retryCall.cancel() -        del self._retryCall - -    @_machine.output() -    def _disconnect(self): -        """ -        Disconnect the current connection. -        """ -        self._currentConnection.transport.loseConnection() - -    @_machine.input() -    def _connectionMade(self, protocol): -        """ -        A connection has been made. - -        @param protocol: The protocol of the connection. -        @type protocol: L{IProtocol} -        """ - -    @_machine.output() -    def _notifyWaiters(self, protocol): -        """ -        Notify all pending requests for a connection that a connection has been -        made. - -        @param protocol: The protocol of the connection. -        @type protocol: L{IProtocol} -        """ -        # This should be in _resetFailedAttempts but the signature doesn't -        # match. 
-        self._failedAttempts = 0 - -        self._currentConnection = protocol._protocol -        self._unawait(self._currentConnection) - -    @_machine.input() -    def _connectionFailed(self, f): -        """ -        The current connection attempt failed. -        """ - -    @_machine.output() -    def _wait(self): -        """ -        Schedule a retry attempt. -        """ -        self._doWait() - -    @_machine.output() -    def _ignoreAndWait(self, f): -        """ -        Schedule a retry attempt, and ignore the Failure passed in. -        """ -        return self._doWait() - -    def _doWait(self): -        self._failedAttempts += 1 -        delay = self._timeoutForAttempt(self._failedAttempts) -        self._log.info( -            "Scheduling retry {attempt} to connect {endpoint} " "in {delay} seconds.", -            attempt=self._failedAttempts, -            endpoint=self._endpoint, -            delay=delay, -        ) -        self._retryCall = self._clock.callLater(delay, self._reconnect) - -    @_machine.input() -    def _reconnect(self): -        """ -        The wait between connection attempts is done. -        """ - -    @_machine.input() -    def _clientDisconnected(self): -        """ -        The current connection has been disconnected. -        """ - -    @_machine.output() -    def _forgetConnection(self): -        """ -        Forget the current connection. -        """ -        del self._currentConnection - -    @_machine.output() -    def _cancelConnectWaiters(self): -        """ -        Notify all pending requests for a connection that no more connections -        are expected. -        """ -        self._unawait(Failure(CancelledError())) - -    @_machine.output() -    def _ignoreAndCancelConnectWaiters(self, f): -        """ -        Notify all pending requests for a connection that no more connections -        are expected, after ignoring the Failure passed in. -        """ -        self._unawait(Failure(CancelledError())) - -    @_machine.output() -    def _finishStopping(self): -        """ -        Notify all deferreds waiting on the service stopping. -        """ -        self._doFinishStopping() - -    @_machine.output() -    def _ignoreAndFinishStopping(self, f): -        """ -        Notify all deferreds waiting on the service stopping, and ignore the -        Failure passed in. -        """ -        self._doFinishStopping() - -    def _doFinishStopping(self): -        self._stopWaiters, waiting = [], self._stopWaiters -        for w in waiting: -            w.callback(None) - -    @_machine.input() -    def whenConnected(self, failAfterFailures=None): -        """ -        Retrieve the currently-connected L{Protocol}, or the next one to -        connect. - -        @param failAfterFailures: number of connection failures after which -            the Deferred will deliver a Failure (None means the Deferred will -            only fail if/when the service is stopped).  Set this to 1 to make -            the very first connection failure signal an error.  Use 2 to -            allow one failure but signal an error if the subsequent retry -            then fails. -        @type failAfterFailures: L{int} or None - -        @return: a Deferred that fires with a protocol produced by the -            factory passed to C{__init__} -        @rtype: L{Deferred} that may: - -            - fire with L{IProtocol} - -            - fail with L{CancelledError} when the service is stopped - -            - fail with e.g. 
-              L{DNSLookupError<twisted.internet.error.DNSLookupError>} or -              L{ConnectionRefusedError<twisted.internet.error.ConnectionRefusedError>} -              when the number of consecutive failed connection attempts -              equals the value of "failAfterFailures" -        """ - -    @_machine.output() -    def _currentConnection(self, failAfterFailures=None): -        """ -        Return the currently connected protocol. - -        @return: L{Deferred} that is fired with currently connected protocol. -        """ -        return succeed(self._currentConnection) - -    @_machine.output() -    def _noConnection(self, failAfterFailures=None): -        """ -        Notify the caller that no connection is expected. - -        @return: L{Deferred} that is fired with L{CancelledError}. -        """ -        return fail(CancelledError()) - -    @_machine.output() -    def _awaitingConnection(self, failAfterFailures=None): -        """ -        Return a deferred that will fire with the next connected protocol. - -        @return: L{Deferred} that will fire with the next connected protocol. -        """ -        result = Deferred() -        self._awaitingConnected.append((result, failAfterFailures)) -        return result - -    @_machine.output() -    def _deferredSucceededWithNone(self): -        """ -        Return a deferred that has already fired with L{None}. - -        @return: A L{Deferred} that has already fired with L{None}. -        """ -        return succeed(None) - -    def _unawait(self, value): -        """ -        Fire all outstanding L{ClientService.whenConnected} L{Deferred}s. - -        @param value: the value to fire the L{Deferred}s with. -        """ -        self._awaitingConnected, waiting = [], self._awaitingConnected -        for w, remaining in waiting: -            w.callback(value) - -    @_machine.output() -    def _deliverConnectionFailure(self, f): -        """ -        Deliver connection failures to any L{ClientService.whenConnected} -        L{Deferred}s that have met their failAfterFailures threshold. - -        @param f: the Failure to fire the L{Deferred}s with. -        """ -        ready = [] -        notReady = [] -        for w, remaining in self._awaitingConnected: -            if remaining is None: -                notReady.append((w, remaining)) -            elif remaining <= 1: -                ready.append(w) -            else: -                notReady.append((w, remaining - 1)) -        self._awaitingConnected = notReady -        for w in ready: -            w.callback(f) - -    # State Transitions - -    _init.upon(start, enter=_connecting, outputs=[_connect]) -    _init.upon( -        stop, -        enter=_stopped, -        outputs=[_deferredSucceededWithNone], -        collector=_firstResult, -    ) - -    _connecting.upon(start, enter=_connecting, outputs=[]) -    # Note that this synchonously triggers _connectionFailed in the -    # _disconnecting state. 
-    _connecting.upon( -        stop, -        enter=_disconnecting, -        outputs=[_waitForStop, _stopConnecting], -        collector=_firstResult, -    ) -    _connecting.upon(_connectionMade, enter=_connected, outputs=[_notifyWaiters]) -    _connecting.upon( -        _connectionFailed, -        enter=_waiting, -        outputs=[_ignoreAndWait, _deliverConnectionFailure], -    ) - -    _waiting.upon(start, enter=_waiting, outputs=[]) -    _waiting.upon( -        stop, -        enter=_stopped, -        outputs=[_waitForStop, _cancelConnectWaiters, _stopRetrying, _finishStopping], -        collector=_firstResult, -    ) -    _waiting.upon(_reconnect, enter=_connecting, outputs=[_connect]) - -    _connected.upon(start, enter=_connected, outputs=[]) -    _connected.upon( -        stop, -        enter=_disconnecting, -        outputs=[_waitForStop, _disconnect], -        collector=_firstResult, -    ) -    _connected.upon( -        _clientDisconnected, enter=_waiting, outputs=[_forgetConnection, _wait] -    ) - -    _disconnecting.upon(start, enter=_restarting, outputs=[_resetFailedAttempts]) -    _disconnecting.upon( -        stop, enter=_disconnecting, outputs=[_waitForStop], collector=_firstResult -    ) -    _disconnecting.upon( -        _clientDisconnected, -        enter=_stopped, -        outputs=[_cancelConnectWaiters, _finishStopping, _forgetConnection], -    ) -    # Note that this is triggered synchonously with the transition from -    # _connecting -    _disconnecting.upon( -        _connectionFailed, -        enter=_stopped, -        outputs=[_ignoreAndCancelConnectWaiters, _ignoreAndFinishStopping], -    ) - -    _restarting.upon(start, enter=_restarting, outputs=[]) -    _restarting.upon( -        stop, enter=_disconnecting, outputs=[_waitForStop], collector=_firstResult -    ) -    _restarting.upon( -        _clientDisconnected, enter=_connecting, outputs=[_finishStopping, _connect] -    ) - -    _stopped.upon(start, enter=_connecting, outputs=[_connect]) -    _stopped.upon( -        stop, -        enter=_stopped, -        outputs=[_deferredSucceededWithNone], -        collector=_firstResult, -    ) - -    _init.upon( -        whenConnected, -        enter=_init, -        outputs=[_awaitingConnection], -        collector=_firstResult, -    ) -    _connecting.upon( -        whenConnected, -        enter=_connecting, -        outputs=[_awaitingConnection], -        collector=_firstResult, -    ) -    _waiting.upon( -        whenConnected, -        enter=_waiting, -        outputs=[_awaitingConnection], -        collector=_firstResult, -    ) -    _connected.upon( -        whenConnected, -        enter=_connected, -        outputs=[_currentConnection], -        collector=_firstResult, -    ) -    _disconnecting.upon( -        whenConnected, -        enter=_disconnecting, -        outputs=[_awaitingConnection], -        collector=_firstResult, -    ) -    _restarting.upon( -        whenConnected, -        enter=_restarting, -        outputs=[_awaitingConnection], -        collector=_firstResult, -    ) -    _stopped.upon( -        whenConnected, enter=_stopped, outputs=[_noConnection], collector=_firstResult -    ) - - -class ClientService(service.Service): -    """ -    A L{ClientService} maintains a single outgoing connection to a client -    endpoint, reconnecting after a configurable timeout when a connection -    fails, either before or after connecting. 
- -    @since: 16.1.0 -    """ - -    _log = Logger() - -    def __init__( -        self, endpoint, factory, retryPolicy=None, clock=None, prepareConnection=None -    ): -        """ -        @param endpoint: A L{stream client endpoint -            <interfaces.IStreamClientEndpoint>} provider which will be used to -            connect when the service starts. - -        @param factory: A L{protocol factory <interfaces.IProtocolFactory>} -            which will be used to create clients for the endpoint. - -        @param retryPolicy: A policy configuring how long L{ClientService} will -            wait between attempts to connect to C{endpoint}. -        @type retryPolicy: callable taking (the number of failed connection -            attempts made in a row (L{int})) and returning the number of -            seconds to wait before making another attempt. - -        @param clock: The clock used to schedule reconnection.  It's mainly -            useful to be parametrized in tests.  If the factory is serialized, -            this attribute will not be serialized, and the default value (the -            reactor) will be restored when deserialized. -        @type clock: L{IReactorTime} - -        @param prepareConnection: A single argument L{callable} that may return -            a L{Deferred}. It will be called once with the L{protocol -            <interfaces.IProtocol>} each time a new connection is made.  It may -            call methods on the protocol to prepare it for use (e.g. -            authenticate) or validate it (check its health). - -            The C{prepareConnection} callable may raise an exception or return -            a L{Deferred} which fails to reject the connection.  A rejected -            connection is not used to fire an L{Deferred} returned by -            L{whenConnected}.  Instead, L{ClientService} handles the failure -            and continues as if the connection attempt were a failure -            (incrementing the counter passed to C{retryPolicy}). - -            L{Deferred}s returned by L{whenConnected} will not fire until -            any L{Deferred} returned by the C{prepareConnection} callable -            fire. Otherwise its successful return value is consumed, but -            ignored. - -            Present Since Twisted 18.7.0 - -        @type prepareConnection: L{callable} - -        """ -        clock = _maybeGlobalReactor(clock) -        retryPolicy = _defaultPolicy if retryPolicy is None else retryPolicy - -        self._machine = _ClientMachine( -            endpoint, -            factory, -            retryPolicy, -            clock, -            prepareConnection=prepareConnection, -            log=self._log, -        ) - -    def whenConnected(self, failAfterFailures=None): -        """ -        Retrieve the currently-connected L{Protocol}, or the next one to -        connect. - -        @param failAfterFailures: number of connection failures after which -            the Deferred will deliver a Failure (None means the Deferred will -            only fail if/when the service is stopped).  Set this to 1 to make -            the very first connection failure signal an error.  Use 2 to -            allow one failure but signal an error if the subsequent retry -            then fails. 
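
For orientation, a hedged usage sketch of the ClientService API documented above; the host, port, and trivial factory are placeholders, not values taken from this change.

from twisted.application.internet import ClientService, backoffPolicy
from twisted.internet import reactor
from twisted.internet.endpoints import HostnameEndpoint
from twisted.internet.protocol import Factory, Protocol

endpoint = HostnameEndpoint(reactor, b"example.com", 4321)   # placeholder address
service = ClientService(
    endpoint,
    Factory.forProtocol(Protocol),
    retryPolicy=backoffPolicy(maxDelay=30.0),
    prepareConnection=lambda proto: None,    # e.g. authenticate or health-check here
)
service.startService()
# Deliver a Failure after three consecutive failed attempts instead of waiting forever:
d = service.whenConnected(failAfterFailures=3)
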
-        @type failAfterFailures: L{int} or None - -        @return: a Deferred that fires with a protocol produced by the -            factory passed to C{__init__} -        @rtype: L{Deferred} that may: - -            - fire with L{IProtocol} - -            - fail with L{CancelledError} when the service is stopped - -            - fail with e.g. -              L{DNSLookupError<twisted.internet.error.DNSLookupError>} or -              L{ConnectionRefusedError<twisted.internet.error.ConnectionRefusedError>} -              when the number of consecutive failed connection attempts -              equals the value of "failAfterFailures" -        """ -        return self._machine.whenConnected(failAfterFailures) - -    def startService(self): -        """ -        Start this L{ClientService}, initiating the connection retry loop. -        """ -        if self.running: -            self._log.warn("Duplicate ClientService.startService {log_source}") -            return -        super().startService() -        self._machine.start() - -    def stopService(self): -        """ -        Stop attempting to reconnect and close any existing connections. - -        @return: a L{Deferred} that fires when all outstanding connections are -            closed and all in-progress connection attempts halted. -        """ -        super().stopService() -        return self._machine.stop() - -  __all__ = [      "TimerService",      "CooperatorService", @@ -1202,4 +422,6 @@ __all__ = [      "SSLClient",      "UNIXDatagramServer",      "UNIXDatagramClient", +    "ClientService", +    "backoffPolicy",  ] diff --git a/contrib/python/Twisted/py3/twisted/conch/manhole.py b/contrib/python/Twisted/py3/twisted/conch/manhole.py index f552af5bbdc..670ac0480ec 100644 --- a/contrib/python/Twisted/py3/twisted/conch/manhole.py +++ b/contrib/python/Twisted/py3/twisted/conch/manhole.py @@ -124,7 +124,14 @@ class ManholeInterpreter(code.InteractiveInterpreter):          """          Format exception tracebacks and write them to the output handler.          """ -        lines = format_exception(excType, excValue, excTraceback.tb_next) +        code_obj = excTraceback.tb_frame.f_code +        if code_obj.co_filename == code.__file__ and code_obj.co_name == "runcode": +            traceback = excTraceback.tb_next +        else: +            # Workaround for https://github.com/python/cpython/issues/122478, +            # present e.g. 
in Python 3.12.6: +            traceback = excTraceback +        lines = format_exception(excType, excValue, traceback)          self.write("".join(lines))      def displayhook(self, obj): diff --git a/contrib/python/Twisted/py3/twisted/conch/ssh/keys.py b/contrib/python/Twisted/py3/twisted/conch/ssh/keys.py index e959f022a0b..7d2f1072f47 100644 --- a/contrib/python/Twisted/py3/twisted/conch/ssh/keys.py +++ b/contrib/python/Twisted/py3/twisted/conch/ssh/keys.py @@ -256,24 +256,38 @@ class Key:          if keyType == b"ssh-rsa":              e, n, rest = common.getMP(rest, 2)              return cls(rsa.RSAPublicNumbers(e, n).public_key(default_backend())) -        elif keyType == b"ssh-dss": + +        if keyType == b"ssh-dss":              p, q, g, y, rest = common.getMP(rest, 4)              return cls(                  dsa.DSAPublicNumbers(                      y=y, parameter_numbers=dsa.DSAParameterNumbers(p=p, q=q, g=g)                  ).public_key(default_backend())              ) -        elif keyType in _curveTable: + +        if keyType in _curveTable:              return cls(                  ec.EllipticCurvePublicKey.from_encoded_point(                      _curveTable[keyType], common.getNS(rest, 2)[1]                  )              ) -        elif keyType == b"ssh-ed25519": + +        if keyType == b"[email protected]": +            keyObject = cls._fromECEncodedPoint( +                encodedPoint=common.getNS(rest, 2)[1], +                curve=b"ecdsa-sha2-nistp256", +            ) +            keyObject._sk = True +            return keyObject + +        if keyType in [b"ssh-ed25519", b"[email protected]"]:              a, rest = common.getNS(rest) -            return cls._fromEd25519Components(a) -        else: -            raise BadKeyError(f"unknown blob type: {keyType}") +            keyObject = cls._fromEd25519Components(a) +            if keyType.startswith(b"sk-ssh-"): +                keyObject._sk = True +            return keyObject + +        raise BadKeyError(f"unknown blob type: {keyType}")      @classmethod      def _fromString_PRIVATE_BLOB(cls, blob): @@ -676,16 +690,37 @@ class Key:          """          if data.startswith(b"ssh-") or data.startswith(b"ecdsa-sha2-"):              return "public_openssh" -        elif data.startswith(b"-----BEGIN"): + +        # Twisted doesn't support certificate based keys yet. +        # https://github.com/openssh/openssh-portable/blob/05f2b141cfcc60c7cdedf9450d2b9d390c19eaad/PROTOCOL.u2f#L96C1-L97C31 +        if data.startswith(b"sk-ecdsa-sha2-nistp256-cert-v01") or data.startswith( +            b"sk-ssh-ed25519-cert-v01" +        ): +            raise BadKeyError("certificate based keys are not supported") + +        if data.startswith(b"sk-ecdsa-sha2-nistp256") or data.startswith( +            b"sk-ssh-ed25519" +        ): +            # OpenSSH FIDO2 security keys have similar public format. +            # They have the extra "application" string, +            # which for now is ignored. 
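
The dispatch added above is purely prefix-based. A self-contained sketch of the same ordering, with an assumed helper name and no real key material:

def guessOpenSSHKind(data: bytes) -> str:
    # Certificate variants must be rejected before the plain sk- prefixes,
    # because they share the same leading bytes.
    if data.startswith(
        (b"sk-ecdsa-sha2-nistp256-cert-v01", b"sk-ssh-ed25519-cert-v01")
    ):
        raise ValueError("certificate based keys are not supported")
    if data.startswith(
        (b"ssh-", b"ecdsa-sha2-", b"sk-ecdsa-sha2-nistp256", b"sk-ssh-ed25519")
    ):
        return "public_openssh"
    if data.startswith(b"-----BEGIN"):
        return "private_openssh"
    return "unknown"

assert guessOpenSSHKind(b"[email protected] AAAA...") == "public_openssh"
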
+            return "public_openssh" + +        if data.startswith(b"-----BEGIN"):              return "private_openssh" -        elif data.startswith(b"{"): + +        if data.startswith(b"{"):              return "public_lsh" -        elif data.startswith(b"("): + +        if data.startswith(b"("):              return "private_lsh" -        elif ( + +        if (              data.startswith(b"\x00\x00\x00\x07ssh-")              or data.startswith(b"\x00\x00\x00\x13ecdsa-")              or data.startswith(b"\x00\x00\x00\x0bssh-ed25519") +            or data.startswith(b'\x00\x00\x00"[email protected]') +            or data.startswith(b"\x00\x00\x00\[email protected]")          ):              ignored, rest = common.getNS(data)              count = 0 @@ -869,6 +904,7 @@ class Key:          @type keyObject: C{cryptography.hazmat.primitives.asymmetric} key.          """          self._keyObject = keyObject +        self._sk = False      def __eq__(self, other: object) -> bool:          """ @@ -1029,16 +1065,25 @@ class Key:          @return: The key type format.          @rtype: L{bytes}          """ +        if self._sk: +            if self.type() == "EC": +                return b"[email protected]" +            # FIXME: https://github.com/twisted/twisted/issues/12304 +            # We only support 2 key types, +            # so if the key was loaded with success and it's +            # not ECDSA, it must be an ED25519 key. +            return b"[email protected]" +          if self.type() == "EC":              return (                  b"ecdsa-sha2-" + _secToNist[self._keyObject.curve.name.encode("ascii")]              ) -        else: -            return { -                "RSA": b"ssh-rsa", -                "DSA": b"ssh-dss", -                "Ed25519": b"ssh-ed25519", -            }[self.type()] + +        return { +            "RSA": b"ssh-rsa", +            "DSA": b"ssh-dss", +            "Ed25519": b"ssh-ed25519", +        }[self.type()]      def supportedSignatureAlgorithms(self):          """ @@ -1070,14 +1115,16 @@ class Key:                      return hashes.SHA512()              else:                  return None -        else: -            return { -                ("RSA", b"ssh-rsa"): hashes.SHA1(), -                ("RSA", b"rsa-sha2-256"): hashes.SHA256(), -                ("RSA", b"rsa-sha2-512"): hashes.SHA512(), -                ("DSA", b"ssh-dss"): hashes.SHA1(), -                ("Ed25519", b"ssh-ed25519"): hashes.SHA512(), -            }.get((self.type(), signatureType)) + +        if self.type() == "Ed25519": +            return hashes.SHA512() + +        return { +            ("RSA", b"ssh-rsa"): hashes.SHA1(), +            ("RSA", b"rsa-sha2-256"): hashes.SHA256(), +            ("RSA", b"rsa-sha2-512"): hashes.SHA512(), +            ("DSA", b"ssh-dss"): hashes.SHA1(), +        }.get((self.type(), signatureType))      def size(self):          """ diff --git a/contrib/python/Twisted/py3/twisted/conch/ssh/transport.py b/contrib/python/Twisted/py3/twisted/conch/ssh/transport.py index d46f093dff9..545c010f76e 100644 --- a/contrib/python/Twisted/py3/twisted/conch/ssh/transport.py +++ b/contrib/python/Twisted/py3/twisted/conch/ssh/transport.py @@ -103,17 +103,13 @@ class SSHCiphers:      cipherMap = {          b"3des-cbc": (algorithms.TripleDES, 24, modes.CBC), -        b"blowfish-cbc": (algorithms.Blowfish, 16, modes.CBC),          b"aes256-cbc": (algorithms.AES, 32, modes.CBC),          b"aes192-cbc": (algorithms.AES, 24, modes.CBC),          b"aes128-cbc": 
(algorithms.AES, 16, modes.CBC), -        b"cast128-cbc": (algorithms.CAST5, 16, modes.CBC),          b"aes128-ctr": (algorithms.AES, 16, modes.CTR),          b"aes192-ctr": (algorithms.AES, 24, modes.CTR),          b"aes256-ctr": (algorithms.AES, 32, modes.CTR),          b"3des-ctr": (algorithms.TripleDES, 24, modes.CTR), -        b"blowfish-ctr": (algorithms.Blowfish, 16, modes.CTR), -        b"cast128-ctr": (algorithms.CAST5, 16, modes.CTR),          b"none": (None, 0, modes.CBC),      }      macMap = { @@ -295,10 +291,6 @@ def _getSupportedCiphers():          b"aes192-cbc",          b"aes128-ctr",          b"aes128-cbc", -        b"cast128-ctr", -        b"cast128-cbc", -        b"blowfish-ctr", -        b"blowfish-cbc",          b"3des-ctr",          b"3des-cbc",      ] diff --git a/contrib/python/Twisted/py3/twisted/internet/address.py b/contrib/python/Twisted/py3/twisted/internet/address.py index 10fa85241e5..d0ab1f69290 100644 --- a/contrib/python/Twisted/py3/twisted/internet/address.py +++ b/contrib/python/Twisted/py3/twisted/internet/address.py @@ -21,7 +21,7 @@ from twisted.python.runtime import platform  @implementer(IAddress) [email protected](hash=True, auto_attribs=True) [email protected](unsafe_hash=True, auto_attribs=True)  class IPv4Address:      """      An L{IPv4Address} represents the address of an IPv4 socket endpoint. @@ -45,7 +45,7 @@ class IPv4Address:  @implementer(IAddress) [email protected](hash=True, auto_attribs=True) [email protected](unsafe_hash=True, auto_attribs=True)  class IPv6Address:      """      An L{IPv6Address} represents the address of an IPv6 socket endpoint. @@ -85,7 +85,7 @@ class _ProcessAddress:      """ [email protected](hash=True, auto_attribs=True) [email protected](unsafe_hash=True, auto_attribs=True)  @implementer(IAddress)  class HostnameAddress:      """ @@ -102,7 +102,7 @@ class HostnameAddress:      port: int [email protected](hash=False, repr=False, eq=False, auto_attribs=True) [email protected](unsafe_hash=False, repr=False, eq=False, auto_attribs=True)  @implementer(IAddress)  class UNIXAddress:      """ diff --git a/contrib/python/Twisted/py3/twisted/internet/base.py b/contrib/python/Twisted/py3/twisted/internet/base.py index c807f418731..2c38b80b0ca 100644 --- a/contrib/python/Twisted/py3/twisted/internet/base.py +++ b/contrib/python/Twisted/py3/twisted/internet/base.py @@ -213,29 +213,23 @@ class DelayedCall:          """          return not (self.cancelled or self.called) -    def __le__(self, other: object) -> bool: +    def __le__(self, other: "DelayedCall") -> bool:          """          Implement C{<=} operator between two L{DelayedCall} instances.          Comparison is based on the C{time} attribute (unadjusted by the          delayed time).          """ -        if isinstance(other, DelayedCall): -            return self.time <= other.time -        else: -            return NotImplemented +        return self.time <= other.time -    def __lt__(self, other: object) -> bool: +    def __lt__(self, other: "DelayedCall") -> bool:          """          Implement C{<} operator between two L{DelayedCall} instances.          Comparison is based on the C{time} attribute (unadjusted by the          delayed time).          
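
The address classes above only swap attrs' hash=True flag for its newer spelling. A tiny sketch of the idiom, assuming attrs >= 22.2 and a hypothetical class name:

import attr

@attr.s(unsafe_hash=True, auto_attribs=True)
class HostPort:
    host: str
    port: int

# Instances hash by field values, much like the IPv4Address/IPv6Address above.
assert hash(HostPort("10.0.0.1", 80)) == hash(HostPort("10.0.0.1", 80))
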
""" -        if isinstance(other, DelayedCall): -            return self.time < other.time -        else: -            return NotImplemented +        return self.time < other.time      def __repr__(self) -> str:          """ @@ -577,6 +571,8 @@ class PluggableResolverMixin:  _SystemEventID = NewType("_SystemEventID", Tuple[str, _ThreePhaseEventTriggerHandle])  _ThreadCall = Tuple[Callable[..., Any], Tuple[object, ...], Dict[str, object]] +_DEFAULT_DELAYED_CALL_LOGGING_HANDLER = _log.failureHandler("while handling timed call") +  @implementer(IReactorCore, IReactorTime, _ISupportsExitSignalCapturing)  class ReactorBase(PluggableResolverMixin): @@ -964,7 +960,6 @@ class ReactorBase(PluggableResolverMixin):          """          See twisted.internet.interfaces.IReactorTime.callLater.          """ -        assert builtins.callable(callable), f"{callable} is not callable"          assert delay >= 0, f"{delay} is not greater than or equal to 0 seconds"          delayedCall = DelayedCall(              self.seconds() + delay, @@ -1012,6 +1007,12 @@ class ReactorBase(PluggableResolverMixin):          ]      def _insertNewDelayedCalls(self) -> None: +        # This function is called twice per reactor iteration, once in +        # timeout() and once in runUntilCurrent(), and in most cases there +        # won't be any new timeouts. So have a fast path for the empty case. +        if not self._newTimedCalls: +            return +          for call in self._newTimedCalls:              if call.cancelled:                  self._cancellations -= 1 @@ -1083,18 +1084,23 @@ class ReactorBase(PluggableResolverMixin):                  heappush(self._pendingTimedCalls, call)                  continue -            with _log.failuresHandled( -                "while handling timed call {previous()}", -                previous=lambda creator=call.creator: ( -                    "" -                    if creator is None -                    else "\n" -                    + (" C: from a DelayedCall created here:\n") -                    + " C:" -                    + "".join(creator).rstrip().replace("\n", "\n C:") -                    + "\n" -                ), -            ): +            logHandler = ( +                _log.failuresHandled( +                    "while handling timed call {previous()}", +                    previous=lambda creator=call.creator: ( +                        "\n" +                        + (" C: from a DelayedCall created here:\n") +                        + " C:" +                        + "".join(creator).rstrip().replace("\n", "\n C:") +                        + "\n" +                    ), +                ) +                if call.creator +                # A much faster logging handler for the common case where extra +                # debug info is not being output: +                else _DEFAULT_DELAYED_CALL_LOGGING_HANDLER +            ) +            with logHandler:                  call.called = 1                  call.func(*call.args, **call.kw) diff --git a/contrib/python/Twisted/py3/twisted/internet/defer.py b/contrib/python/Twisted/py3/twisted/internet/defer.py index 1c58baea7cd..951ca87d6b5 100644 --- a/contrib/python/Twisted/py3/twisted/internet/defer.py +++ b/contrib/python/Twisted/py3/twisted/internet/defer.py @@ -117,7 +117,11 @@ def succeed(result: _T) -> "Deferred[_T]":             method.      
"""      d: Deferred[_T] = Deferred() -    d.callback(result) +    # This violate abstraction boundaries, so code that is not internal to +    # Twisted shouldn't do it, but it's a significant performance optimization: +    d.result = result +    d.called = True +    d._chainedTo = None      return d @@ -213,8 +217,8 @@ def maybeDeferred(      except BaseException:          return fail(Failure(captureVars=Deferred.debug)) -    if isinstance(result, Deferred): -        return result +    if type(result) in _DEFERRED_SUBCLASSES: +        return result  # type: ignore[return-value]      elif isinstance(result, Failure):          return fail(result)      elif type(result) is CoroutineType: @@ -522,6 +526,8 @@ class Deferred(Awaitable[_SelfResultT]):          if errbackKeywords is None:              errbackKeywords = {}  # type: ignore[unreachable] +        # Note that this logic is duplicated in addCallbac/addErrback/addBoth +        # for performance reasons.          self.callbacks.append(              (                  (callback, callbackArgs, callbackKeywords), @@ -614,10 +620,14 @@ class Deferred(Awaitable[_SelfResultT]):          See L{addCallbacks}.          """ -        # Implementation Note: Any annotations for brevity; the overloads above -        # handle specifying the actual signature, and there's nothing worth -        # type-checking in this implementation. -        return self.addCallbacks(callback, callbackArgs=args, callbackKeywords=kwargs) +        # This could be implemented as a call to addCallbacks, but doing it +        # directly is faster. +        self.callbacks.append(((callback, args, kwargs), (_failthru, (), {}))) + +        if self.called: +            self._runCallbacks() + +        return self      @overload      def addErrback( @@ -652,10 +662,14 @@ class Deferred(Awaitable[_SelfResultT]):          See L{addCallbacks}.          """ -        # See implementation note in addCallbacks about Any arguments -        return self.addCallbacks( -            passthru, errback, errbackArgs=args, errbackKeywords=kwargs -        ) +        # This could be implemented as a call to addCallbacks, but doing it +        # directly is faster. +        self.callbacks.append(((passthru, (), {}), (errback, args, kwargs))) + +        if self.called: +            self._runCallbacks() + +        return self      @overload      def addBoth( @@ -737,15 +751,15 @@ class Deferred(Awaitable[_SelfResultT]):          See L{addCallbacks}.          """ -        # See implementation note in addCallbacks about Any arguments -        return self.addCallbacks( -            callback, -            callback, -            callbackArgs=args, -            errbackArgs=args, -            callbackKeywords=kwargs, -            errbackKeywords=kwargs, -        ) +        # This could be implemented as a call to addCallbacks, but doing it +        # directly is faster. +        call = (callback, args, kwargs) +        self.callbacks.append((call, call)) + +        if self.called: +            self._runCallbacks() + +        return self      # END way too many overloads @@ -915,13 +929,13 @@ class Deferred(Awaitable[_SelfResultT]):          """          Stop processing on a L{Deferred} until L{unpause}() is called.          """ -        self.paused = self.paused + 1 +        self.paused += 1      def unpause(self) -> None:          """          Process all callbacks made since L{pause}() was called.          
""" -        self.paused = self.paused - 1 +        self.paused -= 1          if self.paused:              return          if self.called: @@ -983,10 +997,8 @@ class Deferred(Awaitable[_SelfResultT]):          """          Build a tuple of callback and errback with L{_Sentinel._CONTINUE}.          """ -        return ( -            (_Sentinel._CONTINUE, (self,), _NONE_KWARGS), -            (_Sentinel._CONTINUE, (self,), _NONE_KWARGS), -        ) +        triple = (_CONTINUE, (self,), _NONE_KWARGS) +        return (triple, triple)  # type: ignore[return-value]      def _runCallbacks(self) -> None:          """ @@ -1049,7 +1061,9 @@ class Deferred(Awaitable[_SelfResultT]):                  if callback is _CONTINUE:                      # Give the waiting Deferred our current result and then                      # forget about that result ourselves. -                    chainee = cast(Deferred[object], args[0]) + +                    # We don't use cast() for performance reasons: +                    chainee: Deferred[object] = args[0]  # type: ignore[assignment]                      chainee.result = current.result                      current.result = None                      # Making sure to update _debugInfo @@ -1127,7 +1141,6 @@ class Deferred(Awaitable[_SelfResultT]):                  if isinstance(current.result, Failure):                      # Stash the Failure in the _debugInfo for unhandled error                      # reporting. -                    current.result.cleanFailure()                      if current._debugInfo is None:                          current._debugInfo = DebugInfo()                      current._debugInfo.failResult = current.result @@ -1158,38 +1171,28 @@ class Deferred(Awaitable[_SelfResultT]):      __repr__ = __str__ -    def __iter__(self) -> "Deferred[_SelfResultT]": -        return self - -    @_extraneous -    def send(self, value: object = None) -> "Deferred[_SelfResultT]": -        if self.paused: -            # If we're paused, we have no result to give -            return self - -        result = getattr(self, "result", _NO_RESULT) -        if result is _NO_RESULT: -            return self -        if isinstance(result, Failure): -            # Clear the failure on debugInfo so it doesn't raise "unhandled -            # exception" -            assert self._debugInfo is not None -            self._debugInfo.failResult = None -            result.value.__failure__ = result -            raise result.value -        else: -            raise StopIteration(result) - -    # For PEP-492 support (async/await) -    # type note: base class "Awaitable" defined the type as: -    #     Callable[[], Generator[Any, None, _SelfResultT]] -    #     See: https://github.com/python/typeshed/issues/5125 -    #     When the typeshed patch is included in a mypy release, -    #     this method can be replaced by `__await__ = __iter__`. 
-    def __await__(self) -> Generator[Any, None, _SelfResultT]: -        return self.__iter__()  # type: ignore[return-value] +    def __iter__(self) -> Generator[Deferred[_SelfResultT], None, _SelfResultT]: +        while True: +            if self.paused: +                # If we're paused, we have no result to give +                yield self +                continue + +            result = getattr(self, "result", _NO_RESULT) +            if result is _NO_RESULT: +                yield self +                continue + +            if isinstance(result, Failure): +                # Clear the failure on debugInfo so it doesn't raise "unhandled +                # exception" +                assert self._debugInfo is not None +                self._debugInfo.failResult = None +                result.raiseException() +            else: +                return result  # type: ignore[return-value] -    __next__ = send +    __await__ = __iter__      def asFuture(self, loop: AbstractEventLoop) -> "Future[_SelfResultT]":          """ @@ -1351,11 +1354,11 @@ def ensureDeferred(      @param coro: The coroutine object to schedule, or a L{Deferred}.      """ -    if isinstance(coro, Deferred): -        return coro +    if type(coro) in _DEFERRED_SUBCLASSES: +        return coro  # type: ignore[return-value]      else:          try: -            return Deferred.fromCoroutine(coro) +            return Deferred.fromCoroutine(coro)  # type: ignore[arg-type]          except NotACoroutineError:              # It's not a coroutine. Raise an exception, but say that it's also              # not a Deferred so the error makes sense. @@ -2112,17 +2115,23 @@ def _inlineCallbacks(              status.deferred.callback(callbackValue)              return -        if iscoroutine(result) or inspect.isgenerator(result): +        isDeferred = type(result) in _DEFERRED_SUBCLASSES +        # iscoroutine() is pretty expensive in this context, so avoid calling +        # it unnecessarily: +        if not isDeferred and (iscoroutine(result) or inspect.isgenerator(result)):              result = _cancellableInlineCallbacks(result) +            isDeferred = True + +        if isDeferred: +            # We don't cast() to Deferred because that does more work in the hot path -        if isinstance(result, Deferred):              # a deferred was yielded, get the result. -            result.addBoth(_gotResultInlineCallbacks, waiting, gen, status, context) +            result.addBoth(_gotResultInlineCallbacks, waiting, gen, status, context)  # type: ignore[attr-defined]              if waiting[0]:                  # Haven't called back yet, set flag so that we get reinvoked                  # and return from the loop                  waiting[0] = False -                status.waitingOn = result +                status.waitingOn = result  # type: ignore[assignment]                  return              result = waiting[1] diff --git a/contrib/python/Twisted/py3/twisted/internet/endpoints.py b/contrib/python/Twisted/py3/twisted/internet/endpoints.py index 7ab1d817319..a98fd2ba43b 100644 --- a/contrib/python/Twisted/py3/twisted/internet/endpoints.py +++ b/contrib/python/Twisted/py3/twisted/internet/endpoints.py @@ -808,9 +808,18 @@ class HostnameEndpoint:              seconds to wait before assuming the connection has failed.          @type timeout: L{float} or L{int} -        @param bindAddress: the local address of the network interface to make -            the connections from. 
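
A short sketch of the coroutine integration that the rewritten __iter__/__await__ above keeps working; because succeed() is already fired, this resolves synchronously without a running reactor.

from twisted.internet.defer import Deferred, ensureDeferred, succeed

async def doubled() -> int:
    value = await succeed(21)      # driven by Deferred.__await__
    return value * 2

d: Deferred[int] = ensureDeferred(doubled())
d.addCallback(print)               # prints 42
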
-        @type bindAddress: L{bytes} +        @param bindAddress: The client socket normally uses whatever +            local interface (eth0, en0, lo, etc) is best suited for the +            target address, and a randomly-assigned port. This argument +            allows that local address/port to be overridden. Providing +            just an address (as a str) will bind the client socket to +            whichever interface is assigned that address. Providing a +            tuple of (str, int) will bind it to both an interface and a +            specific local port. To bind the port, but leave the +            interface unbound, use a tuple of ("", port), or ("0.0.0.0", +            port) for IPv4, or ("::0", port) for IPv6. To leave both +            interface and port unbound, just use None. +        @type bindAddress: L{str}, L{tuple}, or None          @param attemptDelay: The number of seconds to delay between connection              attempts. @@ -827,6 +836,11 @@ class HostnameEndpoint:          self._hostStr = self._hostBytes if bytes is str else self._hostText          self._port = port          self._timeout = timeout +        if bindAddress is not None: +            if isinstance(bindAddress, (bytes, str)): +                bindAddress = (bindAddress, 0) +            if isinstance(bindAddress[0], bytes): +                bindAddress = (bindAddress[0].decode(), bindAddress[1])          self._bindAddress = bindAddress          if attemptDelay is None:              attemptDelay = self._DEFAULT_ATTEMPT_DELAY @@ -2299,7 +2313,9 @@ def _parseClientTLS(          ),          clientFromString(reactor, endpoint)          if endpoint is not None -        else HostnameEndpoint(reactor, _idnaBytes(host), port, timeout, bindAddress), +        else HostnameEndpoint( +            reactor, _idnaBytes(host), port, timeout, (bindAddress, 0) +        ),      ) diff --git a/contrib/python/Twisted/py3/twisted/internet/tcp.py b/contrib/python/Twisted/py3/twisted/internet/tcp.py index 8f85025556b..018d1912d27 100644 --- a/contrib/python/Twisted/py3/twisted/internet/tcp.py +++ b/contrib/python/Twisted/py3/twisted/internet/tcp.py @@ -727,6 +727,7 @@ class _BaseTCPClient:              whenDone = None          if whenDone and bindAddress is not None:              try: +                assert type(bindAddress) == tuple                  if abstract.isIPv6Address(bindAddress[0]):                      bindinfo = _resolveIPv6(*bindAddress)                  else: diff --git a/contrib/python/Twisted/py3/twisted/internet/testing.py b/contrib/python/Twisted/py3/twisted/internet/testing.py index 2c372495844..6563184edf9 100644 --- a/contrib/python/Twisted/py3/twisted/internet/testing.py +++ b/contrib/python/Twisted/py3/twisted/internet/testing.py @@ -9,7 +9,18 @@ from __future__ import annotations  from io import BytesIO  from socket import AF_INET, AF_INET6 -from typing import Callable, Iterator, Sequence, overload +from time import time +from typing import ( +    Any, +    Callable, +    Coroutine, +    Generator, +    Iterator, +    Sequence, +    TypeVar, +    Union, +    overload, +)  from zope.interface import implementedBy, implementer  from zope.interface.verify import verifyClass @@ -19,7 +30,7 @@ from typing_extensions import ParamSpec, Self  from twisted.internet import address, error, protocol, task  from twisted.internet.abstract import _dataMustBeBytes, isIPv6Address  from twisted.internet.address import IPv4Address, IPv6Address, UNIXAddress -from twisted.internet.defer import Deferred +from 
twisted.internet.defer import Deferred, ensureDeferred, succeed  from twisted.internet.error import UnsupportedAddressFamily  from twisted.internet.interfaces import (      IConnector, @@ -967,3 +978,95 @@ class EventLoggingObserver(Sequence[LogEvent]):          publisher.addObserver(obs)          testInstance.addCleanup(lambda: publisher.removeObserver(obs))          return obs + + +_T = TypeVar("_T") + + +def _benchmarkWithReactor( +    test_target: Callable[ +        [], +        Union[ +            Coroutine[Deferred[Any], Any, _T], +            Generator[Deferred[Any], Any, _T], +            Deferred[_T], +        ], +    ] +) -> Callable[[Any], None]:  # pragma: no cover +    """ +    Decorator for running a benchmark tests that loops the reactor. + +    This is designed to decorate test method executed using pytest and +    pytest-benchmark. +    """ + +    def deferredWrapper(): +        return ensureDeferred(test_target()) + +    def benchmark_test(benchmark: Any) -> None: +        # Spinning up and spinning down the reactor adds quite a lot of +        # overhead to the benchmarked function. So, make sure that the overhead +        # isn't making the benchmark meaningless before we bother with any real +        # benchmarking. +        start = time() +        _runReactor(lambda: succeed(None)) +        justReactorElapsed = time() - start + +        start = time() +        _runReactor(deferredWrapper) +        benchmarkElapsed = time() - start + +        if benchmarkElapsed / justReactorElapsed < 5: +            raise RuntimeError(  # pragma: no cover +                "The function you are benchmarking is fast enough that its " +                "run time is being swamped by the startup/shutdown of the " +                "reactor. Consider adding a for loop to the benchmark " +                "function so it does the work a number of times." +            ) + +        benchmark(_runReactor, deferredWrapper) + +    return benchmark_test + + +def _runReactor(callback: Callable[[], Deferred[_T]]) -> None:  # pragma: no cover +    """ +    (re)Start a reactor that might have been previously started. +    """ +    # Delay to import to prevent side-effect in normal tests that are +    # expecting to import twisted.internet.testing while no reactor is +    # installed. +    from twisted.internet import reactor + +    errors: list[failure.Failure] = [] + +    deferred = callback() +    deferred.addErrback(errors.append) +    deferred.addBoth(lambda _: reactor.callLater(0, _stopReactor, reactor))  # type: ignore[attr-defined] +    reactor.run(installSignalHandlers=False)  # type: ignore[attr-defined] + +    if errors:  # pragma: no cover +        # Make sure the test fails in a visible way: +        errors[0].raiseException() + + +def _stopReactor(reactor):  # pragma: no cover +    """ +    Stop the reactor and allow it to be re-started later. +    """ +    reactor.stop() +    # Allow for on shutdown hooks to execute. +    reactor.iterate() +    # Since we're going to be poking the reactor's guts, let's make sure what +    # we're doing is vaguely reasonable: +    assert hasattr(reactor, "_startedBefore") +    assert hasattr(reactor, "_started") +    assert hasattr(reactor, "_justStopped") +    assert hasattr(reactor, "running") +    reactor._startedBefore = False +    reactor._started = False +    reactor._justStopped = False +    reactor.running = False +    # Start running has consumed the startup events, so we need +    # to restore them. 
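
A hedged sketch of how the private _benchmarkWithReactor helper above is intended to be used from a pytest-benchmark test module; the benchmarked coroutine is a made-up placeholder that does enough reactor round-trips to dominate the startup/shutdown overhead.

from twisted.internet.task import deferLater
from twisted.internet.testing import _benchmarkWithReactor


async def _spinReactorManyTimes() -> None:
    # Imported lazily so that merely importing the test module does not
    # install a reactor.
    from twisted.internet import reactor

    for _ in range(1000):
        # Each await bounces once through the reactor's callLater queue.
        await deferLater(reactor, 0)


# pytest-benchmark supplies the `benchmark` fixture to the wrapped test.
test_reactor_roundtrips = _benchmarkWithReactor(_spinReactorManyTimes)
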
+    reactor.addSystemEventTrigger("during", "startup", reactor._reallyStartRunning) diff --git a/contrib/python/Twisted/py3/twisted/logger/_format.py b/contrib/python/Twisted/py3/twisted/logger/_format.py index 59b44c7f723..57d8c3b1e60 100644 --- a/contrib/python/Twisted/py3/twisted/logger/_format.py +++ b/contrib/python/Twisted/py3/twisted/logger/_format.py @@ -199,7 +199,7 @@ class PotentialCallWrapper(object):          self._wrapped = wrapped      def __getattr__(self, name: str) -> object: -        return keycall(name, self._wrapped.__getattribute__) +        return keycall(name, lambda name_: getattr(self._wrapped, name_))      def __getitem__(self, name: str) -> object:          # The sub-object may not be indexable, but if it isn't, that's the @@ -208,13 +208,13 @@ class PotentialCallWrapper(object):          return PotentialCallWrapper(value)      def __format__(self, format_spec: str) -> str: -        return self._wrapped.__format__(format_spec) +        return format(self._wrapped, format_spec)      def __repr__(self) -> str: -        return self._wrapped.__repr__() +        return repr(self._wrapped)      def __str__(self) -> str: -        return self._wrapped.__str__() +        return str(self._wrapped)  class CallMapping(Mapping[str, Any]): diff --git a/contrib/python/Twisted/py3/twisted/python/failure.py b/contrib/python/Twisted/py3/twisted/python/failure.py index d253ffad743..987287e0c76 100644 --- a/contrib/python/Twisted/py3/twisted/python/failure.py +++ b/contrib/python/Twisted/py3/twisted/python/failure.py @@ -24,9 +24,10 @@ from inspect import getmro  from io import StringIO  from typing import Callable, NoReturn, TypeVar -import opcode +from incremental import Version  from twisted.python import reflect +from twisted.python.deprecate import deprecatedProperty  _T_Callable = TypeVar("_T_Callable", bound=Callable[..., object]) @@ -84,8 +85,7 @@ def format_frames(frames, write, detail="default"):                  w(f"  {name} : {repr(val)}\n") -# slyphon: i have a need to check for this value in trial -#          so I made it a module-level constant +# Unused, here for backwards compatibility.  EXCEPTION_CAUGHT_HERE = "--- <exception caught here> ---" @@ -96,28 +96,20 @@ class NoCurrentExceptionError(Exception):      """ -def _Traceback(stackFrames, tbFrames): +def _Traceback(tbFrames):      """      Construct a fake traceback object using a list of frames.      It should have the same API as stdlib to allow interaction with      other tools. -    @param stackFrames: [(methodname, filename, lineno, locals, globals), ...]      @param tbFrames: [(methodname, filename, lineno, locals, globals), ...]      """      assert len(tbFrames) > 0, "Must pass some frames"      # We deliberately avoid using recursion here, as the frames list may be      # long. -    # 'stackFrames' is a list of frames above (ie, older than) the point the -    # exception was caught, with oldest at the start. Start by building these -    # into a linked list of _Frame objects (with the f_back links pointing back -    # towards the oldest frame).      stack = None -    for sf in stackFrames: -        stack = _Frame(sf, stack) -      # 'tbFrames' is a list of frames from the point the exception was caught,      # down to where it was thrown, with the oldest at the start. 
Add these to      # the linked list of _Frames, but also wrap each one with a _Traceback @@ -237,30 +229,29 @@ class Failure(BaseException):      This is necessary because Python's built-in error mechanisms are      inconvenient for asynchronous communication. -    The C{stack} and C{frame} attributes contain frames.  Each frame is a tuple +    The C{frame} attribute contain the traceback frames.  Each frame is a tuple      of (funcName, fileName, lineNumber, localsItems, globalsItems), where      localsItems and globalsItems are the contents of      C{locals().items()}/C{globals().items()} for that frame, or an empty tuple      if those details were not captured. +    Local/global variables in C{frame} will only be captured if +    C{captureVars=True} when constructing the L{Failure}. +      @ivar value: The exception instance responsible for this failure. +      @ivar type: The exception's class. -    @ivar stack: list of frames, innermost last, excluding C{Failure.__init__}. + +    @ivar stack: Deprecated, always an empty list.  Equivalent information can +        be extracted from C{import traceback; +        traceback.extract_stack(your_failure.tb)} +      @ivar frames: list of frames, innermost first.      """      pickled = 0 -    stack = None      _parents = None -    # The opcode of "yield" in Python bytecode. We need this in -    # _findFailure in order to identify whether an exception was -    # thrown by a throwExceptionIntoGenerator. -    # on PY3, b'a'[0] == 97 while in py2 b'a'[0] == b'a' opcodes -    # are stored in bytes so we need to properly account for this -    # difference. -    _yieldOpcode = opcode.opmap["YIELD_VALUE"] -      def __init__(self, exc_value=None, exc_type=None, exc_tb=None, captureVars=False):          """          Initialize me with an explanation of the error. @@ -291,16 +282,10 @@ class Failure(BaseException):          self.type = self.value = tb = None          self.captureVars = captureVars -        stackOffset = 0 - -        if exc_value is None: -            exc_value = self._findFailure() -          if exc_value is None:              self.type, self.value, tb = sys.exc_info()              if self.type is None:                  raise NoCurrentExceptionError() -            stackOffset = 1          elif exc_type is None:              if isinstance(exc_value, Exception):                  self.type = exc_value.__class__ @@ -316,84 +301,25 @@ class Failure(BaseException):              self._extrapolate(self.value)              return -        if hasattr(self.value, "__failure__"): -            # For exceptions propagated through coroutine-awaiting (see -            # Deferred.send, AKA Deferred.__next__), which can't be raised as -            # Failure because that would mess up the ability to except: them: -            self._extrapolate(self.value.__failure__) - -            # Clean up the inherently circular reference established by storing -            # the failure there.  This should make the common case of a Twisted -            # / Deferred-returning coroutine somewhat less hard on the garbage -            # collector. -            del self.value.__failure__ -            return -          if tb is None:              if exc_tb:                  tb = exc_tb              elif getattr(self.value, "__traceback__", None):                  # Python 3                  tb = self.value.__traceback__ - -        frames = self.frames = [] -        stack = self.stack = [] - -        # Added 2003-06-23 by Chris Armstrong. 
Yes, I actually have a -        # use case where I need this traceback object, and I've made -        # sure that it'll be cleaned up.          self.tb = tb -        if tb: -            f = tb.tb_frame -        elif not isinstance(self.value, Failure): -            # We don't do frame introspection since it's expensive, -            # and if we were passed a plain exception with no -            # traceback, it's not useful anyway -            f = stackOffset = None - -        while stackOffset and f: -            # This excludes this Failure.__init__ frame from the -            # stack, leaving it to start with our caller instead. -            f = f.f_back -            stackOffset -= 1 - -        # Keeps the *full* stack.  Formerly in spread.pb.print_excFullStack: -        # -        #   The need for this function arises from the fact that several -        #   PB classes have the peculiar habit of discarding exceptions -        #   with bareword "except:"s.  This premature exception -        #   catching means tracebacks generated here don't tend to show -        #   what called upon the PB object. -        while f: -            if captureVars: -                localz = f.f_locals.copy() -                if f.f_locals is f.f_globals: -                    globalz = {} -                else: -                    globalz = f.f_globals.copy() -                for d in globalz, localz: -                    if "__builtins__" in d: -                        del d["__builtins__"] -                localz = localz.items() -                globalz = globalz.items() -            else: -                localz = globalz = () -            stack.insert( -                0, -                ( -                    f.f_code.co_name, -                    f.f_code.co_filename, -                    f.f_lineno, -                    localz, -                    globalz, -                ), -            ) -            f = f.f_back +    @property +    def frames(self): +        if hasattr(self, "_frames"): +            return self._frames + +        frames = self._frames = [] +        tb = self.tb          while tb is not None:              f = tb.tb_frame -            if captureVars: +            if self.captureVars:                  localz = f.f_locals.copy()                  if f.f_locals is f.f_globals:                      globalz = {} @@ -416,6 +342,19 @@ class Failure(BaseException):                  )              )              tb = tb.tb_next +        return frames + +    @frames.setter +    def frames(self, frames): +        self._frames = frames + +    @deprecatedProperty(Version("Twisted", 24, 10, 0)) +    def stack(self): +        return [] + +    @stack.setter  # type: ignore[no-redef] +    def stack(self, stack): +        del stack      @property      def parents(self): @@ -442,26 +381,9 @@ class Failure(BaseException):              one.          @type otherFailure: L{Failure}          """ -        # Copy all infos from that failure (including self.frames). +        # Copy all infos from that failure (including self._frames).          self.__dict__ = copy.copy(otherFailure.__dict__) -        # If we are re-throwing a Failure, we merge the stack-trace stored in -        # the failure with the current exception's stack.  This integrated with -        # throwExceptionIntoGenerator and allows to provide full stack trace, -        # even if we go through several layers of inlineCallbacks. 
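
For context, a small sketch of the Failure surface this hunk reworks: frames are now derived lazily from the stored traceback, and the deprecated stack attribute is always an empty list.

from twisted.python.failure import Failure

try:
    1 / 0
except ZeroDivisionError:
    f = Failure()

print(f.type)             # <class 'ZeroDivisionError'>
print(f.frames[0][:3])    # (funcName, fileName, lineNumber) of a captured frame
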
-        _, _, tb = sys.exc_info() -        frames = [] -        while tb is not None: -            f = tb.tb_frame -            if f.f_code not in _inlineCallbacksExtraneous: -                frames.append( -                    (f.f_code.co_name, f.f_code.co_filename, tb.tb_lineno, (), ()) -                ) -            tb = tb.tb_next -        # Merging current stack with stack stored in the Failure. -        frames.extend(self.frames) -        self.frames = frames -      @staticmethod      def _withoutTraceback(value: BaseException) -> Failure:          """ @@ -475,8 +397,6 @@ class Failure(BaseException):          count += 1          result.captureVars = False          result.count = count -        result.frames = [] -        result.stack = []  # type: ignore          result.value = value          result.type = value.__class__          result.tb = None @@ -544,69 +464,8 @@ class Failure(BaseException):          @raise StopIteration: If there are no more values in the generator.          @raise anything else: Anything that the generator raises.          """ -        # Note that the actual magic to find the traceback information -        # is done in _findFailure.          return g.throw(self.value.with_traceback(self.tb)) -    @classmethod -    def _findFailure(cls): -        """ -        Find the failure that represents the exception currently in context. -        """ -        tb = sys.exc_info()[-1] -        if not tb: -            return - -        secondLastTb = None -        lastTb = tb -        while lastTb.tb_next: -            secondLastTb = lastTb -            lastTb = lastTb.tb_next - -        lastFrame = lastTb.tb_frame - -        # NOTE: f_locals.get('self') is used rather than -        # f_locals['self'] because psyco frames do not contain -        # anything in their locals() dicts.  psyco makes debugging -        # difficult anyhow, so losing the Failure objects (and thus -        # the tracebacks) here when it is used is not that big a deal. - -        # Handle raiseException-originated exceptions -        if lastFrame.f_code is cls.raiseException.__code__: -            return lastFrame.f_locals.get("self") - -        # Handle throwExceptionIntoGenerator-originated exceptions -        # this is tricky, and differs if the exception was caught -        # inside the generator, or above it: - -        # It is only really originating from -        # throwExceptionIntoGenerator if the bottom of the traceback -        # is a yield. -        # Pyrex and Cython extensions create traceback frames -        # with no co_code, but they can't yield so we know it's okay to -        # just return here. -        if (not lastFrame.f_code.co_code) or lastFrame.f_code.co_code[ -            lastTb.tb_lasti -        ] != cls._yieldOpcode: -            return - -        # If the exception was caught above the generator.throw -        # (outside the generator), it will appear in the tb (as the -        # second last item): -        if secondLastTb: -            frame = secondLastTb.tb_frame -            if frame.f_code is cls.throwExceptionIntoGenerator.__code__: -                return frame.f_locals.get("self") - -        # If the exception was caught below the generator.throw -        # (inside the generator), it will appear in the frames' linked -        # list, above the top-level traceback item (which must be the -        # generator frame itself, thus its caller is -        # throwExceptionIntoGenerator). 
-        frame = tb.tb_frame.f_back -        if frame and frame.f_code is cls.throwExceptionIntoGenerator.__code__: -            return frame.f_locals.get("self") -      def __repr__(self) -> str:          return "<{} {}: {}>".format(              reflect.qual(self.__class__), @@ -618,7 +477,10 @@ class Failure(BaseException):          return "[Failure instance: %s]" % self.getBriefTraceback()      def __setstate__(self, state): +        if "stack" in state: +            state.pop("stack")          state["_parents"] = state.pop("parents") +        state["_frames"] = state.pop("frames")          self.__dict__.update(state)      def __getstate__(self): @@ -636,6 +498,10 @@ class Failure(BaseException):          # Backwards compatibility with old code, e.g. for Perspective Broker:          c["parents"] = c.pop("_parents") +        c["stack"] = [] + +        if "_frames" in c: +            c.pop("_frames")          if self.captureVars:              c["frames"] = [ @@ -648,25 +514,12 @@ class Failure(BaseException):                  ]                  for v in self.frames              ] +        else: +            c["frames"] = self.frames          # Added 2003-06-23. See comment above in __init__          c["tb"] = None -        if self.stack is not None: -            # XXX: This is a band-aid.  I can't figure out where these -            # (failure.stack is None) instances are coming from. -            if self.captureVars: -                c["stack"] = [ -                    [ -                        v[0], -                        v[1], -                        v[2], -                        _safeReprVars(v[3]), -                        _safeReprVars(v[4]), -                    ] -                    for v in self.stack -                ] -          c["pickled"] = 1          return c @@ -682,7 +535,9 @@ class Failure(BaseException):          On Python 3, this will also set the C{__traceback__} attribute of the          exception instance to L{None}.          """ -        self.__dict__ = self.__getstate__() +        state = self.__getstate__() +        state["_frames"] = state.pop("frames") +        self.__dict__ = state          if getattr(self.value, "__traceback__", None):              # Python 3              self.value.__traceback__ = None @@ -700,7 +555,7 @@ class Failure(BaseException):          if self.tb is not None:              return self.tb          elif len(self.frames) > 0: -            return _Traceback(self.stack, self.frames) +            return _Traceback(self.frames)          else:              return None @@ -731,9 +586,7 @@ class Failure(BaseException):          @param file: If specified, a file-like object to which to write the              traceback. -        @param elideFrameworkCode: A flag indicating whether to attempt to -            remove uninteresting frames from within Twisted itself from the -            output. +        @param elideFrameworkCode: Deprecated, ignored.          @param detail: A string indicating how much information to include              in the traceback.  
Must be one of C{'brief'}, C{'default'}, or @@ -743,6 +596,7 @@ class Failure(BaseException):              from twisted.python import log              file = log.logerr +          w = file.write          if detail == "verbose" and not self.captureVars: @@ -773,9 +627,6 @@ class Failure(BaseException):          # Frames, formatted in appropriate style          if self.frames: -            if not elideFrameworkCode: -                format_frames(self.stack[-traceupLength:], w, formatDetail) -                w(f"{EXCEPTION_CAUGHT_HERE}\n")              format_frames(self.frames, w, formatDetail)          elif not detail == "brief":              # Yeah, it's not really a traceback, despite looking like one... diff --git a/contrib/python/Twisted/py3/twisted/scripts/trial.py b/contrib/python/Twisted/py3/twisted/scripts/trial.py index 531fe46ce17..f3259ed21b7 100644 --- a/contrib/python/Twisted/py3/twisted/scripts/trial.py +++ b/contrib/python/Twisted/py3/twisted/scripts/trial.py @@ -36,6 +36,39 @@ TBFORMAT_MAP = {  } +def _autoJobs() -> int: +    """ +    Heuristically guess the number of job workers to run. + +    When ``os.process_cpu_count()`` is available (Python 3.13+), +    return the number of logical CPUs usable by the current +    process. This respects the ``PYTHON_CPU_COUNT`` environment +    variable and/or ``python -X cpu_count`` flag. + +    Otherwise, if ``os.sched_getaffinity()`` is available (on some +    Unixes) this returns the number of CPUs this process is +    restricted to, under the assumption that this affinity will +    be inherited. + +    Otherwise, consult ``os.cpu_count()`` to get the number of +    logical CPUs. + +    Failing all else, return 1. + +    @returns: A strictly positive integer. +    """ +    number: Optional[int] +    if getattr(os, "process_cpu_count", None) is not None: +        number = os.process_cpu_count()  # type: ignore[attr-defined] +    elif getattr(os, "sched_getaffinity", None) is not None: +        number = len(os.sched_getaffinity(0)) +    else: +        number = os.cpu_count() +    if number is None or number < 1: +        return 1 +    return number + +  def _parseLocalVariables(line):      """      Accepts a single line in Emacs local variable declaration format and @@ -477,18 +510,22 @@ class Options(_BasicOptions, usage.Options, app.ReactorSelectionMixin):      def opt_jobs(self, number):          """ -        Number of local workers to run, a strictly positive integer. +        Number of local workers to run, a strictly positive integer or 'auto' +        to spawn one worker for each available CPU.          
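The fallback order that the _autoJobs docstring describes can be exercised on its own. The sketch below mirrors that order (process_cpu_count on Python 3.13+, then sched_getaffinity where present, then cpu_count, else 1) but is an illustration rather than the exact Twisted code; with the option handler that follows, the runner should accept an invocation like trial --jobs=auto <package> in place of a hand-picked integer.

import os


def guess_jobs() -> int:
    """Pick a worker count the way trial's --jobs=auto is documented to."""
    if hasattr(os, "process_cpu_count"):    # Python 3.13+
        n = os.process_cpu_count()
    elif hasattr(os, "sched_getaffinity"):  # some Unixes
        n = len(os.sched_getaffinity(0))
    else:
        n = os.cpu_count()                  # may return None
    return n if n and n >= 1 else 1


print(guess_jobs())  # e.g. 8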
""" -        try: -            number = int(number) -        except ValueError: -            raise usage.UsageError( -                "Expecting integer argument to jobs, got '%s'" % number -            ) -        if number <= 0: -            raise usage.UsageError( -                "Argument to jobs must be a strictly positive integer" -            ) +        if number == "auto": +            number = _autoJobs() +        else: +            try: +                number = int(number) +            except ValueError: +                raise usage.UsageError( +                    "Expecting integer argument to jobs, got '%s'" % number +                ) +            if number <= 0: +                raise usage.UsageError( +                    "Argument to jobs must be a strictly positive integer or 'auto'" +                )          self["jobs"] = number      def _getWorkerArguments(self): diff --git a/contrib/python/Twisted/py3/twisted/spread/pb.py b/contrib/python/Twisted/py3/twisted/spread/pb.py index 1a58dc6c59f..031292e318c 100644 --- a/contrib/python/Twisted/py3/twisted/spread/pb.py +++ b/contrib/python/Twisted/py3/twisted/spread/pb.py @@ -520,7 +520,7 @@ setUnjellyableForClass(CopyableFailure, CopiedFailure)  def failure2Copyable(fail, unsafeTracebacks=0): -    f = _newInstance(CopyableFailure, fail.__dict__) +    f = _newInstance(CopyableFailure, fail.__getstate__())      f.unsafeTracebacks = unsafeTracebacks      return f diff --git a/contrib/python/Twisted/py3/twisted/web/_abnf.py b/contrib/python/Twisted/py3/twisted/web/_abnf.py new file mode 100644 index 00000000000..8029943beab --- /dev/null +++ b/contrib/python/Twisted/py3/twisted/web/_abnf.py @@ -0,0 +1,68 @@ +# -*- test-case-name: twisted.web.test.test_abnf -*- +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +Tools for pedantically processing the HTTP protocol. +""" + + +def _istoken(b: bytes) -> bool: +    """ +    Is the string a token per RFC 9110 section 5.6.2? +    """ +    for c in b: +        if c not in ( +            b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"  # ALPHA +            b"0123456789"  # DIGIT +            b"!#$%&'*+-.^_`|~" +        ): +            return False +    return b != b"" + + +def _decint(data: bytes) -> int: +    """ +    Parse a decimal integer of the form C{1*DIGIT}, i.e. consisting only of +    decimal digits. The integer may be embedded in whitespace (space and +    horizontal tab). This differs from the built-in L{int()} function by +    disallowing a leading C{+} character and various forms of whitespace +    (note that we sanitize linear whitespace in header values in +    L{twisted.web.http_headers.Headers}). + +    @param data: Value to parse. + +    @returns: A non-negative integer. + +    @raises ValueError: When I{value} contains non-decimal characters. +    """ +    data = data.strip(b" \t") +    if not data.isdigit(): +        raise ValueError(f"Value contains non-decimal digits: {data!r}") +    return int(data) + + +def _ishexdigits(b: bytes) -> bool: +    """ +    Is the string case-insensitively hexidecimal? + +    It must be composed of one or more characters in the ranges a-f, A-F +    and 0-9. +    """ +    for c in b: +        if c not in b"0123456789abcdefABCDEF": +            return False +    return b != b"" + + +def _hexint(b: bytes) -> int: +    """ +    Decode a hexadecimal integer. 
+ +    Unlike L{int(b, 16)}, this raises L{ValueError} when the integer has +    a prefix like C{b'0x'}, C{b'+'}, or C{b'-'}, which is desirable when +    parsing network protocols. +    """ +    if not _ishexdigits(b): +        raise ValueError(b) +    return int(b, 16) diff --git a/contrib/python/Twisted/py3/twisted/web/_http2.py b/contrib/python/Twisted/py3/twisted/web/_http2.py index f048c7335ec..301e9ea196b 100644 --- a/contrib/python/Twisted/py3/twisted/web/_http2.py +++ b/contrib/python/Twisted/py3/twisted/web/_http2.py @@ -14,7 +14,6 @@ This API is currently considered private because it's in early draft form. When  it has stabilised, it'll be made public.  """ -  import io  from collections import deque  from typing import List @@ -36,6 +35,7 @@ from twisted.internet.interfaces import (      IProtocol,      IPushProducer,      ISSLTransport, +    ITCPTransport,      ITransport,  )  from twisted.internet.protocol import Protocol @@ -144,6 +144,8 @@ class H2Connection(Protocol, TimeoutMixin):          by the L{twisted.web.http._GenericHTTPChannelProtocol} during upgrade          to HTTP/2.          """ +        if ITCPTransport.providedBy(self.transport): +            self.transport.setTcpNoDelay(True)          self.setTimeout(self.timeOut)          self.conn.initiate_connection()          self.transport.write(self.conn.data_to_send()) @@ -972,7 +974,7 @@ class H2Stream:                  self._request.gotLength(None)          self._request.parseCookies() -        expectContinue = self._request.requestHeaders.getRawHeaders(b"expect") +        expectContinue = self._request.requestHeaders.getRawHeaders(b"Expect")          if expectContinue and expectContinue[0].lower() == b"100-continue":              self._send100Continue() diff --git a/contrib/python/Twisted/py3/twisted/web/_newclient.py b/contrib/python/Twisted/py3/twisted/web/_newclient.py index a151bdae05c..32e12521288 100644 --- a/contrib/python/Twisted/py3/twisted/web/_newclient.py +++ b/contrib/python/Twisted/py3/twisted/web/_newclient.py @@ -35,13 +35,14 @@ from zope.interface import implementer  from twisted.internet.defer import CancelledError, Deferred, fail, succeed  from twisted.internet.error import ConnectionDone -from twisted.internet.interfaces import IConsumer, IPushProducer +from twisted.internet.interfaces import IConsumer, IPushProducer, ITCPTransport  from twisted.internet.protocol import Protocol  from twisted.logger import Logger  from twisted.protocols.basic import LineReceiver  from twisted.python.compat import networkString  from twisted.python.components import proxyForInterface  from twisted.python.failure import Failure +from twisted.web._abnf import _decint, _istoken  from twisted.web.http import (      NO_CONTENT,      NOT_MODIFIED, @@ -478,7 +479,7 @@ class HTTPClientParser(HTTPParser):              self.response._bodyDataFinished()          else:              transferEncodingHeaders = self.connHeaders.getRawHeaders( -                b"transfer-encoding" +                b"Transfer-Encoding"              )              if transferEncodingHeaders:                  # This could be a KeyError.  
However, that would mean we do not @@ -556,35 +557,6 @@ class HTTPClientParser(HTTPParser):              del self._responseDeferred -_VALID_METHOD = re.compile( -    rb"\A[%s]+\Z" -    % ( -        bytes().join( -            ( -                b"!", -                b"#", -                b"$", -                b"%", -                b"&", -                b"'", -                b"*", -                b"+", -                b"-", -                b".", -                b"^", -                b"_", -                b"`", -                b"|", -                b"~", -                b"\x30-\x39", -                b"\x41-\x5a", -                b"\x61-\x7a", -            ), -        ), -    ), -) - -  def _ensureValidMethod(method):      """      An HTTP method is an HTTP token, which consists of any visible @@ -603,7 +575,7 @@ def _ensureValidMethod(method):          U{https://tools.ietf.org/html/rfc7230#section-3.2.6},          U{https://tools.ietf.org/html/rfc5234#appendix-B.1}      """ -    if _VALID_METHOD.match(method): +    if _istoken(method):          return method      raise ValueError(f"Invalid method {method!r}") @@ -634,27 +606,6 @@ def _ensureValidURI(uri):      raise ValueError(f"Invalid URI {uri!r}") -def _decint(data: bytes) -> int: -    """ -    Parse a decimal integer of the form C{1*DIGIT}, i.e. consisting only of -    decimal digits. The integer may be embedded in whitespace (space and -    horizontal tab). This differs from the built-in L{int()} function by -    disallowing a leading C{+} character and various forms of whitespace -    (note that we sanitize linear whitespace in header values in -    L{twisted.web.http_headers.Headers}). - -    @param data: Value to parse. - -    @returns: A non-negative integer. - -    @raises ValueError: When I{value} contains non-decimal characters. -    """ -    data = data.strip(b" \t") -    if not data.isdigit(): -        raise ValueError(f"Value contains non-decimal digits: {data!r}") -    return int(data) - -  def _contentLength(connHeaders: Headers) -> Optional[int]:      """      Parse the I{Content-Length} connection header. 
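The reason for _decint and _hexint (and for _istoken now guarding _ensureValidMethod) is that the built-in parsers are too permissive for HTTP framing fields. A short demonstration, assuming the private twisted.web._abnf module added in this change is importable:

from twisted.web._abnf import _decint, _hexint

print(int(b"+42"))        # 42 -- int() tolerates a sign
print(int(b"0x1a", 16))   # 26 -- and an 0x prefix when base is 16
print(_decint(b" 42\t"))  # 42 -- only SP/HTAB padding is allowed
print(_hexint(b"1a"))     # 26

for bad in (b"+42", b"42 7"):  # sign and embedded space are both refused
    try:
        _decint(bad)
    except ValueError as exc:
        print("rejected:", exc)
# _hexint(b"0x1a") and _hexint(b"-1") raise ValueError the same way.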
@@ -681,7 +632,7 @@ def _contentLength(connHeaders: Headers) -> Optional[int]:      @see: U{https://datatracker.ietf.org/doc/html/rfc9110#section-8.6}      """ -    headers = connHeaders.getRawHeaders(b"content-length") +    headers = connHeaders.getRawHeaders(b"Content-Length")      if headers is None:          return None @@ -781,7 +732,7 @@ class Request:          return getattr(self._parsedURI, "toBytes", lambda: None)()      def _writeHeaders(self, transport, TEorCL): -        hosts = self.headers.getRawHeaders(b"host", ()) +        hosts = self.headers.getRawHeaders(b"Host", ())          if len(hosts) != 1:              raise BadHeaders("Exactly one Host header required") @@ -1547,6 +1498,10 @@ class HTTP11ClientProtocol(Protocol):          self._quiescentCallback = quiescentCallback          self._abortDeferreds = [] +    def connectionMade(self) -> None: +        if ITCPTransport.providedBy(self.transport): +            self.transport.setTcpNoDelay(True) +      @property      def state(self):          return self._state @@ -1661,7 +1616,7 @@ class HTTP11ClientProtocol(Protocol):              return          reason = ConnectionDone("synthetic!") -        connHeaders = self._parser.connHeaders.getRawHeaders(b"connection", ()) +        connHeaders = self._parser.connHeaders.getRawHeaders(b"Connection", ())          if (              (b"close" in connHeaders)              or self._state != "QUIESCENT" diff --git a/contrib/python/Twisted/py3/twisted/web/_stan.py b/contrib/python/Twisted/py3/twisted/web/_stan.py index 88e82d2dfe2..b165bdb6fda 100644 --- a/contrib/python/Twisted/py3/twisted/web/_stan.py +++ b/contrib/python/Twisted/py3/twisted/web/_stan.py @@ -32,7 +32,7 @@ if TYPE_CHECKING:      from twisted.web.template import Flattenable [email protected](hash=False, eq=False, auto_attribs=True) [email protected](unsafe_hash=False, eq=False, auto_attribs=True)  class slot:      """      Marker for markup insertion in a template. @@ -82,7 +82,7 @@ class slot:      """ [email protected](hash=False, eq=False, repr=False, auto_attribs=True) [email protected](unsafe_hash=False, eq=False, repr=False, auto_attribs=True)  class Tag:      """      A L{Tag} represents an XML tags with a tag name, attributes, and children. @@ -314,7 +314,7 @@ voidElements = (  ) [email protected](hash=False, eq=False, repr=False, auto_attribs=True) [email protected](unsafe_hash=False, eq=False, repr=False, auto_attribs=True)  class CDATA:      """      A C{<![CDATA[]]>} block from a template.  Given a separate representation in @@ -329,7 +329,7 @@ class CDATA:          return f"CDATA({self.data!r})" [email protected](hash=False, eq=False, repr=False, auto_attribs=True) [email protected](unsafe_hash=False, eq=False, repr=False, auto_attribs=True)  class Comment:      """      A C{<!-- -->} comment from a template.  Given a separate representation in @@ -344,7 +344,7 @@ class Comment:          return f"Comment({self.data!r})" [email protected](hash=False, eq=False, repr=False, auto_attribs=True) [email protected](unsafe_hash=False, eq=False, repr=False, auto_attribs=True)  class CharRef:      """      A numeric character reference.  
Given a separate representation in the DOM diff --git a/contrib/python/Twisted/py3/twisted/web/client.py b/contrib/python/Twisted/py3/twisted/web/client.py index b06f1bef286..cfd945d5d40 100644 --- a/contrib/python/Twisted/py3/twisted/web/client.py +++ b/contrib/python/Twisted/py3/twisted/web/client.py @@ -37,7 +37,7 @@ from twisted.python.deprecate import (  from twisted.python.failure import Failure  from twisted.web import error, http  from twisted.web._newclient import _ensureValidMethod, _ensureValidURI -from twisted.web.http_headers import Headers +from twisted.web.http_headers import Headers, _nameEncoder  from twisted.web.iweb import (      UNKNOWN_LENGTH,      IAgent, @@ -924,10 +924,10 @@ class _AgentBase:          # Create minimal headers, if necessary:          if headers is None:              headers = Headers() -        if not headers.hasHeader(b"host"): +        if not headers.hasHeader(b"Host"):              headers = headers.copy()              headers.addRawHeader( -                b"host", +                b"Host",                  self._computeHostValue(                      parsedURI.scheme, parsedURI.host, parsedURI.port                  ), @@ -1367,12 +1367,12 @@ class CookieAgent:          lastRequest = _FakeStdlibRequest(uri)          # Setting a cookie header explicitly will disable automatic request          # cookies. -        if not actualHeaders.hasHeader(b"cookie"): +        if not actualHeaders.hasHeader(b"Cookie"):              self.cookieJar.add_cookie_header(lastRequest)              cookieHeader = lastRequest.get_header("Cookie", None)              if cookieHeader is not None:                  actualHeaders = actualHeaders.copy() -                actualHeaders.addRawHeader(b"cookie", networkString(cookieHeader)) +                actualHeaders.addRawHeader(b"Cookie", networkString(cookieHeader))          return self._agent.request(              method, uri, actualHeaders, bodyProducer @@ -1502,7 +1502,7 @@ class ContentDecoderAgent:              headers = Headers()          else:              headers = headers.copy() -        headers.addRawHeader(b"accept-encoding", self._supported) +        headers.addRawHeader(b"Accept-Encoding", self._supported)          deferred = self._agent.request(method, uri, headers, bodyProducer)          return deferred.addCallback(self._handleResponse) @@ -1510,7 +1510,7 @@ class ContentDecoderAgent:          """          Check if the response is encoded, and wrap it to handle decompression.          
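For context on the Accept-Encoding handling above, transparent gzip support on the client side is normally obtained by wrapping an agent in ContentDecoderAgent. A brief usage sketch; Agent, GzipDecoder and readBody are the standard twisted.web.client helpers rather than anything introduced in this hunk, and the URL is arbitrary.

from twisted.internet import reactor
from twisted.web.client import Agent, ContentDecoderAgent, GzipDecoder, readBody

# The wrapper adds Accept-Encoding: gzip to requests and unwraps the body
# when the server answers with Content-Encoding: gzip.
agent = ContentDecoderAgent(Agent(reactor), [(b"gzip", GzipDecoder)])

d = agent.request(b"GET", b"http://example.com/")
d.addCallback(readBody)
d.addCallback(lambda body: print(len(body), "bytes after decompression"))
d.addBoth(lambda _: reactor.stop())
reactor.run()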
""" -        contentEncodingHeaders = response.headers.getRawHeaders(b"content-encoding", []) +        contentEncodingHeaders = response.headers.getRawHeaders(b"Content-Encoding", [])          contentEncodingHeaders = b",".join(contentEncodingHeaders).split(b",")          while contentEncodingHeaders:              name = contentEncodingHeaders.pop().strip() @@ -1523,14 +1523,14 @@ class ContentDecoderAgent:                  break          if contentEncodingHeaders:              response.headers.setRawHeaders( -                b"content-encoding", [b",".join(contentEncodingHeaders)] +                b"Content-Encoding", [b",".join(contentEncodingHeaders)]              )          else: -            response.headers.removeHeader(b"content-encoding") +            response.headers.removeHeader(b"Content-Encoding")          return response -_canonicalHeaderName = Headers()._encodeName +_canonicalHeaderName = _nameEncoder.encode  _defaultSensitiveHeaders = frozenset(      [          b"Authorization", diff --git a/contrib/python/Twisted/py3/twisted/web/http.py b/contrib/python/Twisted/py3/twisted/web/http.py index e80f6cb365f..8aa31dfe306 100644 --- a/contrib/python/Twisted/py3/twisted/web/http.py +++ b/contrib/python/Twisted/py3/twisted/web/http.py @@ -107,11 +107,11 @@ import math  import os  import re  import tempfile -import time  import warnings  from email import message_from_bytes  from email.message import EmailMessage, Message  from io import BufferedIOBase, BytesIO, TextIOWrapper +from time import gmtime, time  from typing import (      AnyStr,      Callable, @@ -134,7 +134,13 @@ from incremental import Version  from twisted.internet import address, interfaces, protocol  from twisted.internet._producer_helpers import _PullToPush  from twisted.internet.defer import Deferred -from twisted.internet.interfaces import IAddress, IDelayedCall, IProtocol, IReactorTime +from twisted.internet.interfaces import ( +    IAddress, +    IDelayedCall, +    IProtocol, +    IReactorTime, +    ITCPTransport, +)  from twisted.internet.protocol import Protocol  from twisted.logger import Logger  from twisted.protocols import basic, policies @@ -143,6 +149,7 @@ from twisted.python.compat import nativeString, networkString  from twisted.python.components import proxyForInterface  from twisted.python.deprecate import deprecated, deprecatedModuleAttribute  from twisted.python.failure import Failure +from twisted.web._abnf import _hexint, _istoken  from twisted.web._responses import (      ACCEPTED,      BAD_GATEWAY, @@ -190,7 +197,12 @@ from twisted.web._responses import (      UNSUPPORTED_MEDIA_TYPE,      USE_PROXY,  ) -from twisted.web.http_headers import Headers, _sanitizeLinearWhitespace +from twisted.web.http_headers import ( +    Headers, +    InvalidHeaderName, +    _nameEncoder, +    _sanitizeLinearWhitespace, +)  from twisted.web.iweb import IAccessLogFormatter, INonQueuedRequestFactory, IRequest  try: @@ -217,8 +229,7 @@ responses = RESPONSES  # datetime parsing and formatting  weekdayname = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] -monthname = [ -    None, +_months = [      "Jan",      "Feb",      "Mar", @@ -232,6 +243,9 @@ monthname = [      "Nov",      "Dec",  ] +monthname = [None] + _months +_weekdaynameBytes = [s.encode("ascii") for s in weekdayname] +_monthnameBytes = [None] + [s.encode("ascii") for s in _months]  weekdayname_lower = [name.lower() for name in weekdayname]  monthname_lower = [name and name.lower() for name in monthname] @@ -391,14 +405,18 @@ def 
datetimeToString(msSinceEpoch=None):      @rtype: C{bytes}      """ -    if msSinceEpoch == None: -        msSinceEpoch = time.time() -    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch) -    s = networkString( -        "%s, %02d %3s %4d %02d:%02d:%02d GMT" -        % (weekdayname[wd], day, monthname[month], year, hh, mm, ss) +    year, month, day, hh, mm, ss, wd, _, _ = ( +        gmtime() if msSinceEpoch is None else gmtime(msSinceEpoch) +    ) +    return b"%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( +        _weekdaynameBytes[wd], +        day, +        _monthnameBytes[month], +        year, +        hh, +        mm, +        ss,      ) -    return s  def datetimeToLogString(msSinceEpoch=None): @@ -408,8 +426,9 @@ def datetimeToLogString(msSinceEpoch=None):      @rtype: C{str}      """      if msSinceEpoch == None: -        msSinceEpoch = time.time() -    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch) +        # This code path is apparently never used in practice inside Twisted. +        msSinceEpoch = time()  # pragma: no cover +    year, month, day, hh, mm, ss, wd, y, z = gmtime(msSinceEpoch)      s = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (          day,          monthname[month], @@ -507,46 +526,6 @@ def toChunk(data):      return (networkString(f"{len(data):x}"), b"\r\n", data, b"\r\n") -def _istoken(b: bytes) -> bool: -    """ -    Is the string a token per RFC 9110 section 5.6.2? -    """ -    for c in b: -        if c not in ( -            b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"  # ALPHA -            b"0123456789"  # DIGIT -            b"!#$%^'*+-.^_`|~" -        ): -            return False -    return b != b"" - - -def _ishexdigits(b: bytes) -> bool: -    """ -    Is the string case-insensitively hexidecimal? - -    It must be composed of one or more characters in the ranges a-f, A-F -    and 0-9. -    """ -    for c in b: -        if c not in b"0123456789abcdefABCDEF": -            return False -    return b != b"" - - -def _hexint(b: bytes) -> int: -    """ -    Decode a hexadecimal integer. - -    Unlike L{int(b, 16)}, this raises L{ValueError} when the integer has -    a prefix like C{b'0x'}, C{b'+'}, or C{b'-'}, which is desirable when -    parsing network protocols. -    """ -    if not _ishexdigits(b): -        raise ValueError(b) -    return int(b, 16) - -  def fromChunk(data: bytes) -> Tuple[bytes, bytes]:      """      Convert chunk to string. @@ -1031,7 +1010,7 @@ class Request:          This method is not intended for users.          """ -        cookieheaders = self.requestHeaders.getRawHeaders(b"cookie") +        cookieheaders = self.requestHeaders.getRawHeaders(b"Cookie")          if cookieheaders is None:              return @@ -1086,7 +1065,7 @@ class Request:          # Argument processing          args = self.args -        ctype = self.requestHeaders.getRawHeaders(b"content-type") +        ctype = self.requestHeaders.getRawHeaders(b"Content-Type")          if ctype is not None:              ctype = ctype[0] @@ -1270,7 +1249,7 @@ class Request:          """          if self.finished:              raise RuntimeError( -                "Request.write called on a request after " "Request.finish was called." +                "Request.write called on a request after Request.finish was called."              )          if self._disconnected: @@ -1290,7 +1269,7 @@ class Request:              # persistent connections.              
if (                  (version == b"HTTP/1.1") -                and (self.responseHeaders.getRawHeaders(b"content-length") is None) +                and (self.responseHeaders.getRawHeaders(b"Content-Length") is None)                  and self.method != b"HEAD"                  and self.code not in NO_BODY_CODES              ): @@ -1298,14 +1277,14 @@ class Request:                  self.chunked = 1              if self.lastModified is not None: -                if self.responseHeaders.hasHeader(b"last-modified"): +                if self.responseHeaders.hasHeader(b"Last-Modified"):                      self._log.info(                          "Warning: last-modified specified both in"                          " header list and lastModified attribute."                      )                  else:                      self.responseHeaders.setRawHeaders( -                        b"last-modified", [datetimeToString(self.lastModified)] +                        b"Last-Modified", [datetimeToString(self.lastModified)]                      )              if self.etag is not None: @@ -1483,7 +1462,7 @@ class Request:          @type url: L{bytes} or L{str}          """          self.setResponseCode(FOUND) -        self.setHeader(b"location", url) +        self.setHeader(b"Location", url)      def setLastModified(self, when):          """ @@ -1510,7 +1489,7 @@ class Request:          if (not self.lastModified) or (self.lastModified < when):              self.lastModified = when -        modifiedSince = self.getHeader(b"if-modified-since") +        modifiedSince = self.getHeader(b"If-Modified-Since")          if modifiedSince:              firstPart = modifiedSince.split(b";", 1)[0]              try: @@ -1544,7 +1523,7 @@ class Request:          if etag:              self.etag = etag -        tags = self.getHeader(b"if-none-match") +        tags = self.getHeader(b"If-None-Match")          if tags:              tags = tags.split()              if (etag in tags) or (b"*" in tags): @@ -1578,7 +1557,7 @@ class Request:          @rtype: C{bytes}          """ -        host = self.getHeader(b"host") +        host = self.getHeader(b"Host")          if host is not None:              match = _hostHeaderExpression.match(host)              if match is not None: @@ -1626,7 +1605,7 @@ class Request:              hostHeader = host          else:              hostHeader = b"%b:%d" % (host, port) -        self.requestHeaders.setRawHeaders(b"host", [hostHeader]) +        self.requestHeaders.setRawHeaders(b"Host", [hostHeader])          self.host = address.IPv4Address("TCP", host, port)      @deprecated(Version("Twisted", 18, 4, 0), replacement="getClientAddress") @@ -1828,6 +1807,8 @@ class _IdentityTransferDecoder:          chunk.      
""" +    __slots__ = ["contentLength", "dataCallback", "finishCallback"] +      def __init__(self, contentLength, dataCallback, finishCallback):          self.contentLength = contentLength          self.dataCallback = dataCallback @@ -2324,7 +2305,7 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):      totalHeadersSize = 16384      abortTimeout = 15 -    length = 0 +    length: Optional[int] = 0      persistent = 1      __header = b""      __first_line = 1 @@ -2351,6 +2332,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):          self._transferDecoder = None      def connectionMade(self): +        if ITCPTransport.providedBy(self.transport): +            self.transport.setTcpNoDelay(True)          self.setTimeout(self.timeOut)          self._networkProducer = interfaces.IPushProducer(              self.transport, _NoPushProducer() @@ -2431,6 +2414,14 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):          self._dataBuffer.append(data)          self.allContentReceived() +    def _failChooseTransferDecoder(self) -> bool: +        """ +        Utility to indicate failure to choose a decoder. +        """ +        self._respondToBadRequestAndDisconnect() +        self.length = None +        return False +      def _maybeChooseTransferDecoder(self, header, data):          """          If the provided header is C{content-length} or @@ -2438,24 +2429,15 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):          Returns L{True} if the request can proceed and L{False} if not.          """ - -        def fail(): -            self._respondToBadRequestAndDisconnect() -            self.length = None -            return False -          # Can this header determine the length? -        if header == b"content-length": +        if header == b"Content-Length":              if not data.isdigit(): -                return fail() -            try: -                length = int(data) -            except ValueError: -                return fail() +                return self._failChooseTransferDecoder() +            length = int(data)              newTransferDecoder = _IdentityTransferDecoder(                  length, self.requests[-1].handleContentChunk, self._finishRequestBody              ) -        elif header == b"transfer-encoding": +        elif header == b"Transfer-Encoding":              # XXX Rather poorly tested code block, apparently only exercised by              # test_chunkedEncoding              if data.lower() == b"chunked": @@ -2466,13 +2448,13 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):              elif data.lower() == b"identity":                  return True              else: -                return fail() +                return self._failChooseTransferDecoder()          else:              # It's not a length related header, so exit              return True          if self._transferDecoder is not None: -            return fail() +            return self._failChooseTransferDecoder()          else:              self.length = length              self._transferDecoder = newTransferDecoder @@ -2480,7 +2462,7 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):      def headerReceived(self, line):          """ -        Do pre-processing (for content-length) and store this header away. +        Do pre-processing (for Content-Length) and store this header away.          Enforce the per-request header limit.          
@type line: C{bytes} @@ -2496,13 +2478,17 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):              self._respondToBadRequestAndDisconnect()              return False -        # Header names must be tokens, per RFC 9110 section 5.1. -        if not _istoken(header): +        # Canonicalize the header name. +        try: +            header = _nameEncoder.encode(header) +        except InvalidHeaderName:              self._respondToBadRequestAndDisconnect()              return False -        header = header.lower()          data = data.strip(b" \t") +        if b"\x00" in data: +            self._respondToBadRequestAndDisconnect() +            return False          if not self._maybeChooseTransferDecoder(header, data):              return False @@ -2592,7 +2578,7 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):          req.gotLength(self.length)          # Handle 'Expect: 100-continue' with automated 100 response code,          # a simplistic implementation of RFC 2686 8.2.3: -        expectContinue = req.requestHeaders.getRawHeaders(b"expect") +        expectContinue = req.requestHeaders.getRawHeaders(b"Expect")          if (              expectContinue              and expectContinue[0].lower() == b"100-continue" @@ -2617,7 +2603,7 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):              must be closed in order to indicate the completion of the response              to C{request}.          """ -        connection = request.requestHeaders.getRawHeaders(b"connection") +        connection = request.requestHeaders.getRawHeaders(b"Connection")          if connection:              tokens = [t.lower() for t in connection[0].split(b" ")]          else: @@ -2636,7 +2622,7 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):          if version == b"HTTP/1.1":              if b"close" in tokens: -                request.responseHeaders.setRawHeaders(b"connection", [b"close"]) +                request.responseHeaders.setRawHeaders(b"Connection", [b"close"])                  return False              else:                  return True @@ -3034,7 +3020,7 @@ class _XForwardedForRequest(proxyForInterface(IRequest, "_request")):  # type: i              expected by L{combinedLogFormatter}.          """          host = ( -            self._request.requestHeaders.getRawHeaders(b"x-forwarded-for", [b"-"])[0] +            self._request.requestHeaders.getRawHeaders(b"X-Forwarded-For", [b"-"])[0]              .split(b",")[0]              .strip()          ) diff --git a/contrib/python/Twisted/py3/twisted/web/http_headers.py b/contrib/python/Twisted/py3/twisted/web/http_headers.py index 8b1d41adb64..88b653439b5 100644 --- a/contrib/python/Twisted/py3/twisted/web/http_headers.py +++ b/contrib/python/Twisted/py3/twisted/web/http_headers.py @@ -22,18 +22,26 @@ from typing import (  )  from twisted.python.compat import cmp, comparable +from twisted.web._abnf import _istoken + + +class InvalidHeaderName(ValueError): +    """ +    HTTP header names must be tokens, per RFC 9110 section 5.1. +    """ +  _T = TypeVar("_T")  def _sanitizeLinearWhitespace(headerComponent: bytes) -> bytes:      r""" -    Replace linear whitespace (C{\n}, C{\r\n}, C{\r}) in a header key -    or value with a single space. +    Replace linear whitespace (C{\n}, C{\r\n}, C{\r}) in a header +    value with a single space. -    @param headerComponent: The header key or value to sanitize. +    @param headerComponent: The header value to sanitize. 
-    @return: The sanitized header key or value. +    @return: The sanitized header value.      """      return b" ".join(headerComponent.splitlines()) @@ -53,31 +61,10 @@ class Headers:      ensure no decoding or encoding is done, and L{Headers} will treat the keys      and values as opaque byte strings. -    @cvar _caseMappings: A L{dict} that maps lowercase header names -        to their canonicalized representation, for headers with unconventional -        capitalization. - -    @cvar _canonicalHeaderCache: A L{dict} that maps header names to their -        canonicalized representation. -      @ivar _rawHeaders: A L{dict} mapping header names as L{bytes} to L{list}s of          header values as L{bytes}.      """ -    _caseMappings: ClassVar[Dict[bytes, bytes]] = { -        b"content-md5": b"Content-MD5", -        b"dnt": b"DNT", -        b"etag": b"ETag", -        b"p3p": b"P3P", -        b"te": b"TE", -        b"www-authenticate": b"WWW-Authenticate", -        b"x-xss-protection": b"X-XSS-Protection", -    } - -    _canonicalHeaderCache: ClassVar[Dict[Union[bytes, str], bytes]] = {} - -    _MAX_CACHED_HEADERS: ClassVar[int] = 10_000 -      __slots__ = ["_rawHeaders"]      def __init__( @@ -109,39 +96,6 @@ class Headers:              )          return NotImplemented -    def _encodeName(self, name: Union[str, bytes]) -> bytes: -        """ -        Encode the name of a header (eg 'Content-Type') to an ISO-8859-1 -        encoded bytestring if required.  It will be canonicalized and -        whitespace-sanitized. - -        @param name: A HTTP header name - -        @return: C{name}, encoded if required, lowercased -        """ -        if canonicalName := self._canonicalHeaderCache.get(name, None): -            return canonicalName - -        bytes_name = name.encode("iso-8859-1") if isinstance(name, str) else name - -        if bytes_name.lower() in self._caseMappings: -            # Some headers have special capitalization: -            result = self._caseMappings[bytes_name.lower()] -        else: -            result = _sanitizeLinearWhitespace( -                b"-".join([word.capitalize() for word in bytes_name.split(b"-")]) -            ) - -        # In general, we should only see a very small number of header -        # variations in the real world, so caching them is fine. However, an -        # attacker could generate infinite header variations to fill up RAM, so -        # we cap how many we cache. The performance degradation from lack of -        # caching won't be that bad, and legit traffic won't hit it. -        if len(self._canonicalHeaderCache) < self._MAX_CACHED_HEADERS: -            self._canonicalHeaderCache[name] = result - -        return result -      def copy(self):          """          Return a copy of itself with the same headers set. @@ -158,7 +112,7 @@ class Headers:          @return: C{True} if the header exists, otherwise C{False}.          
""" -        return self._encodeName(name) in self._rawHeaders +        return _nameEncoder.encode(name) in self._rawHeaders      def removeHeader(self, name: AnyStr) -> None:          """ @@ -168,7 +122,7 @@ class Headers:          @return: L{None}          """ -        self._rawHeaders.pop(self._encodeName(name), None) +        self._rawHeaders.pop(_nameEncoder.encode(name), None)      def setRawHeaders(          self, name: Union[str, bytes], values: Sequence[Union[str, bytes]] @@ -186,7 +140,7 @@ class Headers:          @return: L{None}          """ -        _name = self._encodeName(name) +        _name = _nameEncoder.encode(name)          encodedValues: List[bytes] = []          for v in values:              if isinstance(v, str): @@ -205,7 +159,7 @@ class Headers:          @param value: The value to set for the named header.          """ -        self._rawHeaders.setdefault(self._encodeName(name), []).append( +        self._rawHeaders.setdefault(_nameEncoder.encode(name), []).append(              _sanitizeLinearWhitespace(                  value.encode("utf8") if isinstance(value, str) else value              ) @@ -234,7 +188,7 @@ class Headers:          @return: If the named header is present, a sequence of its              values.  Otherwise, C{default}.          """ -        encodedName = self._encodeName(name) +        encodedName = _nameEncoder.encode(name)          values = self._rawHeaders.get(encodedName, [])          if not values:              return default @@ -252,4 +206,79 @@ class Headers:          return iter(self._rawHeaders.items()) +class _NameEncoder: +    """ +    C{_NameEncoder} converts HTTP header names to L{bytes} and canonicalizies +    their capitalization. + +    @cvar _caseMappings: A L{dict} that maps conventionally-capitalized +        header names to their canonicalized representation, for headers with +        unconventional capitalization. + +    @cvar _canonicalHeaderCache: A L{dict} that maps header names to their +        canonicalized representation. +    """ + +    __slots__ = ("_canonicalHeaderCache",) +    _canonicalHeaderCache: Dict[Union[bytes, str], bytes] + +    _caseMappings: ClassVar[Dict[bytes, bytes]] = { +        b"Content-Md5": b"Content-MD5", +        b"Dnt": b"DNT", +        b"Etag": b"ETag", +        b"P3p": b"P3P", +        b"Te": b"TE", +        b"Www-Authenticate": b"WWW-Authenticate", +        b"X-Xss-Protection": b"X-XSS-Protection", +    } + +    _MAX_CACHED_HEADERS: ClassVar[int] = 10_000 + +    def __init__(self): +        self._canonicalHeaderCache = {} + +    def encode(self, name: Union[str, bytes]) -> bytes: +        """ +        Encode the name of a header (eg 'Content-Type') to an ISO-8859-1 +        bytestring if required. It will be canonicalized to Http-Header-Case. + +        @raises InvalidHeaderName: +            If the header name contains invalid characters like whitespace +            or NUL. 
+ +        @param name: An HTTP header name + +        @return: C{name}, encoded if required, in Header-Case +        """ +        if canonicalName := self._canonicalHeaderCache.get(name): +            return canonicalName + +        bytes_name = name.encode("iso-8859-1") if isinstance(name, str) else name + +        if not _istoken(bytes_name): +            raise InvalidHeaderName(bytes_name) + +        result = b"-".join([word.capitalize() for word in bytes_name.split(b"-")]) + +        # Some headers have special capitalization: +        if result in self._caseMappings: +            result = self._caseMappings[result] + +        # In general, we should only see a very small number of header +        # variations in the real world, so caching them is fine. However, an +        # attacker could generate infinite header variations to fill up RAM, so +        # we cap how many we cache. The performance degradation from lack of +        # caching won't be that bad, and legit traffic won't hit it. +        if len(self._canonicalHeaderCache) < self._MAX_CACHED_HEADERS: +            self._canonicalHeaderCache[name] = result + +        return result + + +_nameEncoder = _NameEncoder() +""" +The global name encoder. +""" + +  __all__ = ["Headers"] diff --git a/contrib/python/Twisted/py3/twisted/web/server.py b/contrib/python/Twisted/py3/twisted/web/server.py index cfcefad7f36..1a4318022b9 100644 --- a/contrib/python/Twisted/py3/twisted/web/server.py +++ b/contrib/python/Twisted/py3/twisted/web/server.py @@ -13,7 +13,6 @@ This is a web server which integrates with the twisted.internet infrastructure.      value.  """ -  import copy  import os  import re @@ -192,8 +191,8 @@ class Request(Copyable, http.Request, components.Componentized):          self.site = self.channel.site          # set various default headers -        self.setHeader(b"server", version) -        self.setHeader(b"date", datetimeToString()) +        self.setHeader(b"Server", version) +        self.setHeader(b"Date", datetimeToString())          # Resource Identification          self.prepath = [] @@ -228,8 +227,8 @@ class Request(Copyable, http.Request, components.Componentized):              # is a Content-Length header set to 0, as empty bodies don't need              # a content-type.              needsCT = self.code not in (NOT_MODIFIED, NO_CONTENT) -            contentType = self.responseHeaders.getRawHeaders(b"content-type") -            contentLength = self.responseHeaders.getRawHeaders(b"content-length") +            contentType = self.responseHeaders.getRawHeaders(b"Content-Type") +            contentLength = self.responseHeaders.getRawHeaders(b"Content-Length")              contentLengthZero = contentLength and (contentLength[0] == b"0")              if ( @@ -239,7 +238,7 @@ class Request(Copyable, http.Request, components.Componentized):                  and not contentLengthZero              ):                  self.responseHeaders.setRawHeaders( -                    b"content-type", [self.defaultContentType] +                    b"Content-Type", [self.defaultContentType]                  )          # Only let the write happen if we're not generating a HEAD response by @@ -298,7 +297,7 @@ class Request(Copyable, http.Request, components.Componentized):                      )                      # Oh well, I guess we won't include the content length.                  
else: -                    self.setHeader(b"content-length", b"%d" % (len(body),)) +                    self.setHeader(b"Content-Length", b"%d" % (len(body),))                  self._inFakeHead = False                  self.method = b"HEAD" @@ -361,10 +360,10 @@ class Request(Copyable, http.Request, components.Componentized):                      slf=self,                      resrc=resrc,                  ) -                self.setHeader(b"content-length", b"%d" % (len(body),)) +                self.setHeader(b"Content-Length", b"%d" % (len(body),))              self.write(b"")          else: -            self.setHeader(b"content-length", b"%d" % (len(body),)) +            self.setHeader(b"Content-Length", b"%d" % (len(body),))              self.write(body)          self.finish() @@ -397,8 +396,8 @@ class Request(Copyable, http.Request, components.Componentized):              )          self.setResponseCode(http.INTERNAL_SERVER_ERROR) -        self.setHeader(b"content-type", b"text/html") -        self.setHeader(b"content-length", b"%d" % (len(body),)) +        self.setHeader(b"Content-Type", b"text/html") +        self.setHeader(b"Content-Length", b"%d" % (len(body),))          self.write(body)          self.finish()          return reason @@ -605,16 +604,16 @@ class GzipEncoderFactory:          request if so.          """          acceptHeaders = b",".join( -            request.requestHeaders.getRawHeaders(b"accept-encoding", []) +            request.requestHeaders.getRawHeaders(b"Accept-Encoding", [])          )          if self._gzipCheckRegex.search(acceptHeaders): -            encoding = request.responseHeaders.getRawHeaders(b"content-encoding") +            encoding = request.responseHeaders.getRawHeaders(b"Content-Encoding")              if encoding:                  encoding = b",".join(encoding + [b"gzip"])              else:                  encoding = b"gzip" -            request.responseHeaders.setRawHeaders(b"content-encoding", [encoding]) +            request.responseHeaders.setRawHeaders(b"Content-Encoding", [encoding])              return _GzipEncoder(self.compressLevel, request) @@ -646,7 +645,7 @@ class _GzipEncoder:          if not self._request.startedWriting:              # Remove the content-length header, we can't honor it              # because we compress on the fly. -            self._request.responseHeaders.removeHeader(b"content-length") +            self._request.responseHeaders.removeHeader(b"Content-Length")          return self._zlibCompressor.compress(data)      def finish(self): diff --git a/contrib/python/Twisted/py3/ya.make b/contrib/python/Twisted/py3/ya.make index d74fcea5c50..0ad42217967 100644 --- a/contrib/python/Twisted/py3/ya.make +++ b/contrib/python/Twisted/py3/ya.make @@ -2,7 +2,7 @@  PY3_LIBRARY() -VERSION(24.7.0) +VERSION(24.10.0)  LICENSE(MIT) @@ -35,6 +35,7 @@ PY_SRCS(      twisted/_threads/_threadworker.py      twisted/_version.py      twisted/application/__init__.py +    twisted/application/_client_service.py      twisted/application/app.py      twisted/application/internet.py      twisted/application/reactors.py @@ -388,6 +389,7 @@ PY_SRCS(      twisted/trial/unittest.py      twisted/trial/util.py      twisted/web/__init__.py +    twisted/web/_abnf.py      twisted/web/_auth/__init__.py      twisted/web/_auth/basic.py      twisted/web/_auth/digest.py  | 
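On the server side, the GzipEncoderFactory touched above is typically applied by wrapping a resource in EncodingResourceWrapper. A short usage sketch; the resource class, port number and response body are illustrative, while EncodingResourceWrapper, Site and GzipEncoderFactory are existing twisted.web APIs.

from twisted.internet import reactor
from twisted.web import server
from twisted.web.resource import EncodingResourceWrapper, Resource


class Hello(Resource):
    isLeaf = True

    def render_GET(self, request):
        # Compresses well, so the gzip encoder has something to show for it.
        return b"hello, compressed world\n" * 100


wrapped = EncodingResourceWrapper(Hello(), [server.GzipEncoderFactory()])
reactor.listenTCP(8080, server.Site(wrapped))
reactor.run()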