diff options
author | robot-contrib <robot-contrib@yandex-team.ru> | 2022-05-18 00:43:36 +0300 |
---|---|---|
committer | robot-contrib <robot-contrib@yandex-team.ru> | 2022-05-18 00:43:36 +0300 |
commit | 9e5f436a8b2a27bcc7802e443ea3ef3e41a82a75 (patch) | |
tree | 78b522cab9f76336e62064d4d8ff7c897659b20e /contrib/python | |
parent | 8113a823ffca6451bb5ff8f0334560885a939a24 (diff) | |
download | ydb-9e5f436a8b2a27bcc7802e443ea3ef3e41a82a75.tar.gz |
Update contrib/python/ipython/py3 to 8.3.0
ref:e84342d4d30476f9148137f37fd0c6405fd36f55
Diffstat (limited to 'contrib/python')
208 files changed, 11469 insertions, 7873 deletions
diff --git a/contrib/python/asttokens/.dist-info/METADATA b/contrib/python/asttokens/.dist-info/METADATA new file mode 100644 index 0000000000..7b1ba082cd --- /dev/null +++ b/contrib/python/asttokens/.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: asttokens +Version: 2.0.5 +Summary: Annotate AST trees with source code positions +Home-page: https://github.com/gristlabs/asttokens +Author: Dmitry Sagalovskiy, Grist Labs +Author-email: dmitry@getgrist.com +License: Apache 2.0 +Keywords: code,ast,parse,tokenize,refactor +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Software Development :: Code Generators +Classifier: Topic :: Software Development :: Compilers +Classifier: Topic :: Software Development :: Interpreters +Classifier: Topic :: Software Development :: Pre-processors +Classifier: Environment :: Console +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: six +Provides-Extra: test +Requires-Dist: astroid ; extra == 'test' +Requires-Dist: pytest ; extra == 'test' + +ASTTokens +========= + +.. image:: https://img.shields.io/pypi/v/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. image:: https://img.shields.io/pypi/pyversions/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. 
image:: https://travis-ci.org/gristlabs/asttokens.svg?branch=master + :target: https://travis-ci.org/gristlabs/asttokens +.. image:: https://readthedocs.org/projects/asttokens/badge/?version=latest + :target: http://asttokens.readthedocs.io/en/latest/index.html +.. image:: https://coveralls.io/repos/github/gristlabs/asttokens/badge.svg + :target: https://coveralls.io/github/gristlabs/asttokens + +.. Start of user-guide + +The ``asttokens`` module annotates Python abstract syntax trees (ASTs) with the positions of tokens +and text in the source code that generated them. + +It makes it possible for tools that work with logical AST nodes to find the particular text that +resulted in those nodes, for example for automated refactoring or highlighting. + +Installation +------------ +asttokens is available on PyPI: https://pypi.python.org/pypi/asttokens/:: + + pip install asttokens + +The code is on GitHub: https://github.com/gristlabs/asttokens. + +The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.html. + +Usage +----- +ASTTokens works with both Python2 and Python3. + +ASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_, +AND those built by `astroid <https://github.com/PyCQA/astroid>`_. + +Here's an example: + +.. code-block:: python + + import asttokens, ast + source = "Robot('blue').walk(steps=10*n)" + atok = asttokens.ASTTokens(source, parse=True) + +Once the tree has been marked, nodes get ``.first_token``, ``.last_token`` attributes, and +the ``ASTTokens`` object offers helpful methods: + +.. code-block:: python + + attr_node = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Attribute)) + print(atok.get_text(attr_node)) + start, end = attr_node.last_token.startpos, attr_node.last_token.endpos + print(atok.text[:start] + 'RUN' + atok.text[end:]) + +Which produces this output: + +.. 
code-block:: text + + Robot('blue').walk + Robot('blue').RUN(steps=10*n) + +The ``ASTTokens`` object also offers methods to walk and search the list of tokens that make up +the code (or a particular AST node), which is more useful and powerful than dealing with the text +directly. + + +Contribute +---------- + +To contribute: + +1. Fork this repository, and clone your fork. +2. Install the package with test dependencies (ideally in a virtualenv) with:: + + pip install -e '.[test]' + +3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``. +4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel. +5. By default certain tests which take a very long time to run are skipped, but they are run on travis CI. To run them locally, set the environment variable ``ASTTOKENS_SLOW_TESTS``. For example run ``ASTTOKENS_SLOW_TESTS=1 tox`` to run the full suite of tests. + + diff --git a/contrib/python/asttokens/.dist-info/top_level.txt b/contrib/python/asttokens/.dist-info/top_level.txt new file mode 100644 index 0000000000..7adf4c51fd --- /dev/null +++ b/contrib/python/asttokens/.dist-info/top_level.txt @@ -0,0 +1 @@ +asttokens diff --git a/contrib/python/asttokens/LICENSE b/contrib/python/asttokens/LICENSE new file mode 100644 index 0000000000..8dada3edaf --- /dev/null +++ b/contrib/python/asttokens/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/contrib/python/asttokens/README.rst b/contrib/python/asttokens/README.rst new file mode 100644 index 0000000000..4f88184fcb --- /dev/null +++ b/contrib/python/asttokens/README.rst @@ -0,0 +1,82 @@ +ASTTokens +========= + +.. image:: https://img.shields.io/pypi/v/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. image:: https://img.shields.io/pypi/pyversions/asttokens.svg + :target: https://pypi.python.org/pypi/asttokens/ +.. image:: https://travis-ci.org/gristlabs/asttokens.svg?branch=master + :target: https://travis-ci.org/gristlabs/asttokens +.. image:: https://readthedocs.org/projects/asttokens/badge/?version=latest + :target: http://asttokens.readthedocs.io/en/latest/index.html +.. 
image:: https://coveralls.io/repos/github/gristlabs/asttokens/badge.svg + :target: https://coveralls.io/github/gristlabs/asttokens + +.. Start of user-guide + +The ``asttokens`` module annotates Python abstract syntax trees (ASTs) with the positions of tokens +and text in the source code that generated them. + +It makes it possible for tools that work with logical AST nodes to find the particular text that +resulted in those nodes, for example for automated refactoring or highlighting. + +Installation +------------ +asttokens is available on PyPI: https://pypi.python.org/pypi/asttokens/:: + + pip install asttokens + +The code is on GitHub: https://github.com/gristlabs/asttokens. + +The API Reference is here: http://asttokens.readthedocs.io/en/latest/api-index.html. + +Usage +----- +ASTTokens works with both Python2 and Python3. + +ASTTokens can annotate both trees built by `ast <https://docs.python.org/2/library/ast.html>`_, +AND those built by `astroid <https://github.com/PyCQA/astroid>`_. + +Here's an example: + +.. code-block:: python + + import asttokens, ast + source = "Robot('blue').walk(steps=10*n)" + atok = asttokens.ASTTokens(source, parse=True) + +Once the tree has been marked, nodes get ``.first_token``, ``.last_token`` attributes, and +the ``ASTTokens`` object offers helpful methods: + +.. code-block:: python + + attr_node = next(n for n in ast.walk(atok.tree) if isinstance(n, ast.Attribute)) + print(atok.get_text(attr_node)) + start, end = attr_node.last_token.startpos, attr_node.last_token.endpos + print(atok.text[:start] + 'RUN' + atok.text[end:]) + +Which produces this output: + +.. code-block:: text + + Robot('blue').walk + Robot('blue').RUN(steps=10*n) + +The ``ASTTokens`` object also offers methods to walk and search the list of tokens that make up +the code (or a particular AST node), which is more useful and powerful than dealing with the text +directly. + + +Contribute +---------- + +To contribute: + +1. 
Fork this repository, and clone your fork. +2. Install the package with test dependencies (ideally in a virtualenv) with:: + + pip install -e '.[test]' + +3. Run tests in your current interpreter with the command ``pytest`` or ``python -m pytest``. +4. Run tests across all supported interpreters with the ``tox`` command. You will need to have the interpreters installed separately. We recommend ``pyenv`` for that. Use ``tox -p auto`` to run the tests in parallel. +5. By default certain tests which take a very long time to run are skipped, but they are run on travis CI. To run them locally, set the environment variable ``ASTTOKENS_SLOW_TESTS``. For example run ``ASTTOKENS_SLOW_TESTS=1 tox`` to run the full suite of tests. diff --git a/contrib/python/asttokens/asttokens/__init__.py b/contrib/python/asttokens/asttokens/__init__.py new file mode 100644 index 0000000000..cde4aabc5e --- /dev/null +++ b/contrib/python/asttokens/asttokens/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2016 Grist Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This module enhances the Python AST tree with token and source code information, sufficent to +detect the source text of each AST node. This is helpful for tools that make source code +transformations. 
+""" + +from .line_numbers import LineNumbers +from .asttokens import ASTTokens diff --git a/contrib/python/asttokens/asttokens/asttokens.py b/contrib/python/asttokens/asttokens/asttokens.py new file mode 100644 index 0000000000..c299b94e0f --- /dev/null +++ b/contrib/python/asttokens/asttokens/asttokens.py @@ -0,0 +1,206 @@ +# Copyright 2016 Grist Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import ast +import bisect +import token +import tokenize +import io +import six +from six.moves import xrange # pylint: disable=redefined-builtin +from .line_numbers import LineNumbers +from .util import Token, match_token, is_non_coding_token +from .mark_tokens import MarkTokens + +class ASTTokens(object): + """ + ASTTokens maintains the text of Python code in several forms: as a string, as line numbers, and + as tokens, and is used to mark and access token and position information. + + ``source_text`` must be a unicode or UTF8-encoded string. If you pass in UTF8 bytes, remember + that all offsets you'll get are to the unicode text, which is available as the ``.text`` + property. + + If ``parse`` is set, the ``source_text`` will be parsed with ``ast.parse()``, and the resulting + tree marked with token info and made available as the ``.tree`` property. + + If ``tree`` is given, it will be marked and made available as the ``.tree`` property. 
In + addition to the trees produced by the ``ast`` module, ASTTokens will also mark trees produced + using ``astroid`` library <https://www.astroid.org>. + + If only ``source_text`` is given, you may use ``.mark_tokens(tree)`` to mark the nodes of an AST + tree created separately. + """ + def __init__(self, source_text, parse=False, tree=None, filename='<unknown>'): + self._filename = filename + self._tree = ast.parse(source_text, filename) if parse else tree + + # Decode source after parsing to let Python 2 handle coding declarations. + # (If the encoding was not utf-8 compatible, then even if it parses correctly, + # we'll fail with a unicode error here.) + if isinstance(source_text, six.binary_type): + source_text = source_text.decode('utf8') + + self._text = source_text + self._line_numbers = LineNumbers(source_text) + + # Tokenize the code. + self._tokens = list(self._generate_tokens(source_text)) + + # Extract the start positions of all tokens, so that we can quickly map positions to tokens. + self._token_offsets = [tok.startpos for tok in self._tokens] + + if self._tree: + self.mark_tokens(self._tree) + + + def mark_tokens(self, root_node): + """ + Given the root of the AST or Astroid tree produced from source_text, visits all nodes marking + them with token and position information by adding ``.first_token`` and + ``.last_token``attributes. This is done automatically in the constructor when ``parse`` or + ``tree`` arguments are set, but may be used manually with a separate AST or Astroid tree. + """ + # The hard work of this class is done by MarkTokens + MarkTokens(self).visit_tree(root_node) + + + def _generate_tokens(self, text): + """ + Generates tokens for the given code. + """ + # This is technically an undocumented API for Python3, but allows us to use the same API as for + # Python2. See http://stackoverflow.com/a/4952291/328565. 
+ for index, tok in enumerate(tokenize.generate_tokens(io.StringIO(text).readline)): + tok_type, tok_str, start, end, line = tok + yield Token(tok_type, tok_str, start, end, line, index, + self._line_numbers.line_to_offset(start[0], start[1]), + self._line_numbers.line_to_offset(end[0], end[1])) + + @property + def text(self): + """The source code passed into the constructor.""" + return self._text + + @property + def tokens(self): + """The list of tokens corresponding to the source code from the constructor.""" + return self._tokens + + @property + def tree(self): + """The root of the AST tree passed into the constructor or parsed from the source code.""" + return self._tree + + @property + def filename(self): + """The filename that was parsed""" + return self._filename + + def get_token_from_offset(self, offset): + """ + Returns the token containing the given character offset (0-based position in source text), + or the preceeding token if the position is between tokens. + """ + return self._tokens[bisect.bisect(self._token_offsets, offset) - 1] + + def get_token(self, lineno, col_offset): + """ + Returns the token containing the given (lineno, col_offset) position, or the preceeding token + if the position is between tokens. + """ + # TODO: add test for multibyte unicode. We need to translate offsets from ast module (which + # are in utf8) to offsets into the unicode text. tokenize module seems to use unicode offsets + # but isn't explicit. + return self.get_token_from_offset(self._line_numbers.line_to_offset(lineno, col_offset)) + + def get_token_from_utf8(self, lineno, col_offset): + """ + Same as get_token(), but interprets col_offset as a UTF8 offset, which is what `ast` uses. + """ + return self.get_token(lineno, self._line_numbers.from_utf8_col(lineno, col_offset)) + + def next_token(self, tok, include_extra=False): + """ + Returns the next token after the given one. 
If include_extra is True, includes non-coding + tokens from the tokenize module, such as NL and COMMENT. + """ + i = tok.index + 1 + if not include_extra: + while is_non_coding_token(self._tokens[i].type): + i += 1 + return self._tokens[i] + + def prev_token(self, tok, include_extra=False): + """ + Returns the previous token before the given one. If include_extra is True, includes non-coding + tokens from the tokenize module, such as NL and COMMENT. + """ + i = tok.index - 1 + if not include_extra: + while is_non_coding_token(self._tokens[i].type): + i -= 1 + return self._tokens[i] + + def find_token(self, start_token, tok_type, tok_str=None, reverse=False): + """ + Looks for the first token, starting at start_token, that matches tok_type and, if given, the + token string. Searches backwards if reverse is True. Returns ENDMARKER token if not found (you + can check it with `token.ISEOF(t.type)`. + """ + t = start_token + advance = self.prev_token if reverse else self.next_token + while not match_token(t, tok_type, tok_str) and not token.ISEOF(t.type): + t = advance(t, include_extra=True) + return t + + def token_range(self, first_token, last_token, include_extra=False): + """ + Yields all tokens in order from first_token through and including last_token. If + include_extra is True, includes non-coding tokens such as tokenize.NL and .COMMENT. + """ + for i in xrange(first_token.index, last_token.index + 1): + if include_extra or not is_non_coding_token(self._tokens[i].type): + yield self._tokens[i] + + def get_tokens(self, node, include_extra=False): + """ + Yields all tokens making up the given node. If include_extra is True, includes non-coding + tokens such as tokenize.NL and .COMMENT. + """ + return self.token_range(node.first_token, node.last_token, include_extra=include_extra) + + def get_text_range(self, node): + """ + After mark_tokens() has been called, returns the (startpos, endpos) positions in source text + corresponding to the given node. 
Returns (0, 0) for nodes (like `Load`) that don't correspond + to any particular text. + """ + if not hasattr(node, 'first_token'): + return (0, 0) + + start = node.first_token.startpos + if any(match_token(t, token.NEWLINE) for t in self.get_tokens(node)): + # Multi-line nodes would be invalid unless we keep the indentation of the first node. + start = self._text.rfind('\n', 0, start) + 1 + + return (start, node.last_token.endpos) + + def get_text(self, node): + """ + After mark_tokens() has been called, returns the text corresponding to the given node. Returns + '' for nodes (like `Load`) that don't correspond to any particular text. + """ + start, end = self.get_text_range(node) + return self._text[start : end] diff --git a/contrib/python/asttokens/asttokens/line_numbers.py b/contrib/python/asttokens/asttokens/line_numbers.py new file mode 100644 index 0000000000..64ee81e1c1 --- /dev/null +++ b/contrib/python/asttokens/asttokens/line_numbers.py @@ -0,0 +1,71 @@ +# Copyright 2016 Grist Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import bisect +import re + +_line_start_re = re.compile(r'^', re.M) + +class LineNumbers(object): + """ + Class to convert between character offsets in a text string, and pairs (line, column) of 1-based + line and 0-based column numbers, as used by tokens and AST nodes. + + This class expects unicode for input and stores positions in unicode. 
But it supports + translating to and from utf8 offsets, which are used by ast parsing. + """ + def __init__(self, text): + # A list of character offsets of each line's first character. + self._line_offsets = [m.start(0) for m in _line_start_re.finditer(text)] + self._text = text + self._text_len = len(text) + self._utf8_offset_cache = {} # maps line num to list of char offset for each byte in line + + def from_utf8_col(self, line, utf8_column): + """ + Given a 1-based line number and 0-based utf8 column, returns a 0-based unicode column. + """ + offsets = self._utf8_offset_cache.get(line) + if offsets is None: + end_offset = self._line_offsets[line] if line < len(self._line_offsets) else self._text_len + line_text = self._text[self._line_offsets[line - 1] : end_offset] + + offsets = [i for i,c in enumerate(line_text) for byte in c.encode('utf8')] + offsets.append(len(line_text)) + self._utf8_offset_cache[line] = offsets + + return offsets[max(0, min(len(offsets)-1, utf8_column))] + + def line_to_offset(self, line, column): + """ + Converts 1-based line number and 0-based column to 0-based character offset into text. + """ + line -= 1 + if line >= len(self._line_offsets): + return self._text_len + elif line < 0: + return 0 + else: + return min(self._line_offsets[line] + max(0, column), self._text_len) + + def offset_to_line(self, offset): + """ + Converts 0-based character offset to pair (line, col) of 1-based line and 0-based column + numbers. + """ + offset = max(0, min(self._text_len, offset)) + line_index = bisect.bisect_right(self._line_offsets, offset) - 1 + return (line_index + 1, offset - self._line_offsets[line_index]) + + diff --git a/contrib/python/asttokens/asttokens/mark_tokens.py b/contrib/python/asttokens/asttokens/mark_tokens.py new file mode 100644 index 0000000000..d0ba6b9415 --- /dev/null +++ b/contrib/python/asttokens/asttokens/mark_tokens.py @@ -0,0 +1,377 @@ +# Copyright 2016 Grist Labs, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import numbers +import sys +import token + +import six + +from . import util + +# Mapping of matching braces. To find a token here, look up token[:2]. +_matching_pairs_left = { + (token.OP, '('): (token.OP, ')'), + (token.OP, '['): (token.OP, ']'), + (token.OP, '{'): (token.OP, '}'), +} + +_matching_pairs_right = { + (token.OP, ')'): (token.OP, '('), + (token.OP, ']'): (token.OP, '['), + (token.OP, '}'): (token.OP, '{'), +} + + +class MarkTokens(object): + """ + Helper that visits all nodes in the AST tree and assigns .first_token and .last_token attributes + to each of them. This is the heart of the token-marking logic. + """ + def __init__(self, code): + self._code = code + self._methods = util.NodeMethods() + self._iter_children = None + + def visit_tree(self, node): + self._iter_children = util.iter_children_func(node) + util.visit_tree(node, self._visit_before_children, self._visit_after_children) + + def _visit_before_children(self, node, parent_token): + col = getattr(node, 'col_offset', None) + token = self._code.get_token_from_utf8(node.lineno, col) if col is not None else None + + if not token and util.is_module(node): + # We'll assume that a Module node starts at the start of the source code. + token = self._code.get_token(1, 0) + + # Use our own token, or our parent's if we don't have one, to pass to child calls as + # parent_token argument. 
The second value becomes the token argument of _visit_after_children. + return (token or parent_token, token) + + def _visit_after_children(self, node, parent_token, token): + # This processes the node generically first, after all children have been processed. + + # Get the first and last tokens that belong to children. Note how this doesn't assume that we + # iterate through children in order that corresponds to occurrence in source code. This + # assumption can fail (e.g. with return annotations). + first = token + last = None + for child in self._iter_children(node): + if not first or child.first_token.index < first.index: + first = child.first_token + if not last or child.last_token.index > last.index: + last = child.last_token + + # If we don't have a first token from _visit_before_children, and there were no children, then + # use the parent's token as the first token. + first = first or parent_token + + # If no children, set last token to the first one. + last = last or first + + # Statements continue to before NEWLINE. This helps cover a few different cases at once. + if util.is_stmt(node): + last = self._find_last_in_stmt(last) + + # Capture any unmatched brackets. + first, last = self._expand_to_matching_pairs(first, last, node) + + # Give a chance to node-specific methods to adjust. + nfirst, nlast = self._methods.get(self, node.__class__)(node, first, last) + + if (nfirst, nlast) != (first, last): + # If anything changed, expand again to capture any unmatched brackets. 
+ nfirst, nlast = self._expand_to_matching_pairs(nfirst, nlast, node) + + node.first_token = nfirst + node.last_token = nlast + + def _find_last_in_stmt(self, start_token): + t = start_token + while (not util.match_token(t, token.NEWLINE) and + not util.match_token(t, token.OP, ';') and + not token.ISEOF(t.type)): + t = self._code.next_token(t, include_extra=True) + return self._code.prev_token(t) + + def _expand_to_matching_pairs(self, first_token, last_token, node): + """ + Scan tokens in [first_token, last_token] range that are between node's children, and for any + unmatched brackets, adjust first/last tokens to include the closing pair. + """ + # We look for opening parens/braces among non-child tokens (i.e. tokens between our actual + # child nodes). If we find any closing ones, we match them to the opens. + to_match_right = [] + to_match_left = [] + for tok in self._code.token_range(first_token, last_token): + tok_info = tok[:2] + if to_match_right and tok_info == to_match_right[-1]: + to_match_right.pop() + elif tok_info in _matching_pairs_left: + to_match_right.append(_matching_pairs_left[tok_info]) + elif tok_info in _matching_pairs_right: + to_match_left.append(_matching_pairs_right[tok_info]) + + # Once done, extend `last_token` to match any unclosed parens/braces. + for match in reversed(to_match_right): + last = self._code.next_token(last_token) + # Allow for trailing commas or colons (allowed in subscripts) before the closing delimiter + while any(util.match_token(last, token.OP, x) for x in (',', ':')): + last = self._code.next_token(last) + # Now check for the actual closing delimiter. + if util.match_token(last, *match): + last_token = last + + # And extend `first_token` to match any unclosed opening parens/braces. 
+ for match in to_match_left: + first = self._code.prev_token(first_token) + if util.match_token(first, *match): + first_token = first + + return (first_token, last_token) + + #---------------------------------------------------------------------- + # Node visitors. Each takes a preliminary first and last tokens, and returns the adjusted pair + # that will actually be assigned. + + def visit_default(self, node, first_token, last_token): + # pylint: disable=no-self-use + # By default, we don't need to adjust the token we computed earlier. + return (first_token, last_token) + + def handle_comp(self, open_brace, node, first_token, last_token): + # For list/set/dict comprehensions, we only get the token of the first child, so adjust it to + # include the opening brace (the closing brace will be matched automatically). + before = self._code.prev_token(first_token) + util.expect_token(before, token.OP, open_brace) + return (before, last_token) + + # Python 3.8 fixed the starting position of list comprehensions: + # https://bugs.python.org/issue31241 + if sys.version_info < (3, 8): + def visit_listcomp(self, node, first_token, last_token): + return self.handle_comp('[', node, first_token, last_token) + + if six.PY2: + # We shouldn't do this on PY3 because its SetComp/DictComp already have a correct start. + def visit_setcomp(self, node, first_token, last_token): + return self.handle_comp('{', node, first_token, last_token) + + def visit_dictcomp(self, node, first_token, last_token): + return self.handle_comp('{', node, first_token, last_token) + + def visit_comprehension(self, node, first_token, last_token): + # The 'comprehension' node starts with 'for' but we only get first child; we search backwards + # to find the 'for' keyword. 
+ first = self._code.find_token(first_token, token.NAME, 'for', reverse=True) + return (first, last_token) + + def visit_if(self, node, first_token, last_token): + while first_token.string not in ('if', 'elif'): + first_token = self._code.prev_token(first_token) + return first_token, last_token + + def handle_attr(self, node, first_token, last_token): + # Attribute node has ".attr" (2 tokens) after the last child. + dot = self._code.find_token(last_token, token.OP, '.') + name = self._code.next_token(dot) + util.expect_token(name, token.NAME) + return (first_token, name) + + visit_attribute = handle_attr + visit_assignattr = handle_attr + visit_delattr = handle_attr + + def handle_def(self, node, first_token, last_token): + # With astroid, nodes that start with a doc-string can have an empty body, in which case we + # need to adjust the last token to include the doc string. + if not node.body and getattr(node, 'doc', None): + last_token = self._code.find_token(last_token, token.STRING) + + # Include @ from decorator + if first_token.index > 0: + prev = self._code.prev_token(first_token) + if util.match_token(prev, token.OP, '@'): + first_token = prev + return (first_token, last_token) + + visit_classdef = handle_def + visit_functiondef = handle_def + + def handle_following_brackets(self, node, last_token, opening_bracket): + # This is for calls and subscripts, which have a pair of brackets + # at the end which may contain no nodes, e.g. foo() or bar[:]. + # We look for the opening bracket and then let the matching pair be found automatically + # Remember that last_token is at the end of all children, + # so we are not worried about encountering a bracket that belongs to a child. 
+ first_child = next(self._iter_children(node)) + call_start = self._code.find_token(first_child.last_token, token.OP, opening_bracket) + if call_start.index > last_token.index: + last_token = call_start + return last_token + + def visit_call(self, node, first_token, last_token): + last_token = self.handle_following_brackets(node, last_token, '(') + + # Handling a python bug with decorators with empty parens, e.g. + # @deco() + # def ... + if util.match_token(first_token, token.OP, '@'): + first_token = self._code.next_token(first_token) + return (first_token, last_token) + + def visit_subscript(self, node, first_token, last_token): + last_token = self.handle_following_brackets(node, last_token, '[') + return (first_token, last_token) + + def handle_bare_tuple(self, node, first_token, last_token): + # A bare tuple doesn't include parens; if there is a trailing comma, make it part of the tuple. + maybe_comma = self._code.next_token(last_token) + if util.match_token(maybe_comma, token.OP, ','): + last_token = maybe_comma + return (first_token, last_token) + + if sys.version_info >= (3, 8): + # In Python3.8 parsed tuples include parentheses when present. + def handle_tuple_nonempty(self, node, first_token, last_token): + # It's a bare tuple if the first token belongs to the first child. The first child may + # include extraneous parentheses (which don't create new nodes), so account for those too. + child = node.elts[0] + child_first, child_last = self._gobble_parens(child.first_token, child.last_token, True) + if first_token == child_first: + return self.handle_bare_tuple(node, first_token, last_token) + return (first_token, last_token) + else: + # Before python 3.8, parsed tuples do not include parens. 
+ def handle_tuple_nonempty(self, node, first_token, last_token): + (first_token, last_token) = self.handle_bare_tuple(node, first_token, last_token) + return self._gobble_parens(first_token, last_token, False) + + def visit_tuple(self, node, first_token, last_token): + if not node.elts: + # An empty tuple is just "()", and we need no further info. + return (first_token, last_token) + return self.handle_tuple_nonempty(node, first_token, last_token) + + def _gobble_parens(self, first_token, last_token, include_all=False): + # Expands a range of tokens to include one or all pairs of surrounding parentheses, and + # returns (first, last) tokens that include these parens. + while first_token.index > 0: + prev = self._code.prev_token(first_token) + next = self._code.next_token(last_token) + if util.match_token(prev, token.OP, '(') and util.match_token(next, token.OP, ')'): + first_token, last_token = prev, next + if include_all: + continue + break + return (first_token, last_token) + + def visit_str(self, node, first_token, last_token): + return self.handle_str(first_token, last_token) + + def visit_joinedstr(self, node, first_token, last_token): + return self.handle_str(first_token, last_token) + + def visit_bytes(self, node, first_token, last_token): + return self.handle_str(first_token, last_token) + + def handle_str(self, first_token, last_token): + # Multiple adjacent STRING tokens form a single string. + last = self._code.next_token(last_token) + while util.match_token(last, token.STRING): + last_token = last + last = self._code.next_token(last_token) + return (first_token, last_token) + + def handle_num(self, node, value, first_token, last_token): + # A constant like '-1' gets turned into two tokens; this will skip the '-'. 
+ while util.match_token(last_token, token.OP): + last_token = self._code.next_token(last_token) + + if isinstance(value, complex): + # A complex number like -2j cannot be compared directly to 0 + # A complex number like 1-2j is expressed as a binary operation + # so we don't need to worry about it + value = value.imag + + # This makes sure that the - is included + if value < 0 and first_token.type == token.NUMBER: + first_token = self._code.prev_token(first_token) + return (first_token, last_token) + + def visit_num(self, node, first_token, last_token): + return self.handle_num(node, node.n, first_token, last_token) + + # In Astroid, the Num and Str nodes are replaced by Const. + def visit_const(self, node, first_token, last_token): + if isinstance(node.value, numbers.Number): + return self.handle_num(node, node.value, first_token, last_token) + elif isinstance(node.value, (six.text_type, six.binary_type)): + return self.visit_str(node, first_token, last_token) + return (first_token, last_token) + + # In Python >= 3.6, there is a similar class 'Constant' for literals + # In 3.8 it became the type produced by ast.parse + # https://bugs.python.org/issue32892 + visit_constant = visit_const + + def visit_keyword(self, node, first_token, last_token): + # Until python 3.9 (https://bugs.python.org/issue40141), + # ast.keyword nodes didn't have line info. Astroid has lineno None. + if node.arg is not None and getattr(node, 'lineno', None) is None: + equals = self._code.find_token(first_token, token.OP, '=', reverse=True) + name = self._code.prev_token(equals) + util.expect_token(name, token.NAME, node.arg) + first_token = name + return (first_token, last_token) + + def visit_starred(self, node, first_token, last_token): + # Astroid has 'Starred' nodes (for "foo(*bar)" type args), but they need to be adjusted. 
+ if not util.match_token(first_token, token.OP, '*'): + star = self._code.prev_token(first_token) + if util.match_token(star, token.OP, '*'): + first_token = star + return (first_token, last_token) + + def visit_assignname(self, node, first_token, last_token): + # Astroid may turn 'except' clause into AssignName, but we need to adjust it. + if util.match_token(first_token, token.NAME, 'except'): + colon = self._code.find_token(last_token, token.OP, ':') + first_token = last_token = self._code.prev_token(colon) + return (first_token, last_token) + + if six.PY2: + # No need for this on Python3, which already handles 'with' nodes correctly. + def visit_with(self, node, first_token, last_token): + first = self._code.find_token(first_token, token.NAME, 'with', reverse=True) + return (first, last_token) + + # Async nodes should typically start with the word 'async' + # but Python < 3.7 doesn't put the col_offset there + # AsyncFunctionDef is slightly different because it might have + # decorators before that, which visit_functiondef handles + def handle_async(self, node, first_token, last_token): + if not first_token.string == 'async': + first_token = self._code.prev_token(first_token) + return (first_token, last_token) + + visit_asyncfor = handle_async + visit_asyncwith = handle_async + + def visit_asyncfunctiondef(self, node, first_token, last_token): + if util.match_token(first_token, token.NAME, 'def'): + # Include the 'async' token + first_token = self._code.prev_token(first_token) + return self.visit_functiondef(node, first_token, last_token) diff --git a/contrib/python/asttokens/asttokens/util.py b/contrib/python/asttokens/asttokens/util.py new file mode 100644 index 0000000000..79ac430036 --- /dev/null +++ b/contrib/python/asttokens/asttokens/util.py @@ -0,0 +1,270 @@ +# Copyright 2016 Grist Labs, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import ast +import collections +import token +from six import iteritems + + +def token_repr(tok_type, string): + """Returns a human-friendly representation of a token with the given type and string.""" + # repr() prefixes unicode with 'u' on Python2 but not Python3; strip it out for consistency. + return '%s:%s' % (token.tok_name[tok_type], repr(string).lstrip('u')) + + +class Token(collections.namedtuple('Token', 'type string start end line index startpos endpos')): + """ + TokenInfo is an 8-tuple containing the same 5 fields as the tokens produced by the tokenize + module, and 3 additional ones useful for this module: + + - [0] .type Token type (see token.py) + - [1] .string Token (a string) + - [2] .start Starting (row, column) indices of the token (a 2-tuple of ints) + - [3] .end Ending (row, column) indices of the token (a 2-tuple of ints) + - [4] .line Original line (string) + - [5] .index Index of the token in the list of tokens that it belongs to. + - [6] .startpos Starting character offset into the input text. + - [7] .endpos Ending character offset into the input text. + """ + def __str__(self): + return token_repr(self.type, self.string) + + +def match_token(token, tok_type, tok_str=None): + """Returns true if token is of the given type and, if a string is given, has that string.""" + return token.type == tok_type and (tok_str is None or token.string == tok_str) + + +def expect_token(token, tok_type, tok_str=None): + """ + Verifies that the given token is of the expected type. If tok_str is given, the token string + is verified too. 
If the token doesn't match, raises an informative ValueError. + """ + if not match_token(token, tok_type, tok_str): + raise ValueError("Expected token %s, got %s on line %s col %s" % ( + token_repr(tok_type, tok_str), str(token), + token.start[0], token.start[1] + 1)) + +# These were previously defined in tokenize.py and distinguishable by being greater than +# token.N_TOKEN. As of python3.7, they are in token.py, and we check for them explicitly. +if hasattr(token, 'ENCODING'): + def is_non_coding_token(token_type): + """ + These are considered non-coding tokens, as they don't affect the syntax tree. + """ + return token_type in (token.NL, token.COMMENT, token.ENCODING) +else: + def is_non_coding_token(token_type): + """ + These are considered non-coding tokens, as they don't affect the syntax tree. + """ + return token_type >= token.N_TOKENS + + +def iter_children_func(node): + """ + Returns a function which yields all direct children of a AST node, + skipping children that are singleton nodes. + The function depends on whether ``node`` is from ``ast`` or from the ``astroid`` module. + """ + return iter_children_astroid if hasattr(node, 'get_children') else iter_children_ast + + +def iter_children_astroid(node): + # Don't attempt to process children of JoinedStr nodes, which we can't fully handle yet. + if is_joined_str(node): + return [] + + return node.get_children() + + +SINGLETONS = {c for n, c in iteritems(ast.__dict__) if isinstance(c, type) and + issubclass(c, (ast.expr_context, ast.boolop, ast.operator, ast.unaryop, ast.cmpop))} + +def iter_children_ast(node): + # Don't attempt to process children of JoinedStr nodes, which we can't fully handle yet. + if is_joined_str(node): + return + + if isinstance(node, ast.Dict): + # override the iteration order: instead of <all keys>, <all values>, + # yield keys and values in source order (key1, value1, key2, value2, ...) 
+ for (key, value) in zip(node.keys, node.values): + if key is not None: + yield key + yield value + return + + for child in ast.iter_child_nodes(node): + # Skip singleton children; they don't reflect particular positions in the code and break the + # assumptions about the tree consisting of distinct nodes. Note that collecting classes + # beforehand and checking them in a set is faster than using isinstance each time. + if child.__class__ not in SINGLETONS: + yield child + + +stmt_class_names = {n for n, c in iteritems(ast.__dict__) + if isinstance(c, type) and issubclass(c, ast.stmt)} +expr_class_names = ({n for n, c in iteritems(ast.__dict__) + if isinstance(c, type) and issubclass(c, ast.expr)} | + {'AssignName', 'DelName', 'Const', 'AssignAttr', 'DelAttr'}) + +# These feel hacky compared to isinstance() but allow us to work with both ast and astroid nodes +# in the same way, and without even importing astroid. +def is_expr(node): + """Returns whether node is an expression node.""" + return node.__class__.__name__ in expr_class_names + +def is_stmt(node): + """Returns whether node is a statement node.""" + return node.__class__.__name__ in stmt_class_names + +def is_module(node): + """Returns whether node is a module node.""" + return node.__class__.__name__ == 'Module' + +def is_joined_str(node): + """Returns whether node is a JoinedStr node, used to represent f-strings.""" + # At the moment, nodes below JoinedStr have wrong line/col info, and trying to process them only + # leads to errors. + return node.__class__.__name__ == 'JoinedStr' + + +def is_starred(node): + """Returns whether node is a starred expression node.""" + return node.__class__.__name__ == 'Starred' + + +def is_slice(node): + """Returns whether node represents a slice, e.g. 
`1:2` in `x[1:2]`""" + # Before 3.9, a tuple containing a slice is an ExtSlice, + # but this was removed in https://bugs.python.org/issue34822 + return ( + node.__class__.__name__ in ('Slice', 'ExtSlice') + or ( + node.__class__.__name__ == 'Tuple' + and any(map(is_slice, node.elts)) + ) + ) + + +# Sentinel value used by visit_tree(). +_PREVISIT = object() + +def visit_tree(node, previsit, postvisit): + """ + Scans the tree under the node depth-first using an explicit stack. It avoids implicit recursion + via the function call stack to avoid hitting 'maximum recursion depth exceeded' error. + + It calls ``previsit()`` and ``postvisit()`` as follows: + + * ``previsit(node, par_value)`` - should return ``(par_value, value)`` + ``par_value`` is as returned from ``previsit()`` of the parent. + + * ``postvisit(node, par_value, value)`` - should return ``value`` + ``par_value`` is as returned from ``previsit()`` of the parent, and ``value`` is as + returned from ``previsit()`` of this node itself. The return ``value`` is ignored except + the one for the root node, which is returned from the overall ``visit_tree()`` call. + + For the initial node, ``par_value`` is None. ``postvisit`` may be None. + """ + if not postvisit: + postvisit = lambda node, pvalue, value: None + + iter_children = iter_children_func(node) + done = set() + ret = None + stack = [(node, None, _PREVISIT)] + while stack: + current, par_value, value = stack.pop() + if value is _PREVISIT: + assert current not in done # protect againt infinite loop in case of a bad tree. + done.add(current) + + pvalue, post_value = previsit(current, par_value) + stack.append((current, par_value, post_value)) + + # Insert all children in reverse order (so that first child ends up on top of the stack). 
+ ins = len(stack) + for n in iter_children(current): + stack.insert(ins, (n, pvalue, _PREVISIT)) + else: + ret = postvisit(current, par_value, value) + return ret + + + +def walk(node): + """ + Recursively yield all descendant nodes in the tree starting at ``node`` (including ``node`` + itself), using depth-first pre-order traversal (yieling parents before their children). + + This is similar to ``ast.walk()``, but with a different order, and it works for both ``ast`` and + ``astroid`` trees. Also, as ``iter_children()``, it skips singleton nodes generated by ``ast``. + """ + iter_children = iter_children_func(node) + done = set() + stack = [node] + while stack: + current = stack.pop() + assert current not in done # protect againt infinite loop in case of a bad tree. + done.add(current) + + yield current + + # Insert all children in reverse order (so that first child ends up on top of the stack). + # This is faster than building a list and reversing it. + ins = len(stack) + for c in iter_children(current): + stack.insert(ins, c) + + +def replace(text, replacements): + """ + Replaces multiple slices of text with new values. This is a convenience method for making code + modifications of ranges e.g. as identified by ``ASTTokens.get_text_range(node)``. Replacements is + an iterable of ``(start, end, new_text)`` tuples. + + For example, ``replace("this is a test", [(0, 4, "X"), (8, 9, "THE")])`` produces + ``"X is THE test"``. + """ + p = 0 + parts = [] + for (start, end, new_text) in sorted(replacements): + parts.append(text[p:start]) + parts.append(new_text) + p = end + parts.append(text[p:]) + return ''.join(parts) + + +class NodeMethods(object): + """ + Helper to get `visit_{node_type}` methods given a node's class and cache the results. 
+ """ + def __init__(self): + self._cache = {} + + def get(self, obj, cls): + """ + Using the lowercase name of the class as node_type, returns `obj.visit_{node_type}`, + or `obj.visit_default` if the type-specific method is not found. + """ + method = self._cache.get(cls) + if not method: + name = "visit_" + cls.__name__.lower() + method = getattr(obj, name, obj.visit_default) + self._cache[cls] = method + return method diff --git a/contrib/python/asttokens/asttokens/version.py b/contrib/python/asttokens/asttokens/version.py new file mode 100644 index 0000000000..34c5111c54 --- /dev/null +++ b/contrib/python/asttokens/asttokens/version.py @@ -0,0 +1 @@ +__version__ = "2.0.5" diff --git a/contrib/python/executing/.dist-info/METADATA b/contrib/python/executing/.dist-info/METADATA new file mode 100644 index 0000000000..c2b96f141c --- /dev/null +++ b/contrib/python/executing/.dist-info/METADATA @@ -0,0 +1,166 @@ +Metadata-Version: 2.1 +Name: executing +Version: 0.8.3 +Summary: Get the currently executing AST node of a frame, and other information +Home-page: https://github.com/alexmojaki/executing +Author: Alex Hall +Author-email: alex.mojaki@gmail.com +License: MIT +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Description-Content-Type: text/markdown +License-File: LICENSE.txt + +# executing + +[![Build Status](https://github.com/alexmojaki/executing/workflows/Tests/badge.svg?branch=master)](https://github.com/alexmojaki/executing/actions) [![Coverage 
Status](https://coveralls.io/repos/github/alexmojaki/executing/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/executing?branch=master) [![Supports Python versions 2.7 and 3.4+, including PyPy](https://img.shields.io/pypi/pyversions/executing.svg)](https://pypi.python.org/pypi/executing) + +This mini-package lets you get information about what a frame is currently doing, particularly the AST node being executed. + +* [Usage](#usage) + * [Getting the AST node](#getting-the-ast-node) + * [Getting the source code of the node](#getting-the-source-code-of-the-node) + * [Getting the `__qualname__` of the current function](#getting-the-__qualname__-of-the-current-function) + * [The Source class](#the-source-class) +* [Installation](#installation) +* [How does it work?](#how-does-it-work) +* [Is it reliable?](#is-it-reliable) +* [Which nodes can it identify?](#which-nodes-can-it-identify) +* [Libraries that use this](#libraries-that-use-this) + +## Usage + +### Getting the AST node + +```python +import executing + +node = executing.Source.executing(frame).node +``` + +Then `node` will be an AST node (from the `ast` standard library module) or None if the node couldn't be identified (which may happen often and should always be checked). + +`node` will always be the same instance for multiple calls with frames at the same point of execution. + +If you have a traceback object, pass it directly to `Source.executing()` rather than the `tb_frame` attribute to get the correct node. 
+ +### Getting the source code of the node + +For this you will need to separately install the [`asttokens`](https://github.com/gristlabs/asttokens) library, then obtain an `ASTTokens` object: + +```python +executing.Source.executing(frame).source.asttokens() +``` + +or: + +```python +executing.Source.for_frame(frame).asttokens() +``` + +or use one of the convenience methods: + +```python +executing.Source.executing(frame).text() +executing.Source.executing(frame).text_range() +``` + +### Getting the `__qualname__` of the current function + +```python +executing.Source.executing(frame).code_qualname() +``` + +or: + +```python +executing.Source.for_frame(frame).code_qualname(frame.f_code) +``` + +### The `Source` class + +Everything goes through the `Source` class. Only one instance of the class is created for each filename. Subclassing it to add more attributes on creation or methods is recommended. The classmethods such as `executing` will respect this. See the source code and docstrings for more detail. + +## Installation + + pip install executing + +If you don't like that you can just copy the file `executing.py`, there are no dependencies (but of course you won't get updates). + +## How does it work? + +Suppose the frame is executing this line: + +```python +self.foo(bar.x) +``` + +and in particular it's currently obtaining the attribute `self.foo`. Looking at the bytecode, specifically `frame.f_code.co_code[frame.f_lasti]`, we can tell that it's loading an attribute, but it's not obvious which one. We can narrow down the statement being executed using `frame.f_lineno` and find the two `ast.Attribute` nodes representing `self.foo` and `bar.x`. How do we find out which one it is, without recreating the entire compiler in Python? + +The trick is to modify the AST slightly for each candidate expression and observe the changes in the bytecode instructions. 
We change the AST to this: + +```python +(self.foo ** 'longuniqueconstant')(bar.x) +``` + +and compile it, and the bytecode will be almost the same but there will be two new instructions: + + LOAD_CONST 'longuniqueconstant' + BINARY_POWER + +and just before that will be a `LOAD_ATTR` instruction corresponding to `self.foo`. Seeing that it's in the same position as the original instruction lets us know we've found our match. + +## Is it reliable? + +Yes - if it identifies a node, you can trust that it's identified the correct one. The tests are very thorough - in addition to unit tests which check various situations directly, there are property tests against a large number of files (see the filenames printed in [this build](https://travis-ci.org/alexmojaki/executing/jobs/557970457)) with real code. Specifically, for each file, the tests: + + 1. Identify as many nodes as possible from all the bytecode instructions in the file, and assert that they are all distinct + 2. Find all the nodes that should be identifiable, and assert that they were indeed identified somewhere + +In other words, it shows that there is a one-to-one mapping between the nodes and the instructions that can be handled. This leaves very little room for a bug to creep in. + +Furthermore, `executing` checks that the instructions compiled from the modified AST exactly match the original code save for a few small known exceptions. This accounts for all the quirks and optimisations in the interpreter. + +## Which nodes can it identify? + +Currently it works in almost all cases for the following `ast` nodes: + + - `Call`, e.g. `self.foo(bar)` + - `Attribute`, e.g. `point.x` + - `Subscript`, e.g. `lst[1]` + - `BinOp`, e.g. `x + y` (doesn't include `and` and `or`) + - `UnaryOp`, e.g. `-n` (includes `not` but only works sometimes) + - `Compare` e.g. `a < b` (not for chains such as `0 < p < 1`) + +The plan is to extend to more operations in the future. 
+ +## Projects that use this + +### My Projects + +- **[`stack_data`](https://github.com/alexmojaki/stack_data)**: Extracts data from stack frames and tracebacks, particularly to display more useful tracebacks than the default. Also uses another related library of mine: **[`pure_eval`](https://github.com/alexmojaki/pure_eval)**. +- **[`futurecoder`](https://futurecoder.io/)**: Highlights the executing node in tracebacks using `executing` via `stack_data`, and provides debugging with `snoop`. +- **[`snoop`](https://github.com/alexmojaki/snoop)**: A feature-rich and convenient debugging library. Uses `executing` to show the operation which caused an exception and to allow the `pp` function to display the source of its arguments. +- **[`heartrate`](https://github.com/alexmojaki/heartrate)**: A simple real time visualisation of the execution of a Python program. Uses `executing` to highlight currently executing operations, particularly in each frame of the stack trace. +- **[`sorcery`](https://github.com/alexmojaki/sorcery)**: Dark magic delights in Python. Uses `executing` to let special callables called spells know where they're being called from. + +### Projects I've contributed to + +- **[`IPython`](https://github.com/ipython/ipython/pull/12150)**: Highlights the executing node in tracebacks using `executing` via [`stack_data`](https://github.com/alexmojaki/stack_data). +- **[`icecream`](https://github.com/gruns/icecream)**: 🍦 Sweet and creamy print debugging. Uses `executing` to identify where `ic` is called and print its arguments. +- **[`friendly_traceback`](https://github.com/friendly-traceback/friendly-traceback)**: Uses `stack_data` and `executing` to pinpoint the cause of errors and provide helpful explanations. +- **[`python-devtools`](https://github.com/samuelcolvin/python-devtools)**: Uses `executing` for print debugging similar to `icecream`. 
+- **[`sentry_sdk`](https://github.com/getsentry/sentry-python)**: Add the integration `sentry_sdk.integrations.executing.ExecutingIntegration()` to show the function `__qualname__` in each frame in sentry events. +- **[`varname`](https://github.com/pwwang/python-varname)**: Dark magics about variable names in python. Uses `executing` to find where its various magical functions like `varname` and `nameof` are called from. + + diff --git a/contrib/python/executing/.dist-info/top_level.txt b/contrib/python/executing/.dist-info/top_level.txt new file mode 100644 index 0000000000..a920f2c56c --- /dev/null +++ b/contrib/python/executing/.dist-info/top_level.txt @@ -0,0 +1 @@ +executing diff --git a/contrib/python/executing/LICENSE.txt b/contrib/python/executing/LICENSE.txt new file mode 100644 index 0000000000..473e36e246 --- /dev/null +++ b/contrib/python/executing/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Alex Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/contrib/python/executing/README.md b/contrib/python/executing/README.md new file mode 100644 index 0000000000..616d3683cc --- /dev/null +++ b/contrib/python/executing/README.md @@ -0,0 +1,141 @@ +# executing + +[![Build Status](https://github.com/alexmojaki/executing/workflows/Tests/badge.svg?branch=master)](https://github.com/alexmojaki/executing/actions) [![Coverage Status](https://coveralls.io/repos/github/alexmojaki/executing/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/executing?branch=master) [![Supports Python versions 2.7 and 3.4+, including PyPy](https://img.shields.io/pypi/pyversions/executing.svg)](https://pypi.python.org/pypi/executing) + +This mini-package lets you get information about what a frame is currently doing, particularly the AST node being executed. + +* [Usage](#usage) + * [Getting the AST node](#getting-the-ast-node) + * [Getting the source code of the node](#getting-the-source-code-of-the-node) + * [Getting the `__qualname__` of the current function](#getting-the-__qualname__-of-the-current-function) + * [The Source class](#the-source-class) +* [Installation](#installation) +* [How does it work?](#how-does-it-work) +* [Is it reliable?](#is-it-reliable) +* [Which nodes can it identify?](#which-nodes-can-it-identify) +* [Libraries that use this](#libraries-that-use-this) + +## Usage + +### Getting the AST node + +```python +import executing + +node = executing.Source.executing(frame).node +``` + +Then `node` will be an AST node (from the `ast` standard library module) or None if the node couldn't be identified (which may happen often and should always be checked). + +`node` will always be the same instance for multiple calls with frames at the same point of execution. + +If you have a traceback object, pass it directly to `Source.executing()` rather than the `tb_frame` attribute to get the correct node. 
+ +### Getting the source code of the node + +For this you will need to separately install the [`asttokens`](https://github.com/gristlabs/asttokens) library, then obtain an `ASTTokens` object: + +```python +executing.Source.executing(frame).source.asttokens() +``` + +or: + +```python +executing.Source.for_frame(frame).asttokens() +``` + +or use one of the convenience methods: + +```python +executing.Source.executing(frame).text() +executing.Source.executing(frame).text_range() +``` + +### Getting the `__qualname__` of the current function + +```python +executing.Source.executing(frame).code_qualname() +``` + +or: + +```python +executing.Source.for_frame(frame).code_qualname(frame.f_code) +``` + +### The `Source` class + +Everything goes through the `Source` class. Only one instance of the class is created for each filename. Subclassing it to add more attributes on creation or methods is recommended. The classmethods such as `executing` will respect this. See the source code and docstrings for more detail. + +## Installation + + pip install executing + +If you don't like that you can just copy the file `executing.py`, there are no dependencies (but of course you won't get updates). + +## How does it work? + +Suppose the frame is executing this line: + +```python +self.foo(bar.x) +``` + +and in particular it's currently obtaining the attribute `self.foo`. Looking at the bytecode, specifically `frame.f_code.co_code[frame.f_lasti]`, we can tell that it's loading an attribute, but it's not obvious which one. We can narrow down the statement being executed using `frame.f_lineno` and find the two `ast.Attribute` nodes representing `self.foo` and `bar.x`. How do we find out which one it is, without recreating the entire compiler in Python? + +The trick is to modify the AST slightly for each candidate expression and observe the changes in the bytecode instructions. 
We change the AST to this: + +```python +(self.foo ** 'longuniqueconstant')(bar.x) +``` + +and compile it, and the bytecode will be almost the same but there will be two new instructions: + + LOAD_CONST 'longuniqueconstant' + BINARY_POWER + +and just before that will be a `LOAD_ATTR` instruction corresponding to `self.foo`. Seeing that it's in the same position as the original instruction lets us know we've found our match. + +## Is it reliable? + +Yes - if it identifies a node, you can trust that it's identified the correct one. The tests are very thorough - in addition to unit tests which check various situations directly, there are property tests against a large number of files (see the filenames printed in [this build](https://travis-ci.org/alexmojaki/executing/jobs/557970457)) with real code. Specifically, for each file, the tests: + + 1. Identify as many nodes as possible from all the bytecode instructions in the file, and assert that they are all distinct + 2. Find all the nodes that should be identifiable, and assert that they were indeed identified somewhere + +In other words, it shows that there is a one-to-one mapping between the nodes and the instructions that can be handled. This leaves very little room for a bug to creep in. + +Furthermore, `executing` checks that the instructions compiled from the modified AST exactly match the original code save for a few small known exceptions. This accounts for all the quirks and optimisations in the interpreter. + +## Which nodes can it identify? + +Currently it works in almost all cases for the following `ast` nodes: + + - `Call`, e.g. `self.foo(bar)` + - `Attribute`, e.g. `point.x` + - `Subscript`, e.g. `lst[1]` + - `BinOp`, e.g. `x + y` (doesn't include `and` and `or`) + - `UnaryOp`, e.g. `-n` (includes `not` but only works sometimes) + - `Compare` e.g. `a < b` (not for chains such as `0 < p < 1`) + +The plan is to extend to more operations in the future. 
+ +## Projects that use this + +### My Projects + +- **[`stack_data`](https://github.com/alexmojaki/stack_data)**: Extracts data from stack frames and tracebacks, particularly to display more useful tracebacks than the default. Also uses another related library of mine: **[`pure_eval`](https://github.com/alexmojaki/pure_eval)**. +- **[`futurecoder`](https://futurecoder.io/)**: Highlights the executing node in tracebacks using `executing` via `stack_data`, and provides debugging with `snoop`. +- **[`snoop`](https://github.com/alexmojaki/snoop)**: A feature-rich and convenient debugging library. Uses `executing` to show the operation which caused an exception and to allow the `pp` function to display the source of its arguments. +- **[`heartrate`](https://github.com/alexmojaki/heartrate)**: A simple real time visualisation of the execution of a Python program. Uses `executing` to highlight currently executing operations, particularly in each frame of the stack trace. +- **[`sorcery`](https://github.com/alexmojaki/sorcery)**: Dark magic delights in Python. Uses `executing` to let special callables called spells know where they're being called from. + +### Projects I've contributed to + +- **[`IPython`](https://github.com/ipython/ipython/pull/12150)**: Highlights the executing node in tracebacks using `executing` via [`stack_data`](https://github.com/alexmojaki/stack_data). +- **[`icecream`](https://github.com/gruns/icecream)**: 🍦 Sweet and creamy print debugging. Uses `executing` to identify where `ic` is called and print its arguments. +- **[`friendly_traceback`](https://github.com/friendly-traceback/friendly-traceback)**: Uses `stack_data` and `executing` to pinpoint the cause of errors and provide helpful explanations. +- **[`python-devtools`](https://github.com/samuelcolvin/python-devtools)**: Uses `executing` for print debugging similar to `icecream`. 
+- **[`sentry_sdk`](https://github.com/getsentry/sentry-python)**: Add the integration `sentry_sdk.integrations.executing.ExecutingIntegration()` to show the function `__qualname__` in each frame in sentry events. +- **[`varname`](https://github.com/pwwang/python-varname)**: Dark magics about variable names in python. Uses `executing` to find where its various magical functions like `varname` and `nameof` are called from. diff --git a/contrib/python/executing/executing/__init__.py b/contrib/python/executing/executing/__init__.py new file mode 100644 index 0000000000..4c41629717 --- /dev/null +++ b/contrib/python/executing/executing/__init__.py @@ -0,0 +1,25 @@ +""" +Get information about what a frame is currently doing. Typical usage: + + import executing + + node = executing.Source.executing(frame).node + # node will be an AST node or None +""" + +from collections import namedtuple +_VersionInfo = namedtuple('VersionInfo', ('major', 'minor', 'micro')) +from .executing import Source, Executing, only, NotOneValueFound, cache, future_flags +try: + from .version import __version__ + if "dev" in __version__: + raise ValueError +except Exception: + # version.py is auto-generated with the git tag when building + __version__ = "???" 
+ __version_info__ = _VersionInfo(-1, -1, -1) +else: + __version_info__ = _VersionInfo(*map(int, __version__.split('.'))) + + +__all__ = ["Source"] diff --git a/contrib/python/executing/executing/executing.py b/contrib/python/executing/executing/executing.py new file mode 100644 index 0000000000..5dc0621583 --- /dev/null +++ b/contrib/python/executing/executing/executing.py @@ -0,0 +1,1088 @@ +""" +MIT License + +Copyright (c) 2021 Alex Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+""" + +import __future__ +import ast +import dis +import functools +import inspect +import io +import linecache +import re +import sys +import types +from collections import defaultdict, namedtuple +from copy import deepcopy +from itertools import islice +from operator import attrgetter +from threading import RLock + +function_node_types = (ast.FunctionDef,) + +PY3 = sys.version_info[0] == 3 + +if PY3: + # noinspection PyUnresolvedReferences + from functools import lru_cache + # noinspection PyUnresolvedReferences + from tokenize import detect_encoding + from itertools import zip_longest + # noinspection PyUnresolvedReferences,PyCompatibility + from pathlib import Path + + cache = lru_cache(maxsize=None) + text_type = str +else: + from lib2to3.pgen2.tokenize import detect_encoding, cookie_re as encoding_pattern + from itertools import izip_longest as zip_longest + + + class Path(object): + pass + + + def cache(func): + d = {} + + @functools.wraps(func) + def wrapper(*args): + if args in d: + return d[args] + result = d[args] = func(*args) + return result + + return wrapper + + + # noinspection PyUnresolvedReferences + text_type = unicode + +try: + # noinspection PyUnresolvedReferences + _get_instructions = dis.get_instructions +except AttributeError: + class Instruction(namedtuple('Instruction', 'offset argval opname starts_line')): + lineno = None + + + from dis import HAVE_ARGUMENT, EXTENDED_ARG, hasconst, opname, findlinestarts, hasname + + # Based on dis.disassemble from 2.7 + # Left as similar as possible for easy diff + + def _get_instructions(co): + code = co.co_code + linestarts = dict(findlinestarts(co)) + n = len(code) + i = 0 + extended_arg = 0 + while i < n: + offset = i + c = code[i] + op = ord(c) + lineno = linestarts.get(i) + argval = None + i = i + 1 + if op >= HAVE_ARGUMENT: + oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg + extended_arg = 0 + i = i + 2 + if op == EXTENDED_ARG: + extended_arg = oparg * 65536 + + if op in hasconst: + 
argval = co.co_consts[oparg] + elif op in hasname: + argval = co.co_names[oparg] + elif opname[op] == 'LOAD_FAST': + argval = co.co_varnames[oparg] + yield Instruction(offset, argval, opname[op], lineno) + + +try: + function_node_types += (ast.AsyncFunctionDef,) +except AttributeError: + pass + + +def assert_(condition, message=""): + """ + Like an assert statement, but unaffected by -O + :param condition: value that is expected to be truthy + :type message: Any + """ + if not condition: + raise AssertionError(str(message)) + + +def get_instructions(co): + lineno = co.co_firstlineno + for inst in _get_instructions(co): + lineno = inst.starts_line or lineno + assert_(lineno) + inst.lineno = lineno + yield inst + + +TESTING = 0 + + +class NotOneValueFound(Exception): + pass + + +def only(it): + if hasattr(it, '__len__'): + if len(it) != 1: + raise NotOneValueFound('Expected one value, found %s' % len(it)) + # noinspection PyTypeChecker + return list(it)[0] + + lst = tuple(islice(it, 2)) + if len(lst) == 0: + raise NotOneValueFound('Expected one value, found 0') + if len(lst) > 1: + raise NotOneValueFound('Expected one value, found several') + return lst[0] + + +class Source(object): + """ + The source code of a single file and associated metadata. + + The main method of interest is the classmethod `executing(frame)`. + + If you want an instance of this class, don't construct it. + Ideally use the classmethod `for_frame(frame)`. + If you don't have a frame, use `for_filename(filename [, module_globals])`. + These methods cache instances by filename, so at most one instance exists per filename. + + Attributes: + - filename + - text + - lines + - tree: AST parsed from text, or None if text is not valid Python + All nodes in the tree have an extra `parent` attribute + + Other methods of interest: + - statements_at_line + - asttokens + - code_qualname + """ + + def __init__(self, filename, lines): + """ + Don't call this constructor, see the class docstring. 
+ """ + + self.filename = filename + text = ''.join(lines) + + if not isinstance(text, text_type): + encoding = self.detect_encoding(text) + # noinspection PyUnresolvedReferences + text = text.decode(encoding) + lines = [line.decode(encoding) for line in lines] + + self.text = text + self.lines = [line.rstrip('\r\n') for line in lines] + + if PY3: + ast_text = text + else: + # In python 2 it's a syntax error to parse unicode + # with an encoding declaration, so we remove it but + # leave empty lines in its place to keep line numbers the same + ast_text = ''.join([ + '\n' if i < 2 and encoding_pattern.match(line) + else line + for i, line in enumerate(lines) + ]) + + self._nodes_by_line = defaultdict(list) + self.tree = None + self._qualnames = {} + + try: + self.tree = ast.parse(ast_text, filename=filename) + except SyntaxError: + pass + else: + for node in ast.walk(self.tree): + for child in ast.iter_child_nodes(node): + child.parent = node + if hasattr(node, "lineno"): + if hasattr(node, "end_lineno") and isinstance(node, ast.expr): + linenos = range(node.lineno, node.end_lineno + 1) + else: + linenos = [node.lineno] + for lineno in linenos: + self._nodes_by_line[lineno].append(node) + + visitor = QualnameVisitor() + visitor.visit(self.tree) + self._qualnames = visitor.qualnames + + @classmethod + def for_frame(cls, frame, use_cache=True): + """ + Returns the `Source` object corresponding to the file the frame is executing in. 
+ """ + return cls.for_filename(frame.f_code.co_filename, frame.f_globals or {}, use_cache) + + @classmethod + def for_filename(cls, filename, module_globals=None, use_cache=True): + if isinstance(filename, Path): + filename = str(filename) + + source_cache = cls._class_local('__source_cache', {}) + if use_cache: + try: + return source_cache[filename] + except KeyError: + pass + + if not use_cache: + linecache.checkcache(filename) + + lines = tuple(linecache.getlines(filename, module_globals)) + result = source_cache[filename] = cls._for_filename_and_lines(filename, lines) + return result + + @classmethod + def _for_filename_and_lines(cls, filename, lines): + source_cache = cls._class_local('__source_cache_with_lines', {}) + try: + return source_cache[(filename, lines)] + except KeyError: + pass + + result = source_cache[(filename, lines)] = cls(filename, lines) + return result + + @classmethod + def lazycache(cls, frame): + if hasattr(linecache, 'lazycache'): + linecache.lazycache(frame.f_code.co_filename, frame.f_globals) + + @classmethod + def executing(cls, frame_or_tb): + """ + Returns an `Executing` object representing the operation + currently executing in the given frame or traceback object. + """ + if isinstance(frame_or_tb, types.TracebackType): + # https://docs.python.org/3/reference/datamodel.html#traceback-objects + # "tb_lineno gives the line number where the exception occurred; + # tb_lasti indicates the precise instruction. + # The line number and last instruction in the traceback may differ + # from the line number of its frame object + # if the exception occurred in a try statement with no matching except clause + # or with a finally clause." 
+ tb = frame_or_tb + frame = tb.tb_frame + lineno = tb.tb_lineno + lasti = tb.tb_lasti + else: + frame = frame_or_tb + lineno = frame.f_lineno + lasti = frame.f_lasti + + code = frame.f_code + key = (code, id(code), lasti) + executing_cache = cls._class_local('__executing_cache', {}) + + try: + args = executing_cache[key] + except KeyError: + def find(source, retry_cache): + node = stmts = decorator = None + tree = source.tree + if tree: + try: + stmts = source.statements_at_line(lineno) + if stmts: + if is_ipython_cell_code(code): + for stmt in stmts: + tree = _extract_ipython_statement(stmt) + try: + node_finder = NodeFinder(frame, stmts, tree, lasti) + if (node or decorator) and (node_finder.result or node_finder.decorator): + if retry_cache: + raise AssertionError + # Found potential nodes in separate statements, + # cannot resolve ambiguity, give up here + node = decorator = None + break + + node = node_finder.result + decorator = node_finder.decorator + except Exception: + if retry_cache: + raise + + else: + node_finder = NodeFinder(frame, stmts, tree, lasti) + node = node_finder.result + decorator = node_finder.decorator + except Exception as e: + # These exceptions can be caused by the source code having changed + # so the cached Source doesn't match the running code + # (e.g. 
when using IPython %autoreload) + # Try again with a fresh Source object + if retry_cache and isinstance(e, (NotOneValueFound, AssertionError)): + return find( + source=cls.for_frame(frame, use_cache=False), + retry_cache=False, + ) + if TESTING: + raise + + if node: + new_stmts = {statement_containing_node(node)} + assert_(new_stmts <= stmts) + stmts = new_stmts + + return source, node, stmts, decorator + + args = find(source=cls.for_frame(frame), retry_cache=True) + executing_cache[key] = args + + return Executing(frame, *args) + + @classmethod + def _class_local(cls, name, default): + """ + Returns an attribute directly associated with this class + (as opposed to subclasses), setting default if necessary + """ + # classes have a mappingproxy preventing us from using setdefault + result = cls.__dict__.get(name, default) + setattr(cls, name, result) + return result + + @cache + def statements_at_line(self, lineno): + """ + Returns the statement nodes overlapping the given line. + + Returns at most one statement unless semicolons are present. + + If the `text` attribute is not valid python, meaning + `tree` is None, returns an empty set. + + Otherwise, `Source.for_frame(frame).statements_at_line(frame.f_lineno)` + should return at least one statement. + """ + + return { + statement_containing_node(node) + for node in + self._nodes_by_line[lineno] + } + + @cache + def asttokens(self): + """ + Returns an ASTTokens object for getting the source of specific AST nodes. 
+ + See http://asttokens.readthedocs.io/en/latest/api-index.html + """ + from asttokens import ASTTokens # must be installed separately + return ASTTokens( + self.text, + tree=self.tree, + filename=self.filename, + ) + + @staticmethod + def decode_source(source): + if isinstance(source, bytes): + encoding = Source.detect_encoding(source) + source = source.decode(encoding) + return source + + @staticmethod + def detect_encoding(source): + return detect_encoding(io.BytesIO(source).readline)[0] + + def code_qualname(self, code): + """ + Imitates the __qualname__ attribute of functions for code objects. + Given: + + - A function `func` + - A frame `frame` for an execution of `func`, meaning: + `frame.f_code is func.__code__` + + `Source.for_frame(frame).code_qualname(frame.f_code)` + will be equal to `func.__qualname__`*. Works for Python 2 as well, + where of course no `__qualname__` attribute exists. + + Falls back to `code.co_name` if there is no appropriate qualname. + + Based on https://github.com/wbolster/qualname + + (* unless `func` is a lambda + nested inside another lambda on the same line, in which case + the outer lambda's qualname will be returned for the codes + of both lambdas) + """ + assert_(code.co_filename == self.filename) + return self._qualnames.get((code.co_name, code.co_firstlineno), code.co_name) + + +class Executing(object): + """ + Information about the operation a frame is currently executing. + + Generally you will just want `node`, which is the AST node being executed, + or None if it's unknown. 
+ + If a decorator is currently being called, then: + - `node` is a function or class definition + - `decorator` is the expression in `node.decorator_list` being called + - `statements == {node}` + """ + + def __init__(self, frame, source, node, stmts, decorator): + self.frame = frame + self.source = source + self.node = node + self.statements = stmts + self.decorator = decorator + + def code_qualname(self): + return self.source.code_qualname(self.frame.f_code) + + def text(self): + return self.source.asttokens().get_text(self.node) + + def text_range(self): + return self.source.asttokens().get_text_range(self.node) + + +class QualnameVisitor(ast.NodeVisitor): + def __init__(self): + super(QualnameVisitor, self).__init__() + self.stack = [] + self.qualnames = {} + + def add_qualname(self, node, name=None): + name = name or node.name + self.stack.append(name) + if getattr(node, 'decorator_list', ()): + lineno = node.decorator_list[0].lineno + else: + lineno = node.lineno + self.qualnames.setdefault((name, lineno), ".".join(self.stack)) + + def visit_FunctionDef(self, node, name=None): + self.add_qualname(node, name) + self.stack.append('<locals>') + if isinstance(node, ast.Lambda): + children = [node.body] + else: + children = node.body + for child in children: + self.visit(child) + self.stack.pop() + self.stack.pop() + + # Find lambdas in the function definition outside the body, + # e.g. 
decorators or default arguments + # Based on iter_child_nodes + for field, child in ast.iter_fields(node): + if field == 'body': + continue + if isinstance(child, ast.AST): + self.visit(child) + elif isinstance(child, list): + for grandchild in child: + if isinstance(grandchild, ast.AST): + self.visit(grandchild) + + visit_AsyncFunctionDef = visit_FunctionDef + + def visit_Lambda(self, node): + # noinspection PyTypeChecker + self.visit_FunctionDef(node, '<lambda>') + + def visit_ClassDef(self, node): + self.add_qualname(node) + self.generic_visit(node) + self.stack.pop() + + +future_flags = sum( + getattr(__future__, fname).compiler_flag + for fname in __future__.all_feature_names +) + + +def compile_similar_to(source, matching_code): + return compile( + source, + matching_code.co_filename, + 'exec', + flags=future_flags & matching_code.co_flags, + dont_inherit=True, + ) + + +sentinel = 'io8urthglkjdghvljusketgIYRFYUVGHFRTBGVHKGF78678957647698' + + +class NodeFinder(object): + def __init__(self, frame, stmts, tree, lasti): + assert_(stmts) + self.frame = frame + self.tree = tree + self.code = code = frame.f_code + self.is_pytest = any( + 'pytest' in name.lower() + for group in [code.co_names, code.co_varnames] + for name in group + ) + + if self.is_pytest: + self.ignore_linenos = frozenset(assert_linenos(tree)) + else: + self.ignore_linenos = frozenset() + + self.decorator = None + + self.instruction = instruction = self.get_actual_current_instruction(lasti) + op_name = instruction.opname + extra_filter = lambda e: True + + if op_name.startswith('CALL_'): + typ = ast.Call + elif op_name.startswith(('BINARY_SUBSCR', 'SLICE+')): + typ = ast.Subscript + elif op_name.startswith('BINARY_'): + typ = ast.BinOp + op_type = dict( + BINARY_POWER=ast.Pow, + BINARY_MULTIPLY=ast.Mult, + BINARY_MATRIX_MULTIPLY=getattr(ast, "MatMult", ()), + BINARY_FLOOR_DIVIDE=ast.FloorDiv, + BINARY_TRUE_DIVIDE=ast.Div, + BINARY_MODULO=ast.Mod, + BINARY_ADD=ast.Add, + BINARY_SUBTRACT=ast.Sub, + 
BINARY_LSHIFT=ast.LShift, + BINARY_RSHIFT=ast.RShift, + BINARY_AND=ast.BitAnd, + BINARY_XOR=ast.BitXor, + BINARY_OR=ast.BitOr, + )[op_name] + extra_filter = lambda e: isinstance(e.op, op_type) + elif op_name.startswith('UNARY_'): + typ = ast.UnaryOp + op_type = dict( + UNARY_POSITIVE=ast.UAdd, + UNARY_NEGATIVE=ast.USub, + UNARY_NOT=ast.Not, + UNARY_INVERT=ast.Invert, + )[op_name] + extra_filter = lambda e: isinstance(e.op, op_type) + elif op_name in ('LOAD_ATTR', 'LOAD_METHOD', 'LOOKUP_METHOD'): + typ = ast.Attribute + # `in` to handle private mangled attributes + extra_filter = lambda e: e.attr in instruction.argval + elif op_name in ('LOAD_NAME', 'LOAD_GLOBAL', 'LOAD_FAST', 'LOAD_DEREF', 'LOAD_CLASSDEREF'): + typ = ast.Name + if PY3 or instruction.argval: + extra_filter = lambda e: e.id == instruction.argval + elif op_name in ('COMPARE_OP', 'IS_OP', 'CONTAINS_OP'): + typ = ast.Compare + extra_filter = lambda e: len(e.ops) == 1 + else: + raise RuntimeError(op_name) + + with lock: + exprs = { + node + for stmt in stmts + for node in ast.walk(stmt) + if isinstance(node, typ) + if not (hasattr(node, "ctx") and not isinstance(node.ctx, ast.Load)) + if extra_filter(node) + if statement_containing_node(node) == stmt + } + + matching = list(self.matching_nodes(exprs)) + if not matching and typ == ast.Call: + self.find_decorator(stmts) + else: + self.result = only(matching) + + def find_decorator(self, stmts): + stmt = only(stmts) + assert_(isinstance(stmt, (ast.ClassDef, function_node_types))) + decorators = stmt.decorator_list + assert_(decorators) + line_instructions = [ + inst + for inst in self.clean_instructions(self.code) + if inst.lineno == self.frame.f_lineno + ] + last_decorator_instruction_index = [ + i + for i, inst in enumerate(line_instructions) + if inst.opname == "CALL_FUNCTION" + ][-1] + assert_( + line_instructions[last_decorator_instruction_index + 1].opname.startswith( + "STORE_" + ) + ) + decorator_instructions = line_instructions[ + 
last_decorator_instruction_index + - len(decorators) + + 1 : last_decorator_instruction_index + + 1 + ] + assert_({inst.opname for inst in decorator_instructions} == {"CALL_FUNCTION"}) + decorator_index = decorator_instructions.index(self.instruction) + decorator = decorators[::-1][decorator_index] + self.decorator = decorator + self.result = stmt + + def clean_instructions(self, code): + return [ + inst + for inst in get_instructions(code) + if inst.opname not in ("EXTENDED_ARG", "NOP") + if inst.lineno not in self.ignore_linenos + ] + + def get_original_clean_instructions(self): + result = self.clean_instructions(self.code) + + # pypy sometimes (when is not clear) + # inserts JUMP_IF_NOT_DEBUG instructions in bytecode + # If they're not present in our compiled instructions, + # ignore them in the original bytecode + if not any( + inst.opname == "JUMP_IF_NOT_DEBUG" + for inst in self.compile_instructions() + ): + result = [ + inst for inst in result + if inst.opname != "JUMP_IF_NOT_DEBUG" + ] + + return result + + def matching_nodes(self, exprs): + original_instructions = self.get_original_clean_instructions() + original_index = only( + i + for i, inst in enumerate(original_instructions) + if inst == self.instruction + ) + for expr_index, expr in enumerate(exprs): + setter = get_setter(expr) + # noinspection PyArgumentList + replacement = ast.BinOp( + left=expr, + op=ast.Pow(), + right=ast.Str(s=sentinel), + ) + ast.fix_missing_locations(replacement) + setter(replacement) + try: + instructions = self.compile_instructions() + finally: + setter(expr) + + if sys.version_info >= (3, 10): + try: + handle_jumps(instructions, original_instructions) + except Exception: + # Give other candidates a chance + if TESTING or expr_index < len(exprs) - 1: + continue + raise + + indices = [ + i + for i, instruction in enumerate(instructions) + if instruction.argval == sentinel + ] + + # There can be several indices when the bytecode is duplicated, + # as happens in a finally block 
in 3.9+ + # First we remove the opcodes caused by our modifications + for index_num, sentinel_index in enumerate(indices): + # Adjustment for removing sentinel instructions below + # in past iterations + sentinel_index -= index_num * 2 + + assert_(instructions.pop(sentinel_index).opname == 'LOAD_CONST') + assert_(instructions.pop(sentinel_index).opname == 'BINARY_POWER') + + # Then we see if any of the instruction indices match + for index_num, sentinel_index in enumerate(indices): + sentinel_index -= index_num * 2 + new_index = sentinel_index - 1 + + if new_index != original_index: + continue + + original_inst = original_instructions[original_index] + new_inst = instructions[new_index] + + # In Python 3.9+, changing 'not x in y' to 'not sentinel_transformation(x in y)' + # changes a CONTAINS_OP(invert=1) to CONTAINS_OP(invert=0),<sentinel stuff>,UNARY_NOT + if ( + original_inst.opname == new_inst.opname in ('CONTAINS_OP', 'IS_OP') + and original_inst.arg != new_inst.arg + and ( + original_instructions[original_index + 1].opname + != instructions[new_index + 1].opname == 'UNARY_NOT' + )): + # Remove the difference for the upcoming assert + instructions.pop(new_index + 1) + + # Check that the modified instructions don't have anything unexpected + # 3.10 is a bit too weird to assert this in all cases but things still work + if sys.version_info < (3, 10): + for inst1, inst2 in zip_longest( + original_instructions, instructions + ): + assert_(inst1 and inst2 and opnames_match(inst1, inst2)) + + yield expr + + def compile_instructions(self): + module_code = compile_similar_to(self.tree, self.code) + code = only(self.find_codes(module_code)) + return self.clean_instructions(code) + + def find_codes(self, root_code): + checks = [ + attrgetter('co_firstlineno'), + attrgetter('co_freevars'), + attrgetter('co_cellvars'), + lambda c: is_ipython_cell_code_name(c.co_name) or c.co_name, + ] + if not self.is_pytest: + checks += [ + attrgetter('co_names'), + 
attrgetter('co_varnames'), + ] + + def matches(c): + return all( + f(c) == f(self.code) + for f in checks + ) + + code_options = [] + if matches(root_code): + code_options.append(root_code) + + def finder(code): + for const in code.co_consts: + if not inspect.iscode(const): + continue + + if matches(const): + code_options.append(const) + finder(const) + + finder(root_code) + return code_options + + def get_actual_current_instruction(self, lasti): + """ + Get the instruction corresponding to the current + frame offset, skipping EXTENDED_ARG instructions + """ + # Don't use get_original_clean_instructions + # because we need the actual instructions including + # EXTENDED_ARG + instructions = list(get_instructions(self.code)) + index = only( + i + for i, inst in enumerate(instructions) + if inst.offset == lasti + ) + + while True: + instruction = instructions[index] + if instruction.opname != "EXTENDED_ARG": + return instruction + index += 1 + + +def non_sentinel_instructions(instructions, start): + """ + Yields (index, instruction) pairs excluding the basic + instructions introduced by the sentinel transformation + """ + skip_power = False + for i, inst in islice(enumerate(instructions), start, None): + if inst.argval == sentinel: + assert_(inst.opname == "LOAD_CONST") + skip_power = True + continue + elif skip_power: + assert_(inst.opname == "BINARY_POWER") + skip_power = False + continue + yield i, inst + + +def walk_both_instructions(original_instructions, original_start, instructions, start): + """ + Yields matching indices and instructions from the new and original instructions, + leaving out changes made by the sentinel transformation. 
+ """ + original_iter = islice(enumerate(original_instructions), original_start, None) + new_iter = non_sentinel_instructions(instructions, start) + inverted_comparison = False + while True: + try: + original_i, original_inst = next(original_iter) + new_i, new_inst = next(new_iter) + except StopIteration: + return + if ( + inverted_comparison + and original_inst.opname != new_inst.opname == "UNARY_NOT" + ): + new_i, new_inst = next(new_iter) + inverted_comparison = ( + original_inst.opname == new_inst.opname in ("CONTAINS_OP", "IS_OP") + and original_inst.arg != new_inst.arg + ) + yield original_i, original_inst, new_i, new_inst + + +def handle_jumps(instructions, original_instructions): + """ + Transforms instructions in place until it looks more like original_instructions. + This is only needed in 3.10+ where optimisations lead to more drastic changes + after the sentinel transformation. + Replaces JUMP instructions that aren't also present in original_instructions + with the sections that they jump to until a raise or return. + In some other cases duplication found in `original_instructions` + is replicated in `instructions`. 
+ """ + while True: + for original_i, original_inst, new_i, new_inst in walk_both_instructions( + original_instructions, 0, instructions, 0 + ): + if opnames_match(original_inst, new_inst): + continue + + if "JUMP" in new_inst.opname and "JUMP" not in original_inst.opname: + # Find where the new instruction is jumping to, ignoring + # instructions which have been copied in previous iterations + start = only( + i + for i, inst in enumerate(instructions) + if inst.offset == new_inst.argval + and not getattr(inst, "_copied", False) + ) + # Replace the jump instruction with the jumped to section of instructions + # That section may also be deleted if it's not similarly duplicated + # in original_instructions + instructions[new_i : new_i + 1] = handle_jump( + original_instructions, original_i, instructions, start + ) + else: + # Extract a section of original_instructions from original_i to return/raise + orig_section = [] + for section_inst in original_instructions[original_i:]: + orig_section.append(section_inst) + if section_inst.opname in ("RETURN_VALUE", "RAISE_VARARGS"): + break + else: + # No return/raise - this is just a mismatch we can't handle + raise AssertionError + + instructions[new_i:new_i] = only(find_new_matching(orig_section, instructions)) + + # instructions has been modified, the for loop can't sensibly continue + # Restart it from the beginning, checking for other issues + break + + else: # No mismatched jumps found, we're done + return + + +def find_new_matching(orig_section, instructions): + """ + Yields sections of `instructions` which match `orig_section`. + The yielded sections include sentinel instructions, but these + are ignored when checking for matches. 
+ """ + for start in range(len(instructions) - len(orig_section)): + indices, dup_section = zip( + *islice( + non_sentinel_instructions(instructions, start), + len(orig_section), + ) + ) + if len(dup_section) < len(orig_section): + return + if sections_match(orig_section, dup_section): + yield instructions[start:indices[-1] + 1] + + +def handle_jump(original_instructions, original_start, instructions, start): + """ + Returns the section of instructions starting at `start` and ending + with a RETURN_VALUE or RAISE_VARARGS instruction. + There should be a matching section in original_instructions starting at original_start. + If that section doesn't appear elsewhere in original_instructions, + then also delete the returned section of instructions. + """ + for original_j, original_inst, new_j, new_inst in walk_both_instructions( + original_instructions, original_start, instructions, start + ): + assert_(opnames_match(original_inst, new_inst)) + if original_inst.opname in ("RETURN_VALUE", "RAISE_VARARGS"): + inlined = deepcopy(instructions[start : new_j + 1]) + for inl in inlined: + inl._copied = True + orig_section = original_instructions[original_start : original_j + 1] + if not check_duplicates( + original_start, orig_section, original_instructions + ): + instructions[start : new_j + 1] = [] + return inlined + + +def check_duplicates(original_i, orig_section, original_instructions): + """ + Returns True if a section of original_instructions starting somewhere other + than original_i and matching orig_section is found, i.e. orig_section is duplicated. 
+ """ + for dup_start in range(len(original_instructions)): + if dup_start == original_i: + continue + dup_section = original_instructions[dup_start : dup_start + len(orig_section)] + if len(dup_section) < len(orig_section): + return False + if sections_match(orig_section, dup_section): + return True + + +def sections_match(orig_section, dup_section): + """ + Returns True if the given lists of instructions have matching linenos and opnames. + """ + return all( + ( + orig_inst.lineno == dup_inst.lineno + # POP_BLOCKs have been found to have differing linenos in innocent cases + or "POP_BLOCK" == orig_inst.opname == dup_inst.opname + ) + and opnames_match(orig_inst, dup_inst) + for orig_inst, dup_inst in zip(orig_section, dup_section) + ) + + +def opnames_match(inst1, inst2): + return ( + inst1.opname == inst2.opname + or "JUMP" in inst1.opname + and "JUMP" in inst2.opname + or (inst1.opname == "PRINT_EXPR" and inst2.opname == "POP_TOP") + or ( + inst1.opname in ("LOAD_METHOD", "LOOKUP_METHOD") + and inst2.opname == "LOAD_ATTR" + ) + or (inst1.opname == "CALL_METHOD" and inst2.opname == "CALL_FUNCTION") + ) + + +def get_setter(node): + parent = node.parent + for name, field in ast.iter_fields(parent): + if field is node: + return lambda new_node: setattr(parent, name, new_node) + elif isinstance(field, list): + for i, item in enumerate(field): + if item is node: + def setter(new_node): + field[i] = new_node + + return setter + + +lock = RLock() + + +@cache +def statement_containing_node(node): + while not isinstance(node, ast.stmt): + node = node.parent + return node + + +def assert_linenos(tree): + for node in ast.walk(tree): + if ( + hasattr(node, 'parent') and + hasattr(node, 'lineno') and + isinstance(statement_containing_node(node), ast.Assert) + ): + yield node.lineno + + +def _extract_ipython_statement(stmt): + # IPython separates each statement in a cell to be executed separately + # So NodeFinder should only compile one statement at a time or it + # will 
find a code mismatch. + while not isinstance(stmt.parent, ast.Module): + stmt = stmt.parent + # use `ast.parse` instead of `ast.Module` for better portability + # python3.8 changes the signature of `ast.Module` + # Inspired by https://github.com/pallets/werkzeug/pull/1552/files + tree = ast.parse("") + tree.body = [stmt] + ast.copy_location(tree, stmt) + return tree + + +def is_ipython_cell_code_name(code_name): + return bool(re.match(r"(<module>|<cell line: \d+>)$", code_name)) + + +def is_ipython_cell_filename(filename): + return re.search(r"<ipython-input-|[/\\]ipykernel_\d+[/\\]", filename) + + +def is_ipython_cell_code(code_obj): + return ( + is_ipython_cell_filename(code_obj.co_filename) and + is_ipython_cell_code_name(code_obj.co_name) + ) diff --git a/contrib/python/executing/executing/version.py b/contrib/python/executing/executing/version.py new file mode 100644 index 0000000000..d2825abd9f --- /dev/null +++ b/contrib/python/executing/executing/version.py @@ -0,0 +1 @@ +__version__ = '0.8.3'
\ No newline at end of file diff --git a/contrib/python/ipython/py3/.dist-info/METADATA b/contrib/python/ipython/py3/.dist-info/METADATA index 998a98b150..b2118bad26 100644 --- a/contrib/python/ipython/py3/.dist-info/METADATA +++ b/contrib/python/ipython/py3/.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.1 Name: ipython -Version: 7.33.0 +Version: 8.3.0 Summary: IPython: Productive Interactive Computing Home-page: https://ipython.org Author: The IPython Development Team @@ -15,6 +15,7 @@ Platform: Linux Platform: Mac OSX Platform: Windows Classifier: Framework :: IPython +Classifier: Framework :: Jupyter Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: BSD License @@ -22,34 +23,42 @@ Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3 :: Only Classifier: Topic :: System :: Shells -Requires-Python: >=3.7 +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst License-File: LICENSE -Requires-Dist: setuptools (>=18.5) -Requires-Dist: jedi (>=0.13) +Requires-Dist: backcall Requires-Dist: decorator +Requires-Dist: jedi (>=0.13) +Requires-Dist: matplotlib-inline Requires-Dist: pickleshare -Requires-Dist: traitlets (>=4.2) Requires-Dist: prompt-toolkit (!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0) -Requires-Dist: pygments -Requires-Dist: backcall -Requires-Dist: matplotlib-inline +Requires-Dist: pygments (>=2.4.0) +Requires-Dist: setuptools (>=18.5) +Requires-Dist: stack-data +Requires-Dist: traitlets (>=5) Requires-Dist: pexpect (>4.3) ; sys_platform != "win32" Requires-Dist: appnope ; sys_platform == "darwin" Requires-Dist: colorama ; sys_platform == "win32" Provides-Extra: all +Requires-Dist: black ; extra == 'all' Requires-Dist: Sphinx (>=1.3) ; extra == 'all' Requires-Dist: ipykernel ; extra == 'all' -Requires-Dist: ipyparallel ; extra == 'all' -Requires-Dist: ipywidgets ; extra == 'all' 
Requires-Dist: nbconvert ; extra == 'all' Requires-Dist: nbformat ; extra == 'all' -Requires-Dist: nose (>=0.10.1) ; extra == 'all' +Requires-Dist: ipywidgets ; extra == 'all' Requires-Dist: notebook ; extra == 'all' -Requires-Dist: numpy (>=1.17) ; extra == 'all' -Requires-Dist: pygments ; extra == 'all' +Requires-Dist: ipyparallel ; extra == 'all' Requires-Dist: qtconsole ; extra == 'all' -Requires-Dist: requests ; extra == 'all' +Requires-Dist: pytest (<7.1) ; extra == 'all' +Requires-Dist: pytest-asyncio ; extra == 'all' Requires-Dist: testpath ; extra == 'all' +Requires-Dist: curio ; extra == 'all' +Requires-Dist: matplotlib (!=3.2.0) ; extra == 'all' +Requires-Dist: numpy (>=1.19) ; extra == 'all' +Requires-Dist: pandas ; extra == 'all' +Requires-Dist: trio ; extra == 'all' +Provides-Extra: black +Requires-Dist: black ; extra == 'black' Provides-Extra: doc Requires-Dist: Sphinx (>=1.3) ; extra == 'doc' Provides-Extra: kernel @@ -59,57 +68,62 @@ Requires-Dist: nbconvert ; extra == 'nbconvert' Provides-Extra: nbformat Requires-Dist: nbformat ; extra == 'nbformat' Provides-Extra: notebook -Requires-Dist: notebook ; extra == 'notebook' Requires-Dist: ipywidgets ; extra == 'notebook' +Requires-Dist: notebook ; extra == 'notebook' Provides-Extra: parallel Requires-Dist: ipyparallel ; extra == 'parallel' Provides-Extra: qtconsole Requires-Dist: qtconsole ; extra == 'qtconsole' Provides-Extra: terminal Provides-Extra: test -Requires-Dist: nose (>=0.10.1) ; extra == 'test' -Requires-Dist: requests ; extra == 'test' +Requires-Dist: pytest (<7.1) ; extra == 'test' +Requires-Dist: pytest-asyncio ; extra == 'test' Requires-Dist: testpath ; extra == 'test' -Requires-Dist: pygments ; extra == 'test' -Requires-Dist: nbformat ; extra == 'test' -Requires-Dist: ipykernel ; extra == 'test' -Requires-Dist: numpy (>=1.17) ; extra == 'test' - +Provides-Extra: test_extra +Requires-Dist: pytest (<7.1) ; extra == 'test_extra' +Requires-Dist: pytest-asyncio ; extra == 'test_extra' 
+Requires-Dist: testpath ; extra == 'test_extra' +Requires-Dist: curio ; extra == 'test_extra' +Requires-Dist: matplotlib (!=3.2.0) ; extra == 'test_extra' +Requires-Dist: nbformat ; extra == 'test_extra' +Requires-Dist: numpy (>=1.19) ; extra == 'test_extra' +Requires-Dist: pandas ; extra == 'test_extra' +Requires-Dist: trio ; extra == 'test_extra' IPython provides a rich toolkit to help you make the most out of using Python interactively. Its main components are: -* A powerful interactive Python shell -* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter - notebooks and other interactive frontends. + * A powerful interactive Python shell + * A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter + notebooks and other interactive frontends. The enhanced interactive Python shells have the following main features: -* Comprehensive object introspection. + * Comprehensive object introspection. -* Input history, persistent across sessions. + * Input history, persistent across sessions. -* Caching of output results during a session with automatically generated - references. + * Caching of output results during a session with automatically generated + references. -* Extensible tab completion, with support by default for completion of python - variables and keywords, filenames and function keywords. + * Extensible tab completion, with support by default for completion of python + variables and keywords, filenames and function keywords. -* Extensible system of 'magic' commands for controlling the environment and - performing many tasks related either to IPython or the operating system. + * Extensible system of 'magic' commands for controlling the environment and + performing many tasks related either to IPython or the operating system. -* A rich configuration system with easy switching between different setups - (simpler than changing $PYTHONSTARTUP environment variables every time). 
+ * A rich configuration system with easy switching between different setups + (simpler than changing $PYTHONSTARTUP environment variables every time). -* Session logging and reloading. + * Session logging and reloading. -* Extensible syntax processing for special purpose situations. + * Extensible syntax processing for special purpose situations. -* Access to the system shell with user-extensible alias system. + * Access to the system shell with user-extensible alias system. -* Easily embeddable in other Python programs and GUIs. + * Easily embeddable in other Python programs and GUIs. -* Integrated access to the pdb debugger and the Python profiler. + * Integrated access to the pdb debugger and the Python profiler. The latest development version is always available from IPython's `GitHub site <http://github.com/ipython>`_. diff --git a/contrib/python/ipython/py3/.dist-info/entry_points.txt b/contrib/python/ipython/py3/.dist-info/entry_points.txt index 30c576bd75..3de4479bae 100644 --- a/contrib/python/ipython/py3/.dist-info/entry_points.txt +++ b/contrib/python/ipython/py3/.dist-info/entry_points.txt @@ -1,6 +1,4 @@ [console_scripts] -iptest = IPython.testing.iptestcontroller:main -iptest3 = IPython.testing.iptestcontroller:main ipython = IPython:start_ipython ipython3 = IPython:start_ipython diff --git a/contrib/python/ipython/py3/IPython/__init__.py b/contrib/python/ipython/py3/IPython/__init__.py index c17ec76a60..7ebb80b362 100644 --- a/contrib/python/ipython/py3/IPython/__init__.py +++ b/contrib/python/ipython/py3/IPython/__init__.py @@ -1,4 +1,3 @@ -# encoding: utf-8 """ IPython: tools for interactive and parallel computing in Python. @@ -27,24 +26,22 @@ import sys #----------------------------------------------------------------------------- # Don't forget to also update setup.py when this changes! -if sys.version_info < (3, 6): +if sys.version_info < (3, 8): raise ImportError( -""" -IPython 7.10+ supports Python 3.6 and above. 
+ """ +IPython 8+ supports Python 3.8 and above, following NEP 29. When using Python 2.7, please install IPython 5.x LTS Long Term Support version. Python 3.3 and 3.4 were supported up to IPython 6.x. Python 3.5 was supported with IPython 7.0 to 7.9. +Python 3.6 was supported with IPython up to 7.16. +Python 3.7 was still supported with the 7.x branch. See IPython `README.rst` file for more information: https://github.com/ipython/ipython/blob/master/README.rst -""") - -# Make it easy to import extensions - they are always directly on pythonpath. -# Therefore, non-IPython modules can be added to extensions directory. -# This should probably be in ipapp.py. -sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) +""" + ) #----------------------------------------------------------------------------- # Setup the top level names @@ -56,7 +53,6 @@ from .core.application import Application from .terminal.embed import embed from .core.interactiveshell import InteractiveShell -from .testing import test from .utils.sysinfo import sys_info from .utils.frame import extract_module_locals @@ -72,20 +68,19 @@ __patched_cves__ = {"CVE-2022-21699"} def embed_kernel(module=None, local_ns=None, **kwargs): """Embed and start an IPython kernel in a given scope. - + If you don't want the kernel to initialize the namespace from the scope of the surrounding function, and/or you want to load full IPython configuration, you probably want `IPython.start_kernel()` instead. - + Parameters ---------- module : types.ModuleType, optional The module to load into IPython globals (default: caller) local_ns : dict, optional The namespace to load into IPython user namespace (default: caller) - - kwargs : various, optional + **kwargs : various, optional Further keyword args are relayed to the IPKernelApp constructor, allowing configuration of the Kernel. Will only have an effect on the first embed_kernel call for a given process. 
@@ -103,26 +98,25 @@ def embed_kernel(module=None, local_ns=None, **kwargs): def start_ipython(argv=None, **kwargs): """Launch a normal IPython instance (as opposed to embedded) - + `IPython.embed()` puts a shell in a particular calling scope, such as a function or method for debugging purposes, which is often not desirable. - + `start_ipython()` does full, regular IPython initialization, including loading startup files, configuration, etc. much of which is skipped by `embed()`. - + This is a public API method, and will survive implementation changes. - + Parameters ---------- - argv : list or None, optional If unspecified or None, IPython will parse command-line options from sys.argv. To prevent any command-line parsing, pass an empty list: `argv=[]`. user_ns : dict, optional specify this dictionary to initialize the IPython user namespace with particular values. - kwargs : various, optional + **kwargs : various, optional Any other kwargs will be passed to the Application constructor, such as `config`. """ @@ -131,26 +125,32 @@ def start_ipython(argv=None, **kwargs): def start_kernel(argv=None, **kwargs): """Launch a normal IPython kernel instance (as opposed to embedded) - + `IPython.embed_kernel()` puts a shell in a particular calling scope, such as a function or method for debugging purposes, which is often not desirable. - + `start_kernel()` does full, regular IPython initialization, including loading startup files, configuration, etc. much of which is skipped by `embed()`. - + Parameters ---------- - argv : list or None, optional If unspecified or None, IPython will parse command-line options from sys.argv. To prevent any command-line parsing, pass an empty list: `argv=[]`. user_ns : dict, optional specify this dictionary to initialize the IPython user namespace with particular values. - kwargs : various, optional + **kwargs : various, optional Any other kwargs will be passed to the Application constructor, such as `config`. 
""" - from IPython.kernel.zmq.kernelapp import launch_new_instance + import warnings + + warnings.warn( + "start_kernel is deprecated since IPython 8.0, use from `ipykernel.kernelapp.launch_new_instance`", + DeprecationWarning, + stacklevel=2, + ) + from ipykernel.kernelapp import launch_new_instance return launch_new_instance(argv=argv, **kwargs) diff --git a/contrib/python/ipython/py3/IPython/config.py b/contrib/python/ipython/py3/IPython/config.py deleted file mode 100644 index 964f46f10a..0000000000 --- a/contrib/python/ipython/py3/IPython/config.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.config imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from .utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.config` package has been deprecated since IPython 4.0. " - "You should import from traitlets.config instead.", ShimWarning) - - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.config'] = ShimModule(src='IPython.config', mirror='traitlets.config') diff --git a/contrib/python/ipython/py3/IPython/core/application.py b/contrib/python/ipython/py3/IPython/core/application.py index b319888b59..0cdea5c69b 100644 --- a/contrib/python/ipython/py3/IPython/core/application.py +++ b/contrib/python/ipython/py3/IPython/core/application.py @@ -20,6 +20,8 @@ import os import shutil import sys +from pathlib import Path + from traitlets.config.application import Application, catch_config_error from traitlets.config.loader import ConfigFileNotFound, PyFileConfigLoader from IPython.core import release, crashhandler @@ -31,10 +33,10 @@ from traitlets import ( default, observe, ) -if os.name == 'nt': - programdata = os.environ.get('PROGRAMDATA', None) - if programdata: - SYSTEM_CONFIG_DIRS = 
[os.path.join(programdata, 'ipython')] +if os.name == "nt": + programdata = os.environ.get("PROGRAMDATA", None) + if programdata is not None: + SYSTEM_CONFIG_DIRS = [str(Path(programdata) / "ipython")] else: # PROGRAMDATA is not defined by default on XP. SYSTEM_CONFIG_DIRS = [] else: @@ -64,27 +66,49 @@ else: # aliases and flags -base_aliases = { - 'profile-dir' : 'ProfileDir.location', - 'profile' : 'BaseIPythonApplication.profile', - 'ipython-dir' : 'BaseIPythonApplication.ipython_dir', - 'log-level' : 'Application.log_level', - 'config' : 'BaseIPythonApplication.extra_config_file', -} - -base_flags = dict( - debug = ({'Application' : {'log_level' : logging.DEBUG}}, - "set log level to logging.DEBUG (maximize logging output)"), - quiet = ({'Application' : {'log_level' : logging.CRITICAL}}, - "set log level to logging.CRITICAL (minimize logging output)"), - init = ({'BaseIPythonApplication' : { - 'copy_config_files' : True, - 'auto_create' : True} - }, """Initialize profile with default config files. This is equivalent +base_aliases = {} +if isinstance(Application.aliases, dict): + # traitlets 5 + base_aliases.update(Application.aliases) +base_aliases.update( + { + "profile-dir": "ProfileDir.location", + "profile": "BaseIPythonApplication.profile", + "ipython-dir": "BaseIPythonApplication.ipython_dir", + "log-level": "Application.log_level", + "config": "BaseIPythonApplication.extra_config_file", + } +) + +base_flags = dict() +if isinstance(Application.flags, dict): + # traitlets 5 + base_flags.update(Application.flags) +base_flags.update( + dict( + debug=( + {"Application": {"log_level": logging.DEBUG}}, + "set log level to logging.DEBUG (maximize logging output)", + ), + quiet=( + {"Application": {"log_level": logging.CRITICAL}}, + "set log level to logging.CRITICAL (minimize logging output)", + ), + init=( + { + "BaseIPythonApplication": { + "copy_config_files": True, + "auto_create": True, + } + }, + """Initialize profile with default config files. 
This is equivalent to running `ipython profile create <profile>` prior to startup. - """) + """, + ), + ) ) + class ProfileAwareConfigLoader(PyFileConfigLoader): """A Python file config loader that is aware of IPython profiles.""" def load_subconfig(self, fname, path=None, profile=None): @@ -161,6 +185,17 @@ class BaseIPythonApplication(Application): get_ipython_package_dir(), u'config', u'profile', change['new'] ) + add_ipython_dir_to_sys_path = Bool( + False, + """Should the IPython profile directory be added to sys path ? + + This option was non-existing before IPython 8.0, and ipython_dir was added to + sys path to allow import of extensions present there. This was historical + baggage from when pip did not exist. This now default to false, + but can be set to true for legacy reasons. + """, + ).tag(config=True) + ipython_dir = Unicode( help=""" The name of the IPython directory. This directory is used for logging @@ -232,16 +267,6 @@ class BaseIPythonApplication(Application): # Various stages of Application creation #------------------------------------------------------------------------- - deprecated_subcommands = {} - - def initialize_subcommand(self, subc, argv=None): - if subc in self.deprecated_subcommands: - self.log.warning("Subcommand `ipython {sub}` is deprecated and will be removed " - "in future versions.".format(sub=subc)) - self.log.warning("You likely want to use `jupyter {sub}` in the " - "future".format(sub=subc)) - return super(BaseIPythonApplication, self).initialize_subcommand(subc, argv) - def init_crash_handler(self): """Create a crash handler, typically setting sys.excepthook to it.""" self.crash_handler = self.crash_handler_class(self) @@ -252,7 +277,7 @@ class BaseIPythonApplication(Application): def excepthook(self, etype, evalue, tb): """this is sys.excepthook after init_crashhandler - + set self.verbose_crash=True to use our full crashhandler, instead of a regular traceback with a short message (crash_handler_lite) """ @@ -270,21 
+295,24 @@ class BaseIPythonApplication(Application): str_old = os.path.abspath(old) if str_old in sys.path: sys.path.remove(str_old) - str_path = os.path.abspath(new) - sys.path.append(str_path) - ensure_dir_exists(new) - readme = os.path.join(new, 'README') - readme_src = os.path.join(get_ipython_package_dir(), u'config', u'profile', 'README') - if not os.path.exists(readme) and os.path.exists(readme_src): - shutil.copy(readme_src, readme) - for d in ('extensions', 'nbextensions'): - path = os.path.join(new, d) - try: - ensure_dir_exists(path) - except OSError as e: - # this will not be EEXIST - self.log.error("couldn't create path %s: %s", path, e) - self.log.debug("IPYTHONDIR set to: %s" % new) + if self.add_ipython_dir_to_sys_path: + str_path = os.path.abspath(new) + sys.path.append(str_path) + ensure_dir_exists(new) + readme = os.path.join(new, "README") + readme_src = os.path.join( + get_ipython_package_dir(), "config", "profile", "README" + ) + if not os.path.exists(readme) and os.path.exists(readme_src): + shutil.copy(readme_src, readme) + for d in ("extensions", "nbextensions"): + path = os.path.join(new, d) + try: + ensure_dir_exists(path) + except OSError as e: + # this will not be EEXIST + self.log.error("couldn't create path %s: %s", path, e) + self.log.debug("IPYTHONDIR set to: %s" % new) def load_config_file(self, suppress_errors=IPYTHON_SUPPRESS_CONFIG_ERRORS): """Load the config file. 
@@ -409,14 +437,15 @@ class BaseIPythonApplication(Application): self.config_file_paths.extend(ENV_CONFIG_DIRS) self.config_file_paths.extend(SYSTEM_CONFIG_DIRS) # copy config files - path = self.builtin_profile_dir + path = Path(self.builtin_profile_dir) if self.copy_config_files: src = self.profile cfg = self.config_file_name - if path and os.path.exists(os.path.join(path, cfg)): - self.log.warning("Staging %r from %s into %r [overwrite=%s]"%( - cfg, src, self.profile_dir.location, self.overwrite) + if path and (path / cfg).exists(): + self.log.warning( + "Staging %r from %s into %r [overwrite=%s]" + % (cfg, src, self.profile_dir.location, self.overwrite) ) self.profile_dir.copy_config_file(cfg, path=path, overwrite=self.overwrite) else: @@ -425,9 +454,9 @@ class BaseIPythonApplication(Application): # Still stage *bundled* config files, but not generated ones # This is necessary for `ipython profile=sympy` to load the profile # on the first go - files = glob.glob(os.path.join(path, '*.py')) + files = path.glob("*.py") for fullpath in files: - cfg = os.path.basename(fullpath) + cfg = fullpath.name if self.profile_dir.copy_config_file(cfg, path=path, overwrite=False): # file was copied self.log.warning("Staging bundled %s from %s into %r"%( @@ -438,11 +467,10 @@ class BaseIPythonApplication(Application): def stage_default_config_file(self): """auto generate default config file, and stage it into the profile.""" s = self.generate_config_file() - fname = os.path.join(self.profile_dir.location, self.config_file_name) - if self.overwrite or not os.path.exists(fname): - self.log.warning("Generating default config file: %r"%(fname)) - with open(fname, 'w') as f: - f.write(s) + config_file = Path(self.profile_dir.location) / self.config_file_name + if self.overwrite or not config_file.exists(): + self.log.warning("Generating default config file: %r" % (config_file)) + config_file.write_text(s, encoding="utf-8") @catch_config_error def initialize(self, argv=None): diff 
--git a/contrib/python/ipython/py3/IPython/core/async_helpers.py b/contrib/python/ipython/py3/IPython/core/async_helpers.py index fca78def85..0e7db0bb54 100644 --- a/contrib/python/ipython/py3/IPython/core/async_helpers.py +++ b/contrib/python/ipython/py3/IPython/core/async_helpers.py @@ -12,37 +12,89 @@ Python semantics. import ast -import sys import asyncio import inspect -from textwrap import dedent, indent +from functools import wraps +_asyncio_event_loop = None -class _AsyncIORunner: - def __init__(self): - self._loop = None - - @property - def loop(self): - """Always returns a non-closed event loop""" - if self._loop is None or self._loop.is_closed(): - policy = asyncio.get_event_loop_policy() - self._loop = policy.new_event_loop() - policy.set_event_loop(self._loop) - return self._loop +def get_asyncio_loop(): + """asyncio has deprecated get_event_loop + + Replicate it here, with our desired semantics: + + - always returns a valid, not-closed loop + - not thread-local like asyncio's, + because we only want one loop for IPython + - if called from inside a coroutine (e.g. in ipykernel), + return the running loop + + .. versionadded:: 8.0 + """ + try: + return asyncio.get_running_loop() + except RuntimeError: + # not inside a coroutine, + # track our own global + pass + + # not thread-local like asyncio's, + # because we only track one event loop to run for IPython itself, + # always in the main thread. 
+ global _asyncio_event_loop + if _asyncio_event_loop is None or _asyncio_event_loop.is_closed(): + _asyncio_event_loop = asyncio.new_event_loop() + return _asyncio_event_loop + + +class _AsyncIORunner: def __call__(self, coro): """ Handler for asyncio autoawait """ - return self.loop.run_until_complete(coro) + return get_asyncio_loop().run_until_complete(coro) def __str__(self): - return 'asyncio' + return "asyncio" + _asyncio_runner = _AsyncIORunner() +class _AsyncIOProxy: + """Proxy-object for an asyncio + + Any coroutine methods will be wrapped in event_loop.run_ + """ + + def __init__(self, obj, event_loop): + self._obj = obj + self._event_loop = event_loop + + def __repr__(self): + return f"<_AsyncIOProxy({self._obj!r})>" + + def __getattr__(self, key): + attr = getattr(self._obj, key) + if inspect.iscoroutinefunction(attr): + # if it's a coroutine method, + # return a threadsafe wrapper onto the _current_ asyncio loop + @wraps(attr) + def _wrapped(*args, **kwargs): + concurrent_future = asyncio.run_coroutine_threadsafe( + attr(*args, **kwargs), self._event_loop + ) + return asyncio.wrap_future(concurrent_future) + + return _wrapped + else: + return attr + + def __dir__(self): + return dir(self._obj) + + def _curio_runner(coroutine): """ handler for curio autoawait @@ -72,7 +124,6 @@ def _pseudo_sync_runner(coro): See discussion in https://github.com/python-trio/trio/issues/608, Credit to Nathaniel Smith - """ try: coro.send(None) @@ -85,69 +136,6 @@ def _pseudo_sync_runner(coro): ) -def _asyncify(code: str) -> str: - """wrap code in async def definition. - - And setup a bit of context to run it later. 
- """ - res = dedent( - """ - async def __wrapper__(): - try: - {usercode} - finally: - locals() - """ - ).format(usercode=indent(code, " " * 8)) - return res - - -class _AsyncSyntaxErrorVisitor(ast.NodeVisitor): - """ - Find syntax errors that would be an error in an async repl, but because - the implementation involves wrapping the repl in an async function, it - is erroneously allowed (e.g. yield or return at the top level) - """ - def __init__(self): - if sys.version_info >= (3,8): - raise ValueError('DEPRECATED in Python 3.8+') - self.depth = 0 - super().__init__() - - def generic_visit(self, node): - func_types = (ast.FunctionDef, ast.AsyncFunctionDef) - invalid_types_by_depth = { - 0: (ast.Return, ast.Yield, ast.YieldFrom), - 1: (ast.Nonlocal,) - } - - should_traverse = self.depth < max(invalid_types_by_depth.keys()) - if isinstance(node, func_types) and should_traverse: - self.depth += 1 - super().generic_visit(node) - self.depth -= 1 - elif isinstance(node, invalid_types_by_depth[self.depth]): - raise SyntaxError() - else: - super().generic_visit(node) - - -def _async_parse_cell(cell: str) -> ast.AST: - """ - This is a compatibility shim for pre-3.7 when async outside of a function - is a syntax error at the parse stage. - - It will return an abstract syntax tree parsed as if async and await outside - of a function were not a syntax error. - """ - if sys.version_info < (3, 7): - # Prior to 3.7 you need to asyncify before parse - wrapped_parse_tree = ast.parse(_asyncify(cell)) - return wrapped_parse_tree.body[0].body[0] - else: - return ast.parse(cell) - - def _should_be_async(cell: str) -> bool: """Detect if a block of code need to be wrapped in an `async def` @@ -159,25 +147,10 @@ def _should_be_async(cell: str) -> bool: Not handled yet: If the block of code has a return statement as the top level, it will be seen as async. This is a know limitation. 
""" - if sys.version_info > (3, 8): - try: - code = compile(cell, "<>", "exec", flags=getattr(ast,'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0)) - return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE - except (SyntaxError, MemoryError): - return False try: - # we can't limit ourself to ast.parse, as it __accepts__ to parse on - # 3.7+, but just does not _compile_ - code = compile(cell, "<>", "exec") + code = compile( + cell, "<>", "exec", flags=getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0) + ) + return inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE except (SyntaxError, MemoryError): - try: - parse_tree = _async_parse_cell(cell) - - # Raise a SyntaxError if there are top-level return or yields - v = _AsyncSyntaxErrorVisitor() - v.visit(parse_tree) - - except (SyntaxError, MemoryError): - return False - return True - return False + return False diff --git a/contrib/python/ipython/py3/IPython/core/autocall.py b/contrib/python/ipython/py3/IPython/core/autocall.py index bab7f859c9..54beec3f58 100644 --- a/contrib/python/ipython/py3/IPython/core/autocall.py +++ b/contrib/python/ipython/py3/IPython/core/autocall.py @@ -40,10 +40,10 @@ class IPyAutocall(object): self._ip = ip def set_ip(self, ip): - """ Will be used to set _ip point to current ipython instance b/f call - + """Will be used to set _ip point to current ipython instance b/f call + Override this method if you don't want this to happen. - + """ self._ip = ip diff --git a/contrib/python/ipython/py3/IPython/core/compilerop.py b/contrib/python/ipython/py3/IPython/core/compilerop.py index 50672a1954..b43e570b3a 100644 --- a/contrib/python/ipython/py3/IPython/core/compilerop.py +++ b/contrib/python/ipython/py3/IPython/core/compilerop.py @@ -92,6 +92,10 @@ class CachingCompiler(codeop.Compile): # (otherwise we'd lose our tracebacks). linecache.checkcache = check_linecache_ipython + # Caching a dictionary { filename: execution_count } for nicely + # rendered tracebacks. 
The filename corresponds to the filename + # argument used for the builtins.compile function. + self._filename_map = {} def ast_parse(self, source, filename='<unknown>', symbol='exec'): """Parse code to an AST with the current compiler flags active. @@ -118,12 +122,12 @@ class CachingCompiler(codeop.Compile): Parameters ---------- raw_code : str - The raw cell code. + The raw cell code. transformed_code : str - The executable Python source code to cache and compile. + The executable Python source code to cache and compile. number : int - A number which forms part of the code's name. Used for the execution - counter. + A number which forms part of the code's name. Used for the execution + counter. Returns ------- @@ -137,12 +141,12 @@ class CachingCompiler(codeop.Compile): Parameters ---------- transformed_code : str - The executable Python source code to cache and compile. + The executable Python source code to cache and compile. number : int - A number which forms part of the code's name. Used for the execution - counter. + A number which forms part of the code's name. Used for the execution + counter. raw_code : str - The raw code before transformation, if None, set to `transformed_code`. + The raw code before transformation, if None, set to `transformed_code`. Returns ------- @@ -153,6 +157,10 @@ class CachingCompiler(codeop.Compile): raw_code = transformed_code name = self.get_code_name(raw_code, transformed_code, number) + + # Save the execution count + self._filename_map[name] = number + entry = ( len(transformed_code), time.time(), diff --git a/contrib/python/ipython/py3/IPython/core/completer.py b/contrib/python/ipython/py3/IPython/core/completer.py index 6c6fa7e7e5..fcc9d20d59 100644 --- a/contrib/python/ipython/py3/IPython/core/completer.py +++ b/contrib/python/ipython/py3/IPython/core/completer.py @@ -35,7 +35,7 @@ or using unicode completion: .. 
code:: - \\greek small letter alpha<tab> + \\GREEK SMALL LETTER ALPHA<tab> α @@ -50,7 +50,7 @@ Backward latex completion It is sometime challenging to know how to type a character, if you are using IPython, or any compatible frontend you can prepend backslash to the character -and press `<tab>` to expand it to its latex form. +and press ``<tab>`` to expand it to its latex form. .. code:: @@ -121,26 +121,29 @@ import string import sys import time import unicodedata +import uuid import warnings from contextlib import contextmanager from importlib import import_module from types import SimpleNamespace -from typing import Iterable, Iterator, List, Tuple +from typing import Iterable, Iterator, List, Tuple, Union, Any, Sequence, Dict, NamedTuple, Pattern, Optional from IPython.core.error import TryNext from IPython.core.inputtransformer2 import ESC_MAGIC from IPython.core.latex_symbols import latex_symbols, reverse_latex_symbol from IPython.core.oinspect import InspectColors +from IPython.testing.skipdoctest import skip_doctest from IPython.utils import generics from IPython.utils.dir2 import dir2, get_real_method +from IPython.utils.path import ensure_dir_exists from IPython.utils.process import arg_split -from traitlets import Bool, Enum, Int, observe +from traitlets import Bool, Enum, Int, List as ListTrait, Unicode, default, observe from traitlets.config.configurable import Configurable import __main__ # skip module docstests -skip_doctest = True +__skip_doctest__ = True try: import jedi @@ -154,6 +157,14 @@ except ImportError: # Globals #----------------------------------------------------------------------------- +# ranges where we have most of the valid unicode names. We could be more finer +# grained but is it worth it for performance While unicode have character in the +# range 0, 0x110000, we seem to have name for about 10% of those. (131808 as I +# write this). 
With below range we cover them all, with a density of ~67% +# biggest next gap we consider only adds up about 1% density and there are 600 +# gaps that would need hard coding. +_UNICODE_RANGES = [(32, 0x3134b), (0xe0001, 0xe01f0)] + # Public API __all__ = ['Completer','IPCompleter'] @@ -167,14 +178,6 @@ else: MATCHES_LIMIT = 500 -class Sentinel: - def __repr__(self): - return "<deprecated sentinel>" - - -_deprecation_readline_sentinel = Sentinel() - - class ProvisionalCompleterWarning(FutureWarning): """ Exception raise by an experimental feature in this module. @@ -186,11 +189,11 @@ class ProvisionalCompleterWarning(FutureWarning): warnings.filterwarnings('error', category=ProvisionalCompleterWarning) + +@skip_doctest @contextmanager def provisionalcompleter(action='ignore'): """ - - This context manager has to be used in any place where unstable completer behavior and API may be called. @@ -260,17 +263,17 @@ def expand_user(path:str) -> Tuple[str, bool, str]: Parameters ---------- path : str - String to be expanded. If no ~ is present, the output is the same as the - input. + String to be expanded. If no ~ is present, the output is the same as the + input. Returns ------- newpath : str - Result of ~ expansion in the input path. + Result of ~ expansion in the input path. tilde_expand : bool - Whether any expansion was performed or not. + Whether any expansion was performed or not. tilde_val : str - The value that ~ was replaced with. + The value that ~ was replaced with. """ # Default values tilde_expand = False @@ -429,20 +432,17 @@ def _deduplicate_completions(text: str, completions: _IC)-> _IC: Parameters ---------- - text: str + text : str text that should be completed. - completions: Iterator[Completion] + completions : Iterator[Completion] iterator over the completions to deduplicate Yields ------ `Completions` objects - - Completions coming from multiple sources, may be different but end up having the same effect when applied to ``text``. 
If this is the case, this will consider completions as equal and only emit the first encountered. - Not folded in `completions()` yet for debugging purpose, and to detect when the IPython completer does return things that Jedi does not, but should be at some point. @@ -462,7 +462,7 @@ def _deduplicate_completions(text: str, completions: _IC)-> _IC: seen.add(new_text) -def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC: +def rectify_completions(text: str, completions: _IC, *, _debug: bool = False) -> _IC: """ Rectify a set of completions to all have the same ``start`` and ``end`` @@ -475,12 +475,15 @@ def rectify_completions(text: str, completions: _IC, *, _debug=False)->_IC: Parameters ---------- - text: str + text : str text that should be completed. - completions: Iterator[Completion] + completions : Iterator[Completion] iterator over the completions to rectify + _debug : bool + Log failed completion - + Notes + ----- :any:`jedi.api.classes.Completion` s returned by Jedi may not have the same start and end, though the Jupyter Protocol requires them to behave like so. This will readjust the completion to have the same ``start`` and ``end`` by padding both @@ -582,11 +585,11 @@ class Completer(Configurable): greedy = Bool(False, help="""Activate greedy completion - PENDING DEPRECTION. this is now mostly taken care of with Jedi. + PENDING DEPRECATION. this is now mostly taken care of with Jedi. This will enable completion on elements of lists, results of function calls, etc., but can be unsafe because the code is actually evaluated on TAB. - """ + """, ).tag(config=True) use_jedi = Bool(default_value=JEDI_INSTALLED, @@ -609,8 +612,6 @@ class Completer(Configurable): "Includes completion of latex commands, unicode names, and expanding " "unicode characters back to latex commands.").tag(config=True) - - def __init__(self, namespace=None, global_namespace=None, **kwargs): """Create a new completer for the command line. 
@@ -757,44 +758,77 @@ def get__all__entries(obj): return [w for w in words if isinstance(w, str)] -def match_dict_keys(keys: List[str], prefix: str, delims: str): +def match_dict_keys(keys: List[Union[str, bytes, Tuple[Union[str, bytes]]]], prefix: str, delims: str, + extra_prefix: Optional[Tuple[str, bytes]]=None) -> Tuple[str, int, List[str]]: """Used by dict_key_matches, matching the prefix to a list of keys Parameters - ========== - keys: + ---------- + keys list of keys in dictionary currently being completed. - prefix: - Part of the text already typed by the user. e.g. `mydict[b'fo` - delims: + prefix + Part of the text already typed by the user. E.g. `mydict[b'fo` + delims String of delimiters to consider when finding the current key. + extra_prefix : optional + Part of the text already typed in multi-key index cases. E.g. for + `mydict['foo', "bar", 'b`, this would be `('foo', 'bar')`. Returns - ======= - + ------- A tuple of three elements: ``quote``, ``token_start``, ``matched``, with ``quote`` being the quote that need to be used to close current string. ``token_start`` the position where the replacement should start occurring, ``matches`` a list of replacement/completion """ + prefix_tuple = extra_prefix if extra_prefix else () + Nprefix = len(prefix_tuple) + def filter_prefix_tuple(key): + # Reject too short keys + if len(key) <= Nprefix: + return False + # Reject keys with non str/bytes in it + for k in key: + if not isinstance(k, (str, bytes)): + return False + # Reject keys that do not match the prefix + for k, pt in zip(key, prefix_tuple): + if k != pt: + return False + # All checks passed! 
+ return True + + filtered_keys:List[Union[str,bytes]] = [] + def _add_to_filtered_keys(key): + if isinstance(key, (str, bytes)): + filtered_keys.append(key) + + for k in keys: + if isinstance(k, tuple): + if filter_prefix_tuple(k): + _add_to_filtered_keys(k[Nprefix]) + else: + _add_to_filtered_keys(k) + if not prefix: - return None, 0, [repr(k) for k in keys - if isinstance(k, (str, bytes))] + return '', 0, [repr(k) for k in filtered_keys] quote_match = re.search('["\']', prefix) + assert quote_match is not None # silence mypy quote = quote_match.group() try: prefix_str = eval(prefix + quote, {}) except Exception: - return None, 0, [] + return '', 0, [] pattern = '[^' + ''.join('\\' + c for c in delims) + ']*$' token_match = re.search(pattern, prefix, re.UNICODE) + assert token_match is not None # silence mypy token_start = token_match.start() token_prefix = token_match.group() - matched = [] - for key in keys: + matched:List[str] = [] + for key in filtered_keys: try: if not key.startswith(prefix_str): continue @@ -806,14 +840,6 @@ def match_dict_keys(keys: List[str], prefix: str, delims: str): rem = key[len(prefix_str):] # force repr wrapped in ' rem_repr = repr(rem + '"') if isinstance(rem, str) else repr(rem + b'"') - if rem_repr.startswith('u') and prefix[0] not in 'uU': - # Found key is unicode, but prefix is Py2 string. - # Therefore attempt to interpret key as string. - try: - rem_repr = repr(rem.encode('ascii') + '"') - except UnicodeEncodeError: - continue - rem_repr = rem_repr[1 + rem_repr.index("'"):-2] if quote == '"': # The entered prefix is quoted with ", @@ -828,13 +854,11 @@ def match_dict_keys(keys: List[str], prefix: str, delims: str): def cursor_to_position(text:str, line:int, column:int)->int: """ - Convert the (line,column) position of the cursor in text to an offset in a string. 
Parameters ---------- - text : str The text in which to calculate the cursor offset line : int @@ -842,13 +866,13 @@ def cursor_to_position(text:str, line:int, column:int)->int: column : int Column of the cursor 0-indexed - Return - ------ - Position of the cursor in ``text``, 0-indexed. + Returns + ------- + Position of the cursor in ``text``, 0-indexed. See Also -------- - position_to_cursor: reciprocal of this function + position_to_cursor : reciprocal of this function """ lines = text.split('\n') @@ -865,23 +889,20 @@ def position_to_cursor(text:str, offset:int)->Tuple[int, int]: Parameters ---------- - text : str The text in which to calculate the cursor offset offset : int Position of the cursor in ``text``, 0-indexed. - Return - ------ + Returns + ------- (line, column) : (int, int) Line of the cursor; 0-indexed, column of the cursor 0-indexed - See Also -------- cursor_to_position : reciprocal of this function - """ assert 0 <= offset <= len(text) , "0 <= %s <= %s" % (offset , len(text)) @@ -899,9 +920,8 @@ def _safe_isinstance(obj, module, class_name): return (module in sys.modules and isinstance(obj, getattr(import_module(module), class_name))) - -def back_unicode_name_matches(text): - u"""Match unicode characters back to unicode name +def back_unicode_name_matches(text:str) -> Tuple[str, Sequence[str]]: + """Match Unicode characters back to Unicode name This does ``☃`` -> ``\\snowman`` @@ -910,52 +930,60 @@ def back_unicode_name_matches(text): This will not either back-complete standard sequences like \\n, \\b ... - Used on Python 3 only. + Returns + ======= + + Return a tuple with two elements: + + - The Unicode character that was matched (preceded with a backslash), or + empty string, + - a sequence (of 1), name for the match Unicode character, preceded by + backslash, or empty if no match. 
+ """ if len(text)<2: - return u'', () + return '', () maybe_slash = text[-2] if maybe_slash != '\\': - return u'', () + return '', () char = text[-1] # no expand on quote for completion in strings. # nor backcomplete standard ascii keys - if char in string.ascii_letters or char in ['"',"'"]: - return u'', () + if char in string.ascii_letters or char in ('"',"'"): + return '', () try : unic = unicodedata.name(char) - return '\\'+char,['\\'+unic] + return '\\'+char,('\\'+unic,) except KeyError: pass - return u'', () + return '', () -def back_latex_name_matches(text:str): +def back_latex_name_matches(text:str) -> Tuple[str, Sequence[str]] : """Match latex characters back to unicode name This does ``\\ℵ`` -> ``\\aleph`` - Used on Python 3 only. """ if len(text)<2: - return u'', () + return '', () maybe_slash = text[-2] if maybe_slash != '\\': - return u'', () + return '', () char = text[-1] # no expand on quote for completion in strings. # nor backcomplete standard ascii keys - if char in string.ascii_letters or char in ['"',"'"]: - return u'', () + if char in string.ascii_letters or char in ('"',"'"): + return '', () try : latex = reverse_latex_symbol[char] # '\\' replace the \ as well return '\\'+char,[latex] except KeyError: pass - return u'', () + return '', () def _formatparamchildren(parameter) -> str: @@ -964,18 +992,15 @@ def _formatparamchildren(parameter) -> str: Jedi does not expose a simple way to get `param=value` from its API. 
- Parameter - ========= - - parameter: + Parameters + ---------- + parameter Jedi's function `Param` Returns - ======= - + ------- A string like 'a', 'b=1', '*args', '**kwargs' - """ description = parameter.description if not description.startswith('param '): @@ -987,15 +1012,13 @@ def _make_signature(completion)-> str: """ Make the signature from a jedi completion - Parameter - ========= - - completion: jedi.Completion + Parameters + ---------- + completion : jedi.Completion object does not complete a function type Returns - ======= - + ------- a string consisting of the function signature, with the parenthesis but without the function name. example: `(a, *args, b=1, **kwargs)` @@ -1015,10 +1038,18 @@ def _make_signature(completion)-> str: return '(%s)'% ', '.join([f for f in (_formatparamchildren(p) for signature in completion.get_signatures() for p in signature.defined_names()) if f]) + +class _CompleteResult(NamedTuple): + matched_text : str + matches: Sequence[str] + matches_origin: Sequence[str] + jedi_matches: Any + + class IPCompleter(Completer): """Extension of the completer class with IPython-specific features""" - _names = None + __dict_key_regexps: Optional[Dict[bool,Pattern]] = None @observe('greedy') def _greedy_changed(self, change): @@ -1064,6 +1095,16 @@ class IPCompleter(Completer): """, ).tag(config=True) + profile_completions = Bool( + default_value=False, + help="If True, emit profiling data for completion subsystem using cProfile." + ).tag(config=True) + + profiler_output_dir = Unicode( + default_value=".completion_profiles", + help="Template for path at which to output profile data for completions." 
+ ).tag(config=True) + @observe('limit_to__all__') def _limit_to_all_changed(self, change): warnings.warn('`IPython.core.IPCompleter.limit_to__all__` configuration ' @@ -1071,42 +1112,41 @@ class IPCompleter(Completer): 'no effects and then removed in future version of IPython.', UserWarning) - def __init__(self, shell=None, namespace=None, global_namespace=None, - use_readline=_deprecation_readline_sentinel, config=None, **kwargs): + def __init__( + self, shell=None, namespace=None, global_namespace=None, config=None, **kwargs + ): """IPCompleter() -> completer Return a completer object. Parameters ---------- - shell a pointer to the ipython shell itself. This is needed because this completer knows about magic functions, and those can only be accessed via the ipython instance. - namespace : dict, optional an optional dict where completions are performed. - global_namespace : dict, optional secondary optional dict for completions, to handle cases (such as IPython embedded inside functions) where both Python scopes are visible. - - use_readline : bool, optional - DEPRECATED, ignored since IPython 6.0, will have no effects + config : Config + traitlet's config object + **kwargs + passed to super class unmodified. 
""" self.magic_escape = ESC_MAGIC self.splitter = CompletionSplitter() - if use_readline is not _deprecation_readline_sentinel: - warnings.warn('The `use_readline` parameter is deprecated and ignored since IPython 6.0.', - DeprecationWarning, stacklevel=2) - # _greedy_changed() depends on splitter and readline being defined: - Completer.__init__(self, namespace=namespace, global_namespace=global_namespace, - config=config, **kwargs) + super().__init__( + namespace=namespace, + global_namespace=global_namespace, + config=config, + **kwargs + ) # List where completion matches will be stored self.matches = [] @@ -1141,8 +1181,14 @@ class IPCompleter(Completer): # This is set externally by InteractiveShell self.custom_completers = None + # This is a list of names of unicode characters that can be completed + # into their corresponding unicode value. The list is large, so we + # lazily initialize it on first use. Consuming code should access this + # attribute through the `@unicode_names` property. + self._unicode_names = None + @property - def matchers(self): + def matchers(self) -> List[Any]: """All active matcher routines for completion""" if self.dict_keys_only: return [self.dict_key_matches] @@ -1164,7 +1210,7 @@ class IPCompleter(Completer): self.python_func_kw_matches, ] - def all_completions(self, text) -> List[str]: + def all_completions(self, text:str) -> List[str]: """ Wrapper around the completion methods for the benefit of emacs. """ @@ -1175,14 +1221,14 @@ class IPCompleter(Completer): return self.complete(text)[1] - def _clean_glob(self, text): + def _clean_glob(self, text:str): return self.glob("%s*" % text) - def _clean_glob_win32(self,text): + def _clean_glob_win32(self, text:str): return [f.replace("\\","/") for f in self.glob("%s*" % text)] - def file_matches(self, text): + def file_matches(self, text:str)->List[str]: """Match filenames, expanding ~USER type strings. 
Most of the seemingly convoluted logic in this completer is an @@ -1264,7 +1310,7 @@ class IPCompleter(Completer): # Mark directories in input list by appending '/' to their names. return [x+'/' if os.path.isdir(x) else x for x in matches] - def magic_matches(self, text): + def magic_matches(self, text:str): """Match magics""" # Get all shell magics now rather than statically, so magics loaded at # runtime show up too. @@ -1355,9 +1401,8 @@ class IPCompleter(Completer): if color.startswith(prefix) ] return [] - def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str): + def _jedi_matches(self, cursor_column:int, cursor_line:int, text:str) -> Iterable[Any]: """ - Return a list of :any:`jedi.api.Completions` object from a ``text`` and cursor position. @@ -1370,9 +1415,8 @@ class IPCompleter(Completer): text : str text to complete - Debugging - --------- - + Notes + ----- If ``IPCompleter.debug`` is ``True`` may return a :any:`_FakeJediCompletion` object containing a string with the Jedi debug information attached. """ @@ -1429,7 +1473,7 @@ class IPCompleter(Completer): else: return [] - def python_matches(self, text): + def python_matches(self, text:str)->List[str]: """Match attributes or global python names""" if "." in text: try: @@ -1511,7 +1555,7 @@ class IPCompleter(Completer): return list(set(ret)) - def python_func_kw_matches(self,text): + def python_func_kw_matches(self, text): """Match named parameters (kwargs) of the last open function""" if "." 
in text: # a parameter cannot be dotted @@ -1581,42 +1625,54 @@ class IPCompleter(Completer): # Remove used named arguments from the list, no need to show twice for namedArg in set(namedArgs) - usedNamedArgs: if namedArg.startswith(text): - argMatches.append(u"%s=" %namedArg) + argMatches.append("%s=" %namedArg) except: pass return argMatches - def dict_key_matches(self, text): + @staticmethod + def _get_keys(obj: Any) -> List[Any]: + # Objects can define their own completions by defining an + # _ipy_key_completions_() method. + method = get_real_method(obj, '_ipython_key_completions_') + if method is not None: + return method() + + # Special case some common in-memory dict-like types + if isinstance(obj, dict) or\ + _safe_isinstance(obj, 'pandas', 'DataFrame'): + try: + return list(obj.keys()) + except Exception: + return [] + elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ + _safe_isinstance(obj, 'numpy', 'void'): + return obj.dtype.names or [] + return [] + + def dict_key_matches(self, text:str) -> List[str]: "Match string keys in a dictionary, after e.g. 'foo[' " - def get_keys(obj): - # Objects can define their own completions by defining an - # _ipy_key_completions_() method. - method = get_real_method(obj, '_ipython_key_completions_') - if method is not None: - return method() - - # Special case some common in-memory dict-like types - if isinstance(obj, dict) or\ - _safe_isinstance(obj, 'pandas', 'DataFrame'): - try: - return list(obj.keys()) - except Exception: - return [] - elif _safe_isinstance(obj, 'numpy', 'ndarray') or\ - _safe_isinstance(obj, 'numpy', 'void'): - return obj.dtype.names or [] - return [] - try: + + if self.__dict_key_regexps is not None: regexps = self.__dict_key_regexps - except AttributeError: + else: dict_key_re_fmt = r'''(?x) ( # match dict-referring expression wrt greedy setting %s ) \[ # open bracket \s* # and optional whitespace + # Capture any number of str-like objects (e.g. "a", "b", 'c') + ((?:[uUbB]? 
# string prefix (r not handled) + (?: + '(?:[^']|(?<!\\)\\')*' + | + "(?:[^"]|(?<!\\)\\")*" + ) + \s*,\s* + )*) ([uUbB]? # string prefix (r not handled) (?: # unclosed string '(?:[^']|(?<!\\)\\')* @@ -1638,10 +1694,11 @@ class IPCompleter(Completer): } match = regexps[self.greedy].search(self.text_until_cursor) + if match is None: return [] - expr, prefix = match.groups() + expr, prefix0, prefix = match.groups() try: obj = eval(expr, self.namespace) except Exception: @@ -1650,10 +1707,13 @@ class IPCompleter(Completer): except Exception: return [] - keys = get_keys(obj) + keys = self._get_keys(obj) if not keys: return keys - closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims) + + extra_prefix = eval(prefix0) if prefix0 != '' else None + + closing_quote, token_offset, matches = match_dict_keys(keys, prefix, self.splitter.delims, extra_prefix=extra_prefix) if not matches: return matches @@ -1663,7 +1723,7 @@ class IPCompleter(Completer): # - the start of the completion text_start = len(self.text_until_cursor) - len(text) if prefix: - key_start = match.start(2) + key_start = match.start(3) completion_start = key_start + token_offset else: key_start = completion_start = match.end() @@ -1695,16 +1755,15 @@ class IPCompleter(Completer): return [leading + k + suf for k in matches] - def unicode_name_matches(self, text): - u"""Match Latex-like syntax for unicode characters base + @staticmethod + def unicode_name_matches(text:str) -> Tuple[str, List[str]] : + """Match Latex-like syntax for unicode characters base on the name of the character. This does ``\\GREEK SMALL LETTER ETA`` -> ``η`` Works only on valid python 3 identifier, or on combining characters that will combine to form a valid identifier. - - Used on Python 3 only. 
""" slashpos = text.rfind('\\') if slashpos > -1: @@ -1716,11 +1775,11 @@ class IPCompleter(Completer): return '\\'+s,[unic] except KeyError: pass - return u'', [] + return '', [] - def latex_matches(self, text): - u"""Match Latex syntax for unicode characters. + def latex_matches(self, text:str) -> Tuple[str, Sequence[str]]: + """Match Latex syntax for unicode characters. This does both ``\\alp`` -> ``\\alpha`` and ``\\alpha`` -> ``α`` """ @@ -1737,7 +1796,7 @@ class IPCompleter(Completer): matches = [k for k in latex_symbols if k.startswith(s)] if matches: return s, matches - return u'', [] + return '', () def dispatch_custom_completer(self, text): if not self.custom_completers: @@ -1800,18 +1859,18 @@ class IPCompleter(Completer): Parameters ---------- - - text:str + text : str Full text of the current input, multi line string. - offset:int + offset : int Integer representing the position of the cursor in ``text``. Offset is 0-based indexed. Yields ------ - :any:`Completion` object - + Completion + Notes + ----- The cursor on a text can either be seen as being "in between" characters or "On" a character depending on the interface visible to the user. For consistency the cursor being on "in between" characters X @@ -1821,7 +1880,6 @@ class IPCompleter(Completer): Combining characters may span more that one position in the text. - .. note:: If ``IPCompleter.debug`` is :any:`True` will yield a ``--jedi/ipython--`` @@ -1840,7 +1898,15 @@ class IPCompleter(Completer): category=ProvisionalCompleterWarning, stacklevel=2) seen = set() + profiler:Optional[cProfile.Profile] try: + if self.profile_completions: + import cProfile + profiler = cProfile.Profile() + profiler.enable() + else: + profiler = None + for c in self._completions(text, offset, _timeout=self.jedi_compute_type_timeout/1000): if c and (c in seen): continue @@ -1850,13 +1916,19 @@ class IPCompleter(Completer): """if completions take too long and users send keyboard interrupt, do not crash and return ASAP. 
""" pass - - def _completions(self, full_text: str, offset: int, *, _timeout)->Iterator[Completion]: + finally: + if profiler is not None: + profiler.disable() + ensure_dir_exists(self.profiler_output_dir) + output_path = os.path.join(self.profiler_output_dir, str(uuid.uuid4())) + print("Writing profiler output to", output_path) + profiler.dump_stats(output_path) + + def _completions(self, full_text: str, offset: int, *, _timeout) -> Iterator[Completion]: """ Core completion module.Same signature as :any:`completions`, with the extra `timeout` parameter (in seconds). - Computing jedi's completion ``.type`` can be quite expensive (it is a lazy property) and can require some warm-up, more warm up than just computing the ``name`` of a completion. The warm-up can be : @@ -1936,7 +2008,7 @@ class IPCompleter(Completer): yield Completion(start=start_offset, end=offset, text=m, _origin=t, signature='', type='<unknown>') - def complete(self, text=None, line_buffer=None, cursor_pos=None): + def complete(self, text=None, line_buffer=None, cursor_pos=None) -> Tuple[str, Sequence[str]]: """Find completions for the given text and line context. Note that both the text and the line_buffer are optional, but at least @@ -1944,35 +2016,31 @@ class IPCompleter(Completer): Parameters ---------- - text : string, optional + text : string, optional Text to perform the completion on. If not given, the line buffer is split using the instance's CompletionSplitter object. - - line_buffer : string, optional + line_buffer : string, optional If not given, the completer attempts to obtain the current line buffer via readline. This keyword allows clients which are requesting for text completions in non-readline contexts to inform the completer of the entire text. - - cursor_pos : int, optional + cursor_pos : int, optional Index of the cursor in the full line buffer. Should be provided by remote frontends where kernel has no access to frontend state. 
Returns ------- + Tuple of two items: text : str - Text that was actually used in the completion. - + Text that was actually used in the completion. matches : list - A list of completion matches. - - - .. note:: + A list of completion matches. + Notes + ----- This API is likely to be deprecated and replaced by :any:`IPCompleter.completions` in the future. - """ warnings.warn('`Completer.complete` is pending deprecation since ' 'IPython 6.0 and will be replaced by `Completer.completions`.', @@ -1982,21 +2050,46 @@ class IPCompleter(Completer): return self._complete(line_buffer=line_buffer, cursor_pos=cursor_pos, text=text, cursor_line=0)[:2] def _complete(self, *, cursor_line, cursor_pos, line_buffer=None, text=None, - full_text=None) -> Tuple[str, List[str], List[str], Iterable[_FakeJediCompletion]]: + full_text=None) -> _CompleteResult: """ - Like complete but can also returns raw jedi completions as well as the origin of the completion text. This could (and should) be made much cleaner but that will be simpler once we drop the old (and stateful) :any:`complete` API. - With current provisional API, cursor_pos act both (depending on the caller) as the offset in the ``text`` or ``line_buffer``, or as the ``column`` when passing multiline strings this could/should be renamed but would add extra noise. + + Parameters + ---------- + cursor_line + Index of the line the cursor is on. 0 indexed. + cursor_pos + Position of the cursor in the current line/line_buffer/text. 0 + indexed. + line_buffer : optional, str + The current line the cursor is in, this is mostly due to legacy + reason that readline could only give a us the single current line. + Prefer `full_text`. + text : str + The current "token" the cursor is in, mostly also for historical + reasons. as the completer would trigger only after the current line + was parsed. + full_text : str + Full text of the current cell. + + Returns + ------- + A tuple of N elements which are (likely): + matched_text: ? 
the text that the complete matched + matches: list of completions ? + matches_origin: ? list same length as matches, and where each completion came from + jedi_matches: list of Jedi matches, have it's own structure. """ + # if the cursor position isn't given, the only sane assumption we can # make is that it's at the end of the line (the common case) if cursor_pos is None: @@ -2014,17 +2107,16 @@ class IPCompleter(Completer): if self.backslash_combining_completions: # allow deactivation of these on windows. base_text = text if not line_buffer else line_buffer[:cursor_pos] - latex_text, latex_matches = self.latex_matches(base_text) - if latex_matches: - return latex_text, latex_matches, ['latex_matches']*len(latex_matches), () - name_text = '' - name_matches = [] - # need to add self.fwd_unicode_match() function here when done - for meth in (self.unicode_name_matches, back_latex_name_matches, back_unicode_name_matches, self.fwd_unicode_match): + + for meth in (self.latex_matches, + self.unicode_name_matches, + back_latex_name_matches, + back_unicode_name_matches, + self.fwd_unicode_match): name_text, name_matches = meth(base_text) if name_text: - return name_text, name_matches[:MATCHES_LIMIT], \ - [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), () + return _CompleteResult(name_text, name_matches[:MATCHES_LIMIT], \ + [meth.__qualname__]*min(len(name_matches), MATCHES_LIMIT), ()) # If no line buffer is given, assume the input text is all there was @@ -2039,22 +2131,23 @@ class IPCompleter(Completer): matches = list(matcher(line_buffer))[:MATCHES_LIMIT] if matches: origins = [matcher.__qualname__] * len(matches) - return text, matches, origins, () + return _CompleteResult(text, matches, origins, ()) # Start with a clean slate of completions matches = [] - + # FIXME: we should extend our api to return a dict with completions for # different types of objects. 
The rlcomplete() method could then # simply collapse the dict into a list for readline, but we'd have # richer completion semantics in other environments. - completions = () - if self.use_jedi: + is_magic_prefix = len(text) > 0 and text[0] == "%" + completions: Iterable[Any] = [] + if self.use_jedi and not is_magic_prefix: if not full_text: full_text = line_buffer completions = self._jedi_matches( cursor_pos, cursor_line, full_text) - + if self.merge_completions: matches = [] for matcher in self.matchers: @@ -2092,27 +2185,89 @@ class IPCompleter(Completer): self.matches = _matches - return text, _matches, origins, completions + return _CompleteResult(text, _matches, origins, completions) - def fwd_unicode_match(self, text:str) -> Tuple[str, list]: - if self._names is None: - self._names = [] - for c in range(0,0x10FFFF + 1): - try: - self._names.append(unicodedata.name(chr(c))) - except ValueError: - pass + def fwd_unicode_match(self, text:str) -> Tuple[str, Sequence[str]]: + """ + Forward match a string starting with a backslash with a list of + potential Unicode completions. + + Will compute list list of Unicode character names on first call and cache it. + + Returns + ------- + At tuple with: + - matched text (empty if no matches) + - list of potential completions, empty tuple otherwise) + """ + # TODO: self.unicode_names is here a list we traverse each time with ~100k elements. + # We could do a faster match using a Trie. + + # Using pygtrie the following seem to work: + + # s = PrefixSet() + + # for c in range(0,0x10FFFF + 1): + # try: + # s.add(unicodedata.name(chr(c))) + # except ValueError: + # pass + # [''.join(k) for k in s.iter(prefix)] + + # But need to be timed and adds an extra dependency. slashpos = text.rfind('\\') # if text starts with slash if slashpos > -1: - s = text[slashpos+1:] - candidates = [x for x in self._names if x.startswith(s)] + # PERF: It's important that we don't access self._unicode_names + # until we're inside this if-block. 
_unicode_names is lazily + # initialized, and it takes a user-noticeable amount of time to + # initialize it, so we don't want to initialize it unless we're + # actually going to use it. + s = text[slashpos + 1 :] + sup = s.upper() + candidates = [x for x in self.unicode_names if x.startswith(sup)] + if candidates: + return s, candidates + candidates = [x for x in self.unicode_names if sup in x] + if candidates: + return s, candidates + splitsup = sup.split(" ") + candidates = [ + x for x in self.unicode_names if all(u in x for u in splitsup) + ] if candidates: return s, candidates - else: - return '', () + + return "", () # if text does not start with slash else: - return u'', () + return '', () + + @property + def unicode_names(self) -> List[str]: + """List of names of unicode code points that can be completed. + + The list is lazily initialized on first access. + """ + if self._unicode_names is None: + names = [] + for c in range(0,0x10FFFF + 1): + try: + names.append(unicodedata.name(chr(c))) + except ValueError: + pass + self._unicode_names = _unicode_name_compute(_UNICODE_RANGES) + + return self._unicode_names + +def _unicode_name_compute(ranges:List[Tuple[int,int]]) -> List[str]: + names = [] + for start,stop in ranges: + for c in range(start, stop) : + try: + names.append(unicodedata.name(chr(c))) + except ValueError: + pass + return names diff --git a/contrib/python/ipython/py3/IPython/core/completerlib.py b/contrib/python/ipython/py3/IPython/core/completerlib.py index bda665d8a2..65efa42254 100644 --- a/contrib/python/ipython/py3/IPython/core/completerlib.py +++ b/contrib/python/ipython/py3/IPython/core/completerlib.py @@ -201,6 +201,17 @@ def is_importable(module, attr, only_modules): else: return not(attr[:2] == '__' and attr[-2:] == '__') +def is_possible_submodule(module, attr): + try: + obj = getattr(module, attr) + except AttributeError: + # Is possilby an unimported submodule + return True + except TypeError: + # 
https://github.com/ipython/ipython/issues/9678 + return False + return inspect.ismodule(obj) + def try_import(mod: str, only_modules=False) -> List[str]: """ @@ -220,7 +231,12 @@ def try_import(mod: str, only_modules=False) -> List[str]: completions.extend( [attr for attr in dir(m) if is_importable(m, attr, only_modules)]) - completions.extend(getattr(m, '__all__', [])) + m_all = getattr(m, "__all__", []) + if only_modules: + completions.extend(attr for attr in m_all if is_possible_submodule(m, attr)) + else: + completions.extend(m_all) + if m_is_init: completions.extend(arcadia_module_list(mod)) completions_set = {c for c in completions if isinstance(c, str)} diff --git a/contrib/python/ipython/py3/IPython/core/crashhandler.py b/contrib/python/ipython/py3/IPython/core/crashhandler.py index 1e0b429d09..4af39361e8 100644 --- a/contrib/python/ipython/py3/IPython/core/crashhandler.py +++ b/contrib/python/ipython/py3/IPython/core/crashhandler.py @@ -23,6 +23,7 @@ import os import sys import traceback from pprint import pformat +from pathlib import Path from IPython.core import ultratb from IPython.core.release import author_email @@ -31,6 +32,8 @@ from IPython.utils.py3compat import input from IPython.core.release import __version__ as version +from typing import Optional + #----------------------------------------------------------------------------- # Code #----------------------------------------------------------------------------- @@ -94,34 +97,40 @@ class CrashHandler(object): message_template = _default_message_template section_sep = '\n\n'+'*'*75+'\n\n' - def __init__(self, app, contact_name=None, contact_email=None, - bug_tracker=None, show_crash_traceback=True, call_pdb=False): + def __init__( + self, + app, + contact_name: Optional[str] = None, + contact_email: Optional[str] = None, + bug_tracker: Optional[str] = None, + show_crash_traceback: bool = True, + call_pdb: bool = False, + ): """Create a new crash handler Parameters ---------- - app : Application + 
app : Application A running :class:`Application` instance, which will be queried at crash time for internal information. - contact_name : str A string with the name of the person to contact. - contact_email : str A string with the email address of the contact. - bug_tracker : str A string with the URL for your project's bug tracker. - show_crash_traceback : bool If false, don't print the crash traceback on stderr, only generate the on-disk report + call_pdb + Whether to call pdb on crash - Non-argument instance attributes: - + Attributes + ---------- These instances contain some non-argument attributes which allow for further customization of the crash handler's behavior. Please see the source for further details. + """ self.crash_report_fname = "Crash_report_%s.txt" % app.name self.app = app @@ -151,10 +160,10 @@ class CrashHandler(object): try: rptdir = self.app.ipython_dir except: - rptdir = os.getcwd() - if rptdir is None or not os.path.isdir(rptdir): - rptdir = os.getcwd() - report_name = os.path.join(rptdir,self.crash_report_fname) + rptdir = Path.cwd() + if rptdir is None or not Path.is_dir(rptdir): + rptdir = Path.cwd() + report_name = rptdir / self.crash_report_fname # write the report filename into the instance dict so it can get # properly expanded out in the user message template self.crash_report_fname = report_name @@ -176,7 +185,7 @@ class CrashHandler(object): # and generate a complete report on disk try: - report = open(report_name,'w') + report = open(report_name, "w", encoding="utf-8") except: print('Could not create crash report on disk.', file=sys.stderr) return diff --git a/contrib/python/ipython/py3/IPython/core/debugger.py b/contrib/python/ipython/py3/IPython/core/debugger.py index 1744bdb8a8..8e3dd9678c 100644 --- a/contrib/python/ipython/py3/IPython/core/debugger.py +++ b/contrib/python/ipython/py3/IPython/core/debugger.py @@ -69,8 +69,8 @@ or configure it in your ``.pdbrc`` -Licencse --------- +License +------- Modified from the standard 
pdb.Pdb class to avoid including readline, so that the command line completion of other programs which include this isn't @@ -102,7 +102,6 @@ All the changes since then are under the same license as IPython. #***************************************************************************** import bdb -import functools import inspect import linecache import sys @@ -114,12 +113,13 @@ from IPython import get_ipython from IPython.utils import PyColorize from IPython.utils import coloransi, py3compat from IPython.core.excolors import exception_colors -from IPython.testing.skipdoctest import skip_doctest +# skip module docstests +__skip_doctest__ = True prompt = 'ipdb> ' -#We have to check this directly from sys.argv, config struct not yet available +# We have to check this directly from sys.argv, config struct not yet available from pdb import Pdb as OldPdb # Allow the set_trace code to operate outside of an ipython instance, even if @@ -144,112 +144,15 @@ def BdbQuit_excepthook(et, ev, tb, excepthook=None): All other exceptions are processed using the `excepthook` parameter. """ - warnings.warn("`BdbQuit_excepthook` is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - if et==bdb.BdbQuit: - print('Exiting Debugger.') - elif excepthook is not None: - excepthook(et, ev, tb) - else: - # Backwards compatibility. Raise deprecation warning? - BdbQuit_excepthook.excepthook_ori(et,ev,tb) - - -def BdbQuit_IPython_excepthook(self,et,ev,tb,tb_offset=None): - warnings.warn( - "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - print('Exiting Debugger.') - - -class Tracer(object): - """ - DEPRECATED - - Class for local debugging, similar to pdb.set_trace. - - Instances of this class, when called, behave like pdb.set_trace, but - providing IPython's enhanced capabilities. 
- - This is implemented as a class which must be initialized in your own code - and not as a standalone function because we need to detect at runtime - whether IPython is already active or not. That detection is done in the - constructor, ensuring that this code plays nicely with a running IPython, - while functioning acceptably (though with limitations) if outside of it. - """ - - @skip_doctest - def __init__(self, colors=None): - """ - DEPRECATED - - Create a local debugger instance. - - Parameters - ---------- + raise ValueError( + "`BdbQuit_excepthook` is deprecated since version 5.1", + ) - colors : str, optional - The name of the color scheme to use, it must be one of IPython's - valid color schemes. If not given, the function will default to - the current IPython scheme when running inside IPython, and to - 'NoColor' otherwise. - Examples - -------- - :: - - from IPython.core.debugger import Tracer; debug_here = Tracer() - - Later in your code:: - - debug_here() # -> will open up the debugger at that point. - - Once the debugger activates, you can use all of its regular commands to - step through code, set breakpoints, etc. See the pdb documentation - from the Python standard library for usage details. - """ - warnings.warn("`Tracer` is deprecated since version 5.1, directly use " - "`IPython.core.debugger.Pdb.set_trace()`", - DeprecationWarning, stacklevel=2) - - ip = get_ipython() - if ip is None: - # Outside of ipython, we set our own exception hook manually - sys.excepthook = functools.partial(BdbQuit_excepthook, - excepthook=sys.excepthook) - def_colors = 'NoColor' - else: - # In ipython, we use its custom exception handler mechanism - def_colors = ip.colors - ip.set_custom_exc((bdb.BdbQuit,), BdbQuit_IPython_excepthook) - - if colors is None: - colors = def_colors - - # The stdlib debugger internally uses a modified repr from the `repr` - # module, that limits the length of printed strings to a hardcoded - # limit of 30 characters. 
That much trimming is too aggressive, let's - # at least raise that limit to 80 chars, which should be enough for - # most interactive uses. - try: - from reprlib import aRepr - aRepr.maxstring = 80 - except: - # This is only a user-facing convenience, so any error we encounter - # here can be warned about but can be otherwise ignored. These - # printouts will tell us about problems if this API changes - import traceback - traceback.print_exc() - - self.debugger = Pdb(colors) - - def __call__(self): - """Starts an interactive debugger at the point where called. - - This is similar to the pdb.set_trace() function from the std lib, but - using IPython's enhanced debugger.""" - - self.debugger.set_trace(sys._getframe().f_back) +def BdbQuit_IPython_excepthook(self, et, ev, tb, tb_offset=None): + raise ValueError( + "`BdbQuit_IPython_excepthook` is deprecated since version 5.1", + DeprecationWarning, stacklevel=2) RGX_EXTRA_INDENT = re.compile(r'(?<=\n)\s+') @@ -291,14 +194,11 @@ class Pdb(OldPdb): "debuggerskip": True, } - def __init__(self, color_scheme=None, completekey=None, - stdin=None, stdout=None, context=5, **kwargs): + def __init__(self, completekey=None, stdin=None, stdout=None, context=5, **kwargs): """Create a new IPython debugger. Parameters ---------- - color_scheme : default None - Deprecated, do not use. completekey : default None Passed to pdb.Pdb. stdin : default None @@ -322,8 +222,8 @@ class Pdb(OldPdb): self.context = int(context) if self.context <= 0: raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") + except (TypeError, ValueError) as e: + raise ValueError("Context must be a positive integer") from e # `kwargs` ensures full compatibility with stdlib's `pdb.Pdb`. 
OldPdb.__init__(self, completekey, stdin, stdout, **kwargs) @@ -339,14 +239,10 @@ class Pdb(OldPdb): self.shell = TerminalInteractiveShell.instance() # needed by any code which calls __import__("__main__") after # the debugger was entered. See also #9941. - sys.modules['__main__'] = save_main + sys.modules["__main__"] = save_main - if color_scheme is not None: - warnings.warn( - "The `color_scheme` argument is deprecated since version 5.1", - DeprecationWarning, stacklevel=2) - else: - color_scheme = self.shell.colors + + color_scheme = self.shell.colors self.aliases = {} @@ -374,7 +270,6 @@ class Pdb(OldPdb): cst['Neutral'].colors.breakpoint_enabled = C.LightRed cst['Neutral'].colors.breakpoint_disabled = C.Red - # Add a python parser so we can syntax highlight source while # debugging. self.parser = PyColorize.Parser(style=color_scheme) @@ -423,14 +318,14 @@ class Pdb(OldPdb): def hidden_frames(self, stack): """ - Given an index in the stack return wether it should be skipped. + Given an index in the stack return whether it should be skipped. This is used in up/down and where to skip frames. """ # The f_locals dictionary is updated from the actual frame # locals whenever the .f_locals accessor is called, so we # avoid calling it here to preserve self.curframe_locals. - # Futhermore, there is no good reason to hide the current frame. + # Furthermore, there is no good reason to hide the current frame. 
ip_hide = [self._hidden_predicate(s[0]) for s in stack] ip_start = [i for i, s in enumerate(ip_hide) if s == "__ipython_bottom__"] if ip_start and self._predicates["ipython_internal"]: @@ -443,13 +338,25 @@ class Pdb(OldPdb): except KeyboardInterrupt: self.stdout.write("\n" + self.shell.get_exception_only()) + def precmd(self, line): + """Perform useful escapes on the command before it is executed.""" + + if line.endswith("??"): + line = "pinfo2 " + line[:-2] + elif line.endswith("?"): + line = "pinfo " + line[:-1] + + line = super().precmd(line) + + return line + def new_do_frame(self, arg): OldPdb.do_frame(self, arg) def new_do_quit(self, arg): if hasattr(self, 'old_all_completions'): - self.shell.Completer.all_completions=self.old_all_completions + self.shell.Completer.all_completions = self.old_all_completions return OldPdb.do_quit(self, arg) @@ -467,11 +374,11 @@ class Pdb(OldPdb): if context is None: context = self.context try: - context=int(context) + context = int(context) if context <= 0: raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") + except (TypeError, ValueError) as e: + raise ValueError("Context must be a positive integer") from e try: skipped = 0 for hidden, frame_lineno in zip(self.hidden_frames(self.stack), self.stack): @@ -496,11 +403,11 @@ class Pdb(OldPdb): if context is None: context = self.context try: - context=int(context) + context = int(context) if context <= 0: raise ValueError("Context must be a positive integer") - except (TypeError, ValueError): - raise ValueError("Context must be a positive integer") + except (TypeError, ValueError) as e: + raise ValueError("Context must be a positive integer") from e print(self.format_stack_entry(frame_lineno, '', context), file=self.stdout) # vds: >> @@ -511,8 +418,8 @@ class Pdb(OldPdb): def _get_frame_locals(self, frame): """ " - Acessing f_local of current frame reset the namespace, so we want to 
avoid - that or the following can happend + Accessing f_local of current frame reset the namespace, so we want to avoid + that or the following can happen ipdb> foo "old" @@ -535,25 +442,22 @@ class Pdb(OldPdb): if context is None: context = self.context try: - context=int(context) + context = int(context) if context <= 0: print("Context must be a positive integer", file=self.stdout) except (TypeError, ValueError): print("Context must be a positive integer", file=self.stdout) - try: - import reprlib # Py 3 - except ImportError: - import repr as reprlib # Py 2 + + import reprlib ret = [] Colors = self.color_scheme_table.active_colors ColorsNormal = Colors.Normal - tpl_link = u'%s%%s%s' % (Colors.filenameEm, ColorsNormal) - tpl_call = u'%s%%s%s%%s%s' % (Colors.vName, Colors.valEm, ColorsNormal) - tpl_line = u'%%s%s%%s %s%%s' % (Colors.lineno, ColorsNormal) - tpl_line_em = u'%%s%s%%s %s%%s%s' % (Colors.linenoEm, Colors.line, - ColorsNormal) + tpl_link = "%s%%s%s" % (Colors.filenameEm, ColorsNormal) + tpl_call = "%s%%s%s%%s%s" % (Colors.vName, Colors.valEm, ColorsNormal) + tpl_line = "%%s%s%%s %s%%s" % (Colors.lineno, ColorsNormal) + tpl_line_em = "%%s%s%%s %s%%s%s" % (Colors.linenoEm, Colors.line, ColorsNormal) frame, lineno = frame_lineno @@ -587,8 +491,8 @@ class Pdb(OldPdb): if frame is self.curframe: ret.append('> ') else: - ret.append(' ') - ret.append(u'%s(%s)%s\n' % (link,lineno,call)) + ret.append(" ") + ret.append("%s(%s)%s\n" % (link, lineno, call)) start = lineno - 1 - context//2 lines = linecache.getlines(filename) @@ -596,17 +500,17 @@ class Pdb(OldPdb): start = max(start, 0) lines = lines[start : start + context] - for i,line in enumerate(lines): - show_arrow = (start + 1 + i == lineno) - linetpl = (frame is self.curframe or show_arrow) \ - and tpl_line_em \ - or tpl_line - ret.append(self.__format_line(linetpl, filename, - start + 1 + i, line, - arrow = show_arrow) ) - return ''.join(ret) - - def __format_line(self, tpl_line, filename, lineno, line, 
arrow = False): + for i, line in enumerate(lines): + show_arrow = start + 1 + i == lineno + linetpl = (frame is self.curframe or show_arrow) and tpl_line_em or tpl_line + ret.append( + self.__format_line( + linetpl, filename, start + 1 + i, line, arrow=show_arrow + ) + ) + return "".join(ret) + + def __format_line(self, tpl_line, filename, lineno, line, arrow=False): bp_mark = "" bp_mark_color = "" @@ -636,7 +540,6 @@ class Pdb(OldPdb): return tpl_line % (bp_mark_color + bp_mark, num, line) - def print_list_lines(self, filename, first, last): """The printing (as opposed to the parsing part of a 'list' command.""" @@ -655,9 +558,13 @@ class Pdb(OldPdb): break if lineno == self.curframe.f_lineno: - line = self.__format_line(tpl_line_em, filename, lineno, line, arrow = True) + line = self.__format_line( + tpl_line_em, filename, lineno, line, arrow=True + ) else: - line = self.__format_line(tpl_line, filename, lineno, line, arrow = False) + line = self.__format_line( + tpl_line, filename, lineno, line, arrow=False + ) src.append(line) self.lineno = lineno @@ -891,7 +798,6 @@ class Pdb(OldPdb): def break_anywhere(self, frame): """ - _stop_in_decorator_internals is overly restrictive, as we may still want to trace function calls, so we need to also update break_anywhere so that is we don't `stop_here`, because of debugger skip, we may still @@ -909,13 +815,10 @@ class Pdb(OldPdb): return True return False - @skip_doctest def _is_in_decorator_internal_and_should_skip(self, frame): """ Utility to tell us whether we are in a decorator internal and should stop. 
- - """ # if we are disabled don't skip @@ -937,9 +840,6 @@ class Pdb(OldPdb): return False def stop_here(self, frame): - """Check if pdb should stop here""" - if not super().stop_here(frame): - return False if self._is_in_decorator_internal_and_should_skip(frame) is True: return False @@ -951,9 +851,10 @@ class Pdb(OldPdb): if self.report_skipped: Colors = self.color_scheme_table.active_colors ColorsNormal = Colors.Normal - print(f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n") - return False - return True + print( + f"{Colors.excName} [... skipped 1 hidden frame]{ColorsNormal}\n" + ) + return super().stop_here(frame) def do_up(self, arg): """u(p) [count] @@ -976,11 +877,9 @@ class Pdb(OldPdb): if count < 0: _newframe = 0 else: - _newindex = self.curindex counter = 0 hidden_frames = self.hidden_frames(self.stack) for i in range(self.curindex - 1, -1, -1): - frame = self.stack[i][0] if hidden_frames[i] and self.skip_hidden: skipped += 1 continue @@ -988,8 +887,10 @@ class Pdb(OldPdb): if counter >= count: break else: - # if no break occured. - self.error("all frames above hidden") + # if no break occurred. + self.error( + "all frames above hidden, use `skip_hidden False` to get get into those." + ) return Colors = self.color_scheme_table.active_colors @@ -1019,12 +920,10 @@ class Pdb(OldPdb): if count < 0: _newframe = len(self.stack) - 1 else: - _newindex = self.curindex counter = 0 skipped = 0 hidden_frames = self.hidden_frames(self.stack) for i in range(self.curindex + 1, len(self.stack)): - frame = self.stack[i][0] if hidden_frames[i] and self.skip_hidden: skipped += 1 continue @@ -1032,7 +931,9 @@ class Pdb(OldPdb): if counter >= count: break else: - self.error("all frames bellow hidden") + self.error( + "all frames below hidden, use `skip_hidden False` to get get into those." 
+ ) return Colors = self.color_scheme_table.active_colors diff --git a/contrib/python/ipython/py3/IPython/core/display.py b/contrib/python/ipython/py3/IPython/core/display.py index f45e7599c9..933295ad6c 100644 --- a/contrib/python/ipython/py3/IPython/core/display.py +++ b/contrib/python/ipython/py3/IPython/core/display.py @@ -5,12 +5,12 @@ # Distributed under the terms of the Modified BSD License. -from binascii import b2a_hex, b2a_base64, hexlify +from binascii import b2a_base64, hexlify +import html import json import mimetypes import os import struct -import sys import warnings from copy import deepcopy from os.path import splitext @@ -18,14 +18,37 @@ from pathlib import Path, PurePath from IPython.utils.py3compat import cast_unicode from IPython.testing.skipdoctest import skip_doctest +from . import display_functions + + +__all__ = ['display_pretty', 'display_html', 'display_markdown', + 'display_svg', 'display_png', 'display_jpeg', 'display_latex', 'display_json', + 'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', + 'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', + 'GeoJSON', 'Javascript', 'Image', 'set_matplotlib_formats', + 'set_matplotlib_close', + 'Video'] + +_deprecated_names = ["display", "clear_output", "publish_display_data", "update_display", "DisplayHandle"] + +__all__ = __all__ + _deprecated_names + + +# ----- warn to import from IPython.display ----- + +from warnings import warn + + +def __getattr__(name): + if name in _deprecated_names: + warn(f"Importing {name} from IPython.core.display is deprecated since IPython 7.14, please import from IPython display", DeprecationWarning, stacklevel=2) + return getattr(display_functions, name) + + if name in globals().keys(): + return globals()[name] + else: + raise AttributeError(f"module {__name__} has no attribute {name}") -__all__ = ['display', 'display_pretty', 'display_html', 'display_markdown', -'display_svg', 'display_png', 'display_jpeg', 
'display_latex', 'display_json', -'display_javascript', 'display_pdf', 'DisplayObject', 'TextDisplayObject', -'Pretty', 'HTML', 'Markdown', 'Math', 'Latex', 'SVG', 'ProgressBar', 'JSON', -'GeoJSON', 'Javascript', 'Image', 'clear_output', 'set_matplotlib_formats', -'set_matplotlib_close', 'publish_display_data', 'update_display', 'DisplayHandle', -'Video'] #----------------------------------------------------------------------------- # utility functions @@ -38,17 +61,6 @@ def _safe_exists(path): except Exception: return False -def _merge(d1, d2): - """Like update, but merges sub-dicts instead of clobbering at the top level. - - Updates d1 in-place - """ - - if not isinstance(d2, dict) or not isinstance(d1, dict): - return d2 - for key, value in d2.items(): - d1[key] = _merge(d1.get(key), value) - return d1 def _display_mimetype(mimetype, objs, raw=False, metadata=None): """internal implementation of all display_foo methods @@ -71,334 +83,12 @@ def _display_mimetype(mimetype, objs, raw=False, metadata=None): if raw: # turn list of pngdata into list of { 'image/png': pngdata } objs = [ {mimetype: obj} for obj in objs ] - display(*objs, raw=raw, metadata=metadata, include=[mimetype]) + display_functions.display(*objs, raw=raw, metadata=metadata, include=[mimetype]) #----------------------------------------------------------------------------- # Main functions #----------------------------------------------------------------------------- -# use * to indicate transient is keyword-only -def publish_display_data(data, metadata=None, source=None, *, transient=None, **kwargs): - """Publish data and metadata to all frontends. - - See the ``display_data`` message in the messaging documentation for - more details about this message type. - - Keys of data and metadata can be any mime-type. - - Parameters - ---------- - data : dict - A dictionary having keys that are valid MIME types (like - 'text/plain' or 'image/svg+xml') and values that are the data for - that MIME type. 
The data itself must be a JSON'able data - structure. Minimally all data should have the 'text/plain' data, - which can be displayed by all frontends. If more than the plain - text is given, it is up to the frontend to decide which - representation to use. - metadata : dict - A dictionary for metadata related to the data. This can contain - arbitrary key, value pairs that frontends can use to interpret - the data. mime-type keys matching those in data can be used - to specify metadata about particular representations. - source : str, deprecated - Unused. - transient : dict, keyword-only - A dictionary of transient data, such as display_id. - """ - from IPython.core.interactiveshell import InteractiveShell - - display_pub = InteractiveShell.instance().display_pub - - # only pass transient if supplied, - # to avoid errors with older ipykernel. - # TODO: We could check for ipykernel version and provide a detailed upgrade message. - if transient: - kwargs['transient'] = transient - - display_pub.publish( - data=data, - metadata=metadata, - **kwargs - ) - - -def _new_id(): - """Generate a new random text id with urandom""" - return b2a_hex(os.urandom(16)).decode('ascii') - - -def display(*objs, include=None, exclude=None, metadata=None, transient=None, display_id=None, **kwargs): - """Display a Python object in all frontends. - - By default all representations will be computed and sent to the frontends. - Frontends can decide which representation is used and how. - - In terminal IPython this will be similar to using :func:`print`, for use in richer - frontends see Jupyter notebook examples with rich display logic. - - Parameters - ---------- - *objs : object - The Python objects to display. - raw : bool, optional - Are the objects to be displayed already mimetype-keyed dicts of raw display data, - or Python objects that need to be formatted before display? 
[default: False] - include : list, tuple or set, optional - A list of format type strings (MIME types) to include in the - format data dict. If this is set *only* the format types included - in this list will be computed. - exclude : list, tuple or set, optional - A list of format type strings (MIME types) to exclude in the format - data dict. If this is set all format types will be computed, - except for those included in this argument. - metadata : dict, optional - A dictionary of metadata to associate with the output. - mime-type keys in this dictionary will be associated with the individual - representation formats, if they exist. - transient : dict, optional - A dictionary of transient data to associate with the output. - Data in this dict should not be persisted to files (e.g. notebooks). - display_id : str, bool optional - Set an id for the display. - This id can be used for updating this display area later via update_display. - If given as `True`, generate a new `display_id` - clear : bool, optional - Should the output area be cleared before displaying anything? If True, - this will wait for additional output before clearing. [default: False] - kwargs: additional keyword-args, optional - Additional keyword-arguments are passed through to the display publisher. - - Returns - ------- - - handle: DisplayHandle - Returns a handle on updatable displays for use with :func:`update_display`, - if `display_id` is given. Returns :any:`None` if no `display_id` is given - (default). - - Examples - -------- - - >>> class Json(object): - ... def __init__(self, json): - ... self.json = json - ... def _repr_pretty_(self, pp, cycle): - ... import json - ... pp.text(json.dumps(self.json, indent=2)) - ... def __repr__(self): - ... return str(self.json) - ... - - >>> d = Json({1:2, 3: {4:5}}) - - >>> print(d) - {1: 2, 3: {4: 5}} - - >>> display(d) - { - "1": 2, - "3": { - "4": 5 - } - } - - >>> def int_formatter(integer, pp, cycle): - ... 
pp.text('I'*integer) - - >>> plain = get_ipython().display_formatter.formatters['text/plain'] - >>> plain.for_type(int, int_formatter) - <function _repr_pprint at 0x...> - >>> display(7-5) - II - - >>> del plain.type_printers[int] - >>> display(7-5) - 2 - - See Also - -------- - - :func:`update_display` - - Notes - ----- - - In Python, objects can declare their textual representation using the - `__repr__` method. IPython expands on this idea and allows objects to declare - other, rich representations including: - - - HTML - - JSON - - PNG - - JPEG - - SVG - - LaTeX - - A single object can declare some or all of these representations; all are - handled by IPython's display system. - - The main idea of the first approach is that you have to implement special - display methods when you define your class, one for each representation you - want to use. Here is a list of the names of the special methods and the - values they must return: - - - `_repr_html_`: return raw HTML as a string, or a tuple (see below). - - `_repr_json_`: return a JSONable dict, or a tuple (see below). - - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). - - `_repr_png_`: return raw PNG data, or a tuple (see below). - - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). - - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", - or a tuple (see below). - - `_repr_mimebundle_`: return a full mimebundle containing the mapping - from all mimetypes to data. - Use this for any mime-type not listed above. - - The above functions may also return the object's metadata alonside the - data. If the metadata is available, the functions will return a tuple - containing the data and metadata, in that order. If there is no metadata - available, then the functions will return the data only. - - When you are directly writing your own classes, you can adapt them for - display in IPython by following the above approach. 
But in practice, you - often need to work with existing classes that you can't easily modify. - - You can refer to the documentation on integrating with the display system in - order to register custom formatters for already existing types - (:ref:`integrating_rich_display`). - - .. versionadded:: 5.4 display available without import - .. versionadded:: 6.1 display available without import - - Since IPython 5.4 and 6.1 :func:`display` is automatically made available to - the user without import. If you are using display in a document that might - be used in a pure python context or with older version of IPython, use the - following import at the top of your file:: - - from IPython.display import display - - """ - from IPython.core.interactiveshell import InteractiveShell - - if not InteractiveShell.initialized(): - # Directly print objects. - print(*objs) - return - - raw = kwargs.pop("raw", False) - clear = kwargs.pop("clear", False) - if transient is None: - transient = {} - if metadata is None: - metadata={} - if display_id: - if display_id is True: - display_id = _new_id() - transient['display_id'] = display_id - if kwargs.get('update') and 'display_id' not in transient: - raise TypeError('display_id required for update_display') - if transient: - kwargs['transient'] = transient - - if not objs and display_id: - # if given no objects, but still a request for a display_id, - # we assume the user wants to insert an empty output that - # can be updated later - objs = [{}] - raw = True - - if not raw: - format = InteractiveShell.instance().display_formatter.format - - if clear: - clear_output(wait=True) - - for obj in objs: - if raw: - publish_display_data(data=obj, metadata=metadata, **kwargs) - else: - format_dict, md_dict = format(obj, include=include, exclude=exclude) - if not format_dict: - # nothing to display (e.g. 
_ipython_display_ took over) - continue - if metadata: - # kwarg-specified metadata gets precedence - _merge(md_dict, metadata) - publish_display_data(data=format_dict, metadata=md_dict, **kwargs) - if display_id: - return DisplayHandle(display_id) - - -# use * for keyword-only display_id arg -def update_display(obj, *, display_id, **kwargs): - """Update an existing display by id - - Parameters - ---------- - - obj: - The object with which to update the display - display_id: keyword-only - The id of the display to update - - See Also - -------- - - :func:`display` - """ - kwargs['update'] = True - display(obj, display_id=display_id, **kwargs) - - -class DisplayHandle(object): - """A handle on an updatable display - - Call `.update(obj)` to display a new object. - - Call `.display(obj`) to add a new instance of this display, - and update existing instances. - - See Also - -------- - - :func:`display`, :func:`update_display` - - """ - - def __init__(self, display_id=None): - if display_id is None: - display_id = _new_id() - self.display_id = display_id - - def __repr__(self): - return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) - - def display(self, obj, **kwargs): - """Make a new display with my id, updating existing instances. - - Parameters - ---------- - - obj: - object to display - **kwargs: - additional keyword arguments passed to display - """ - display(obj, display_id=self.display_id, **kwargs) - - def update(self, obj, **kwargs): - """Update existing displays with my id - - Parameters - ---------- - - obj: - object to display - **kwargs: - additional keyword arguments passed to update_display - """ - update_display(obj, display_id=self.display_id, **kwargs) - def display_pretty(*objs, **kwargs): """Display the pretty (default) representation of an object. 
@@ -659,7 +349,8 @@ class DisplayObject(object): def reload(self): """Reload the raw data from file or URL.""" if self.filename is not None: - with open(self.filename, self._read_flags) as f: + encoding = None if "b" in self._read_flags else "utf-8" + with open(self.filename, self._read_flags, encoding=encoding) as f: self.data = f.read() elif self.url is not None: # Deferred import @@ -679,7 +370,11 @@ class DisplayObject(object): if 'gzip' in response.headers['content-encoding']: import gzip from io import BytesIO - with gzip.open(BytesIO(data), 'rt', encoding=encoding) as fp: + + # assume utf-8 if encoding is not specified + with gzip.open( + BytesIO(data), "rt", encoding=encoding or "utf-8" + ) as fp: encoding = None data = fp.read() @@ -792,16 +487,16 @@ class SVG(DisplayObject): pass svg = cast_unicode(svg) self._data = svg - + def _repr_svg_(self): return self._data_and_metadata() class ProgressBar(DisplayObject): - """Progressbar supports displaying a progressbar like element + """Progressbar supports displaying a progressbar like element """ def __init__(self, total): """Creates a new progressbar - + Parameters ---------- total : int @@ -827,10 +522,10 @@ class ProgressBar(DisplayObject): self.html_width, self.total, self.progress) def display(self): - display(self, display_id=self._display_id) + display_functions.display(self, display_id=self._display_id) def update(self): - display(self, display_id=self._display_id, update=True) + display_functions.display(self, display_id=self._display_id, update=True) @property def progress(self): @@ -878,10 +573,10 @@ class JSON(DisplayObject): Path to a local file to load the data from. expanded : boolean Metadata to control whether a JSON display component is expanded. - metadata: dict + metadata : dict Specify extra metadata to attach to the json display object. 
root : str - The name of the root element of the JSON tree + The name of the root element of the JSON tree """ self.metadata = { 'expanded': expanded, @@ -944,7 +639,7 @@ class GeoJSON(JSON): Scalar types (None, number, string) are not allowed, only dict containers. """ - + def __init__(self, *args, **kwargs): """Create a GeoJSON display object given raw data. @@ -962,12 +657,11 @@ class GeoJSON(JSON): A URL to download the data from. filename : unicode Path to a local file to load the data from. - metadata: dict + metadata : dict Specify extra metadata to attach to the json display object. Examples -------- - The following will display an interactive map of Mars with a point of interest on frontend that do support GeoJSON display. @@ -993,7 +687,7 @@ class GeoJSON(JSON): the GeoJSON object. """ - + super(GeoJSON, self).__init__(*args, **kwargs) @@ -1005,7 +699,7 @@ class GeoJSON(JSON): metadata = { 'application/geo+json': self.metadata } - display(bundle, metadata=metadata, raw=True) + display_functions.display(bundle, metadata=metadata, raw=True) class Javascript(TextDisplayObject): @@ -1034,7 +728,7 @@ class Javascript(TextDisplayObject): running the source code. The full URLs of the libraries should be given. A single Javascript library URL can also be given as a string. - css: : list or str + css : list or str A sequence of css files to load before running the source code. The full URLs of the css files should be given. A single css URL can also be given as a string. @@ -1112,9 +806,20 @@ class Image(DisplayObject): _FMT_GIF: 'image/gif', } - def __init__(self, data=None, url=None, filename=None, format=None, - embed=None, width=None, height=None, retina=False, - unconfined=False, metadata=None): + def __init__( + self, + data=None, + url=None, + filename=None, + format=None, + embed=None, + width=None, + height=None, + retina=False, + unconfined=False, + metadata=None, + alt=None, + ): """Create a PNG/JPEG/GIF image object given raw data. 
When this object is returned by an input cell or passed to the @@ -1126,15 +831,19 @@ class Image(DisplayObject): data : unicode, str or bytes The raw image data or a URL or filename to load the data from. This always results in embedded image data. + url : unicode A URL to download the data from. If you specify `url=`, the image data will not be embedded unless you also specify `embed=True`. + filename : unicode Path to a local file to load the data from. Images from a file are always embedded. + format : unicode The format of the image data (png/jpeg/jpg/gif). If a filename or URL is given for format will be inferred from the filename extension. + embed : bool Should the image data be embedded using a data URI (True) or be loaded using an <img> tag. Set this to True if you want the image @@ -1144,10 +853,13 @@ class Image(DisplayObject): default value is `False`. Note that QtConsole is not able to display images if `embed` is set to `False` + width : int Width in pixels to which to constrain the image in html + height : int Height in pixels to which to constrain the image in html + retina : bool Automatically set the width and height to half of the measured width and height. @@ -1155,25 +867,38 @@ class Image(DisplayObject): from image data. For non-embedded images, you can just set the desired display width and height directly. - unconfined: bool + + unconfined : bool Set unconfined=True to disable max-width confinement of the image. - metadata: dict + + metadata : dict Specify extra metadata to attach to the image. + alt : unicode + Alternative text for the image, for use by screen readers. + Examples -------- - # embedded image data, works in qtconsole and notebook - # when passed positionally, the first arg can be any of raw image data, - # a URL, or a filename from which to load image data. - # The result is always embedding image data for inline images. 
- Image('http://www.google.fr/images/srpr/logo3w.png') - Image('/path/to/image.jpg') - Image(b'RAW_PNG_DATA...') - - # Specifying Image(url=...) does not embed the image data, - # it only generates `<img>` tag with a link to the source. - # This will not work in the qtconsole or offline. - Image(url='http://www.google.fr/images/srpr/logo3w.png') + embedded image data, works in qtconsole and notebook + when passed positionally, the first arg can be any of raw image data, + a URL, or a filename from which to load image data. + The result is always embedding image data for inline images. + + >>> Image('https://www.google.fr/images/srpr/logo3w.png') # doctest: +SKIP + <IPython.core.display.Image object> + + >>> Image('/path/to/image.jpg') + <IPython.core.display.Image object> + + >>> Image(b'RAW_PNG_DATA...') + <IPython.core.display.Image object> + + Specifying Image(url=...) does not embed the image data, + it only generates ``<img>`` tag with a link to the source. + This will not work in the qtconsole or offline. 
+ + >>> Image(url='https://www.google.fr/images/srpr/logo3w.png') + <IPython.core.display.Image object> """ if isinstance(data, (Path, PurePath)): @@ -1228,7 +953,8 @@ class Image(DisplayObject): self.height = height self.retina = retina self.unconfined = unconfined - super(Image, self).__init__(data=data, url=url, filename=filename, + self.alt = alt + super(Image, self).__init__(data=data, url=url, filename=filename, metadata=metadata) if self.width is None and self.metadata.get('width', {}): @@ -1237,6 +963,9 @@ class Image(DisplayObject): if self.height is None and self.metadata.get('height', {}): self.height = metadata['height'] + if self.alt is None and self.metadata.get("alt", {}): + self.alt = metadata["alt"] + if retina: self._retina_shape() @@ -1266,18 +995,21 @@ class Image(DisplayObject): def _repr_html_(self): if not self.embed: - width = height = klass = '' + width = height = klass = alt = "" if self.width: width = ' width="%d"' % self.width if self.height: height = ' height="%d"' % self.height if self.unconfined: klass = ' class="unconfined"' - return u'<img src="{url}"{width}{height}{klass}/>'.format( + if self.alt: + alt = ' alt="%s"' % html.escape(self.alt) + return '<img src="{url}"{width}{height}{klass}{alt}/>'.format( url=self.url, width=width, height=height, klass=klass, + alt=alt, ) def _repr_mimebundle_(self, include=None, exclude=None): @@ -1298,9 +1030,9 @@ class Image(DisplayObject): """shortcut for returning metadata with shape information, if defined""" try: b64_data = b2a_base64(self.data).decode('ascii') - except TypeError: + except TypeError as e: raise FileNotFoundError( - "No such file or directory: '%s'" % (self.data)) + "No such file or directory: '%s'" % (self.data)) from e md = {} if self.metadata: md.update(self.metadata) @@ -1310,6 +1042,8 @@ class Image(DisplayObject): md['height'] = self.height if self.unconfined: md['unconfined'] = self.unconfined + if self.alt: + md["alt"] = self.alt if md or always_both: return b64_data, 
md else: @@ -1348,12 +1082,15 @@ class Video(DisplayObject): data : unicode, str or bytes The raw video data or a URL or filename to load the data from. Raw data will require passing ``embed=True``. + url : unicode A URL for the video. If you specify ``url=``, the image data will not be embedded. + filename : unicode Path to a local file containing the video. Will be interpreted as a local URL unless ``embed=True``. + embed : bool Should the video be embedded using a data URI (True) or be loaded using a <video> tag (False). @@ -1365,15 +1102,18 @@ class Video(DisplayObject): Video('./video.mp4') - mimetype: unicode + mimetype : unicode Specify the mimetype for embedded videos. Default will be guessed from file extension, if available. + width : int Width in pixels to which to constrain the video in HTML. If not supplied, defaults to the width of the video. + height : int Height in pixels to which to constrain the video in html. If not supplied, defaults to the height of the video. + html_attributes : str Attributes for the HTML ``<video>`` block. Default: ``"controls"`` to get video controls. @@ -1382,7 +1122,6 @@ class Video(DisplayObject): Examples -------- - :: Video('https://archive.org/download/Sita_Sings_the_Blues/Sita_Sings_the_Blues_small.mp4') @@ -1397,7 +1136,7 @@ class Video(DisplayObject): if url is None and isinstance(data, str) and data.startswith(('http:', 'https:')): url = data data = None - elif os.path.exists(data): + elif data is not None and os.path.exists(data): filename = data data = None @@ -1459,23 +1198,6 @@ class Video(DisplayObject): pass -def clear_output(wait=False): - """Clear the output of the current cell receiving output. 
- - Parameters - ---------- - wait : bool [default: false] - Wait to clear the output until new output is available to replace it.""" - from IPython.core.interactiveshell import InteractiveShell - if InteractiveShell.initialized(): - InteractiveShell.instance().display_pub.clear_output(wait) - else: - print('\033[2K\r', end='') - sys.stdout.flush() - print('\033[2K\r', end='') - sys.stderr.flush() - - @skip_doctest def set_matplotlib_formats(*formats, **kwargs): """ @@ -1498,7 +1220,7 @@ def set_matplotlib_formats(*formats, **kwargs): ---------- *formats : strs One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'. - **kwargs : + **kwargs Keyword args will be relayed to ``figure.canvas.print_figure``. """ warnings.warn( @@ -1521,7 +1243,6 @@ def set_matplotlib_close(close=True): use `matplotlib_inline.backend_inline.set_matplotlib_close()` - Set whether the inline backend closes all figures automatically or not. By default, the inline backend used in the IPython Notebook will close all diff --git a/contrib/python/ipython/py3/IPython/core/display_functions.py b/contrib/python/ipython/py3/IPython/core/display_functions.py new file mode 100644 index 0000000000..567cf3fa60 --- /dev/null +++ b/contrib/python/ipython/py3/IPython/core/display_functions.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- +"""Top-level display functions for displaying object in different formats.""" + +# Copyright (c) IPython Development Team. +# Distributed under the terms of the Modified BSD License. + + +from binascii import b2a_hex +import os +import sys +import warnings + +__all__ = ['display', 'clear_output', 'publish_display_data', 'update_display', 'DisplayHandle'] + +#----------------------------------------------------------------------------- +# utility functions +#----------------------------------------------------------------------------- + + +def _merge(d1, d2): + """Like update, but merges sub-dicts instead of clobbering at the top level. 
+ + Updates d1 in-place + """ + + if not isinstance(d2, dict) or not isinstance(d1, dict): + return d2 + for key, value in d2.items(): + d1[key] = _merge(d1.get(key), value) + return d1 + + +#----------------------------------------------------------------------------- +# Main functions +#----------------------------------------------------------------------------- + +class _Sentinel: + def __repr__(self): + return "<deprecated>" + + +_sentinel = _Sentinel() + +# use * to indicate transient is keyword-only +def publish_display_data( + data, metadata=None, source=_sentinel, *, transient=None, **kwargs +): + """Publish data and metadata to all frontends. + + See the ``display_data`` message in the messaging documentation for + more details about this message type. + + Keys of data and metadata can be any mime-type. + + Parameters + ---------- + data : dict + A dictionary having keys that are valid MIME types (like + 'text/plain' or 'image/svg+xml') and values that are the data for + that MIME type. The data itself must be a JSON'able data + structure. Minimally all data should have the 'text/plain' data, + which can be displayed by all frontends. If more than the plain + text is given, it is up to the frontend to decide which + representation to use. + metadata : dict + A dictionary for metadata related to the data. This can contain + arbitrary key, value pairs that frontends can use to interpret + the data. mime-type keys matching those in data can be used + to specify metadata about particular representations. + source : str, deprecated + Unused. + transient : dict, keyword-only + A dictionary of transient data, such as display_id. 
+ """ + from IPython.core.interactiveshell import InteractiveShell + + if source is not _sentinel: + warnings.warn( + "The `source` parameter emit a deprecation warning since" + " IPython 8.0, it had no effects for a long time and will " + " be removed in future versions.", + DeprecationWarning, + stacklevel=2, + ) + display_pub = InteractiveShell.instance().display_pub + + # only pass transient if supplied, + # to avoid errors with older ipykernel. + # TODO: We could check for ipykernel version and provide a detailed upgrade message. + if transient: + kwargs['transient'] = transient + + display_pub.publish( + data=data, + metadata=metadata, + **kwargs + ) + + +def _new_id(): + """Generate a new random text id with urandom""" + return b2a_hex(os.urandom(16)).decode('ascii') + + +def display( + *objs, + include=None, + exclude=None, + metadata=None, + transient=None, + display_id=None, + raw=False, + clear=False, + **kwargs +): + """Display a Python object in all frontends. + + By default all representations will be computed and sent to the frontends. + Frontends can decide which representation is used and how. + + In terminal IPython this will be similar to using :func:`print`, for use in richer + frontends see Jupyter notebook examples with rich display logic. + + Parameters + ---------- + *objs : object + The Python objects to display. + raw : bool, optional + Are the objects to be displayed already mimetype-keyed dicts of raw display data, + or Python objects that need to be formatted before display? [default: False] + include : list, tuple or set, optional + A list of format type strings (MIME types) to include in the + format data dict. If this is set *only* the format types included + in this list will be computed. + exclude : list, tuple or set, optional + A list of format type strings (MIME types) to exclude in the format + data dict. If this is set all format types will be computed, + except for those included in this argument. 
+ metadata : dict, optional + A dictionary of metadata to associate with the output. + mime-type keys in this dictionary will be associated with the individual + representation formats, if they exist. + transient : dict, optional + A dictionary of transient data to associate with the output. + Data in this dict should not be persisted to files (e.g. notebooks). + display_id : str, bool optional + Set an id for the display. + This id can be used for updating this display area later via update_display. + If given as `True`, generate a new `display_id` + clear : bool, optional + Should the output area be cleared before displaying anything? If True, + this will wait for additional output before clearing. [default: False] + **kwargs : additional keyword-args, optional + Additional keyword-arguments are passed through to the display publisher. + + Returns + ------- + handle: DisplayHandle + Returns a handle on updatable displays for use with :func:`update_display`, + if `display_id` is given. Returns :any:`None` if no `display_id` is given + (default). + + Examples + -------- + >>> class Json(object): + ... def __init__(self, json): + ... self.json = json + ... def _repr_pretty_(self, pp, cycle): + ... import json + ... pp.text(json.dumps(self.json, indent=2)) + ... def __repr__(self): + ... return str(self.json) + ... + + >>> d = Json({1:2, 3: {4:5}}) + + >>> print(d) + {1: 2, 3: {4: 5}} + + >>> display(d) + { + "1": 2, + "3": { + "4": 5 + } + } + + >>> def int_formatter(integer, pp, cycle): + ... pp.text('I'*integer) + + >>> plain = get_ipython().display_formatter.formatters['text/plain'] + >>> plain.for_type(int, int_formatter) + <function _repr_pprint at 0x...> + >>> display(7-5) + II + + >>> del plain.type_printers[int] + >>> display(7-5) + 2 + + See Also + -------- + :func:`update_display` + + Notes + ----- + In Python, objects can declare their textual representation using the + `__repr__` method. 
IPython expands on this idea and allows objects to declare + other, rich representations including: + + - HTML + - JSON + - PNG + - JPEG + - SVG + - LaTeX + + A single object can declare some or all of these representations; all are + handled by IPython's display system. + + The main idea of the first approach is that you have to implement special + display methods when you define your class, one for each representation you + want to use. Here is a list of the names of the special methods and the + values they must return: + + - `_repr_html_`: return raw HTML as a string, or a tuple (see below). + - `_repr_json_`: return a JSONable dict, or a tuple (see below). + - `_repr_jpeg_`: return raw JPEG data, or a tuple (see below). + - `_repr_png_`: return raw PNG data, or a tuple (see below). + - `_repr_svg_`: return raw SVG data as a string, or a tuple (see below). + - `_repr_latex_`: return LaTeX commands in a string surrounded by "$", + or a tuple (see below). + - `_repr_mimebundle_`: return a full mimebundle containing the mapping + from all mimetypes to data. + Use this for any mime-type not listed above. + + The above functions may also return the object's metadata alonside the + data. If the metadata is available, the functions will return a tuple + containing the data and metadata, in that order. If there is no metadata + available, then the functions will return the data only. + + When you are directly writing your own classes, you can adapt them for + display in IPython by following the above approach. But in practice, you + often need to work with existing classes that you can't easily modify. + + You can refer to the documentation on integrating with the display system in + order to register custom formatters for already existing types + (:ref:`integrating_rich_display`). + + .. versionadded:: 5.4 display available without import + .. 
versionadded:: 6.1 display available without import + + Since IPython 5.4 and 6.1 :func:`display` is automatically made available to + the user without import. If you are using display in a document that might + be used in a pure python context or with older version of IPython, use the + following import at the top of your file:: + + from IPython.display import display + + """ + from IPython.core.interactiveshell import InteractiveShell + + if not InteractiveShell.initialized(): + # Directly print objects. + print(*objs) + return + + if transient is None: + transient = {} + if metadata is None: + metadata={} + if display_id: + if display_id is True: + display_id = _new_id() + transient['display_id'] = display_id + if kwargs.get('update') and 'display_id' not in transient: + raise TypeError('display_id required for update_display') + if transient: + kwargs['transient'] = transient + + if not objs and display_id: + # if given no objects, but still a request for a display_id, + # we assume the user wants to insert an empty output that + # can be updated later + objs = [{}] + raw = True + + if not raw: + format = InteractiveShell.instance().display_formatter.format + + if clear: + clear_output(wait=True) + + for obj in objs: + if raw: + publish_display_data(data=obj, metadata=metadata, **kwargs) + else: + format_dict, md_dict = format(obj, include=include, exclude=exclude) + if not format_dict: + # nothing to display (e.g. 
_ipython_display_ took over) + continue + if metadata: + # kwarg-specified metadata gets precedence + _merge(md_dict, metadata) + publish_display_data(data=format_dict, metadata=md_dict, **kwargs) + if display_id: + return DisplayHandle(display_id) + + +# use * for keyword-only display_id arg +def update_display(obj, *, display_id, **kwargs): + """Update an existing display by id + + Parameters + ---------- + obj + The object with which to update the display + display_id : keyword-only + The id of the display to update + + See Also + -------- + :func:`display` + """ + kwargs['update'] = True + display(obj, display_id=display_id, **kwargs) + + +class DisplayHandle(object): + """A handle on an updatable display + + Call `.update(obj)` to display a new object. + + Call `.display(obj`) to add a new instance of this display, + and update existing instances. + + See Also + -------- + + :func:`display`, :func:`update_display` + + """ + + def __init__(self, display_id=None): + if display_id is None: + display_id = _new_id() + self.display_id = display_id + + def __repr__(self): + return "<%s display_id=%s>" % (self.__class__.__name__, self.display_id) + + def display(self, obj, **kwargs): + """Make a new display with my id, updating existing instances. + + Parameters + ---------- + obj + object to display + **kwargs + additional keyword arguments passed to display + """ + display(obj, display_id=self.display_id, **kwargs) + + def update(self, obj, **kwargs): + """Update existing displays with my id + + Parameters + ---------- + obj + object to display + **kwargs + additional keyword arguments passed to update_display + """ + update_display(obj, display_id=self.display_id, **kwargs) + + +def clear_output(wait=False): + """Clear the output of the current cell receiving output. 
+ + Parameters + ---------- + wait : bool [default: false] + Wait to clear the output until new output is available to replace it.""" + from IPython.core.interactiveshell import InteractiveShell + if InteractiveShell.initialized(): + InteractiveShell.instance().display_pub.clear_output(wait) + else: + print('\033[2K\r', end='') + sys.stdout.flush() + print('\033[2K\r', end='') + sys.stderr.flush() diff --git a/contrib/python/ipython/py3/IPython/core/displayhook.py b/contrib/python/ipython/py3/IPython/core/displayhook.py index 3c06675e86..578e783ab8 100644 --- a/contrib/python/ipython/py3/IPython/core/displayhook.py +++ b/contrib/python/ipython/py3/IPython/core/displayhook.py @@ -146,7 +146,7 @@ class DisplayHook(Configurable): MIME type representation of the object. md_dict is a :class:`dict` with the same MIME type keys of metadata associated with each output. - + """ return self.shell.display_formatter.format(result) diff --git a/contrib/python/ipython/py3/IPython/core/displaypub.py b/contrib/python/ipython/py3/IPython/core/displaypub.py index 1da0458cf0..74028ec79e 100644 --- a/contrib/python/ipython/py3/IPython/core/displaypub.py +++ b/contrib/python/ipython/py3/IPython/core/displaypub.py @@ -22,7 +22,7 @@ from traitlets.config.configurable import Configurable from traitlets import List # This used to be defined here - it is imported for backwards compatibility -from .display import publish_display_data +from .display_functions import publish_display_data #----------------------------------------------------------------------------- # Main payload class @@ -94,11 +94,11 @@ class DisplayPublisher(Configurable): the data itself. source : str, deprecated Unused. - transient: dict, keyword-only + transient : dict, keyword-only A dictionary for transient data. Data in this dictionary should not be persisted as part of saving this output. Examples include 'display_id'. 
- update: bool, keyword-only, default: False + update : bool, keyword-only, default: False If True, only update existing outputs with the same display_id, rather than creating a new output. """ diff --git a/contrib/python/ipython/py3/IPython/core/events.py b/contrib/python/ipython/py3/IPython/core/events.py index 1af13ca406..73fc181ae5 100644 --- a/contrib/python/ipython/py3/IPython/core/events.py +++ b/contrib/python/ipython/py3/IPython/core/events.py @@ -28,34 +28,34 @@ class EventManager(object): """ def __init__(self, shell, available_events): """Initialise the :class:`CallbackManager`. - + Parameters ---------- shell - The :class:`~IPython.core.interactiveshell.InteractiveShell` instance - available_callbacks - An iterable of names for callback events. + The :class:`~IPython.core.interactiveshell.InteractiveShell` instance + available_events + An iterable of names for callback events. """ self.shell = shell self.callbacks = {n:[] for n in available_events} def register(self, event, function): """Register a new event callback. - + Parameters ---------- event : str - The event for which to register this callback. + The event for which to register this callback. function : callable - A function to be called on the given event. It should take the same - parameters as the appropriate callback prototype. - + A function to be called on the given event. It should take the same + parameters as the appropriate callback prototype. + Raises ------ TypeError - If ``function`` is not callable. + If ``function`` is not callable. KeyError - If ``event`` is not one of the known events. + If ``event`` is not one of the known events. """ if not callable(function): raise TypeError('Need a callable, got %r' % function) @@ -80,7 +80,7 @@ class EventManager(object): def trigger(self, event, *args, **kwargs): """Call callbacks for ``event``. - + Any additional arguments are passed to all callbacks registered for this event. 
Exceptions raised by callbacks are caught, and a message printed. """ @@ -109,7 +109,7 @@ def _define_event(callback_function): @_define_event def pre_execute(): """Fires before code is executed in response to user/frontend action. - + This includes comm and widget messages and silent execution, as well as user code cells. """ @@ -122,14 +122,14 @@ def pre_run_cell(info): Parameters ---------- info : :class:`~IPython.core.interactiveshell.ExecutionInfo` - An object containing information used for the code execution. + An object containing information used for the code execution. """ pass @_define_event def post_execute(): """Fires after code is executed in response to user/frontend action. - + This includes comm and widget messages and silent execution, as well as user code cells. """ @@ -142,20 +142,20 @@ def post_run_cell(result): Parameters ---------- result : :class:`~IPython.core.interactiveshell.ExecutionResult` - The object which will be returned as the execution result. + The object which will be returned as the execution result. """ pass @_define_event def shell_initialized(ip): """Fires after initialisation of :class:`~IPython.core.interactiveshell.InteractiveShell`. - + This is before extensions and startup scripts are loaded, so it can only be set by subclassing. - + Parameters ---------- ip : :class:`~IPython.core.interactiveshell.InteractiveShell` - The newly initialised shell. + The newly initialised shell. 
""" pass diff --git a/contrib/python/ipython/py3/IPython/core/excolors.py b/contrib/python/ipython/py3/IPython/core/excolors.py index 487bde18c8..c47ce922c4 100644 --- a/contrib/python/ipython/py3/IPython/core/excolors.py +++ b/contrib/python/ipython/py3/IPython/core/excolors.py @@ -164,21 +164,3 @@ def exception_colors(): ex_colors.add_scheme(ex_colors['Linux'].copy('Neutral')) return ex_colors - -class Deprec(object): - - def __init__(self, wrapped_obj): - self.wrapped=wrapped_obj - - def __getattr__(self, name): - val = getattr(self.wrapped, name) - warnings.warn("Using ExceptionColors global is deprecated and will be removed in IPython 6.0", - DeprecationWarning, stacklevel=2) - # using getattr after warnings break ipydoctest in weird way for 3.5 - return val - -# For backwards compatibility, keep around a single global object. Note that -# this should NOT be used, the factory function should be used instead, since -# these objects are stateful and it's very easy to get strange bugs if any code -# modifies the module-level object's state. -ExceptionColors = Deprec(exception_colors()) diff --git a/contrib/python/ipython/py3/IPython/core/extensions.py b/contrib/python/ipython/py3/IPython/core/extensions.py index bf5e0ad06c..4ca266869c 100644 --- a/contrib/python/ipython/py3/IPython/core/extensions.py +++ b/contrib/python/ipython/py3/IPython/core/extensions.py @@ -19,6 +19,9 @@ from traitlets import Instance # Main class #----------------------------------------------------------------------------- +BUILTINS_EXTS = {"storemagic": False, "autoreload": False} + + class ExtensionManager(Configurable): """A class to manage IPython extensions. @@ -62,13 +65,22 @@ class ExtensionManager(Configurable): def _on_ipython_dir_changed(self, change): ensure_dir_exists(self.ipython_extension_dir) - def load_extension(self, module_str): + def load_extension(self, module_str: str): """Load an IPython extension by its module name. 
Returns the string "already loaded" if the extension is already loaded, "no load function" if the module doesn't have a load_ipython_extension function, or None if it succeeded. """ + try: + return self._load_extension(module_str) + except ModuleNotFoundError: + if module_str in BUILTINS_EXTS: + BUILTINS_EXTS[module_str] = True + return self._load_extension("IPython.extensions." + module_str) + raise + + def _load_extension(self, module_str: str): if module_str in self.loaded: return "already loaded" @@ -89,19 +101,21 @@ class ExtensionManager(Configurable): else: return "no load function" - def unload_extension(self, module_str): + def unload_extension(self, module_str: str): """Unload an IPython extension by its module name. This function looks up the extension's name in ``sys.modules`` and simply calls ``mod.unload_ipython_extension(self)``. - + Returns the string "no unload function" if the extension doesn't define a function to unload itself, "not loaded" if the extension isn't loaded, otherwise None. """ + if BUILTINS_EXTS.get(module_str, False) is True: + module_str = "IPython.extensions." + module_str if module_str not in self.loaded: return "not loaded" - + if module_str in sys.modules: mod = sys.modules[module_str] if self._call_unload_ipython_extension(mod): @@ -109,7 +123,7 @@ class ExtensionManager(Configurable): else: return "no unload function" - def reload_extension(self, module_str): + def reload_extension(self, module_str: str): """Reload an IPython extension by calling reload. If the module has not been loaded before, @@ -119,6 +133,9 @@ class ExtensionManager(Configurable): """ from IPython.utils.syspathcontext import prepended_to_syspath + if BUILTINS_EXTS.get(module_str, False) is True: + module_str = "IPython.extensions." 
+ module_str + if (module_str in self.loaded) and (module_str in sys.modules): self.unload_extension(module_str) mod = sys.modules[module_str] diff --git a/contrib/python/ipython/py3/IPython/core/formatters.py b/contrib/python/ipython/py3/IPython/core/formatters.py index c13caab91a..4e0b9e455a 100644 --- a/contrib/python/ipython/py3/IPython/core/formatters.py +++ b/contrib/python/ipython/py3/IPython/core/formatters.py @@ -121,19 +121,17 @@ class DisplayFormatter(Configurable): Returns ------- (format_dict, metadata_dict) : tuple of two dicts - format_dict is a dictionary of key/value pairs, one of each format that was generated for the object. The keys are the format types, which will usually be MIME type strings and the values and JSON'able data structure containing the raw data for the representation in that format. - + metadata_dict is a dictionary of metadata about each mime-type output. Its keys will be a strict subset of the keys in format_dict. Notes ----- - If an object implement `_repr_mimebundle_` as well as various `_repr_*_`, the data returned by `_repr_mimebundle_` will take precedence and the corresponding `_repr_*_` for this mimetype will @@ -263,7 +261,7 @@ class FormatterABC(metaclass=abc.ABCMeta): def _mod_name_key(typ): """Return a (__module__, __name__) tuple for a type. - + Used as key in Formatter.deferred_printers. """ module = getattr(typ, '__module__', None) @@ -358,7 +356,7 @@ class BaseFormatter(Configurable): def _check_return(self, r, obj): """Check that a return value is appropriate - + Return the value if so, None otherwise, warning if invalid. """ if r is None or isinstance(r, self._return_type) or \ @@ -373,10 +371,10 @@ class BaseFormatter(Configurable): def lookup(self, obj): """Look up the formatter for a given instance. 
- + Parameters ---------- - obj : object instance + obj : object instance Returns ------- @@ -399,7 +397,7 @@ class BaseFormatter(Configurable): Parameters ---------- - typ : type or '__module__.__name__' string for a type + typ : type or '__module__.__name__' string for a type Returns ------- @@ -430,21 +428,22 @@ class BaseFormatter(Configurable): def for_type(self, typ, func=None): """Add a format function for a given type. - + Parameters ---------- typ : type or '__module__.__name__' string for a type The class of the object that will be formatted using `func`. + func : callable A callable for computing the format data. `func` will be called with the object to be formatted, and will return the raw data in this formatter's format. Subclasses may use a different call signature for the `func` argument. - + If `func` is None or not specified, there will be no change, only returning the current value. - + Returns ------- oldfunc : callable @@ -476,18 +475,20 @@ class BaseFormatter(Configurable): type_module : str The full dotted name of the module the type is defined in, like ``numpy``. + type_name : str The name of the type (the class name), like ``dtype`` + func : callable A callable for computing the format data. `func` will be called with the object to be formatted, and will return the raw data in this formatter's format. Subclasses may use a different call signature for the `func` argument. - + If `func` is None or unspecified, there will be no change, only returning the current value. - + Returns ------- oldfunc : callable @@ -636,24 +637,23 @@ class PlainTextFormatter(BaseFormatter): This parameter can be set via the '%precision' magic. 
""" - new = change['new'] if '%' in new: # got explicit format string fmt = new try: fmt%3.14159 - except Exception: - raise ValueError("Precision must be int or format string, not %r"%new) + except Exception as e: + raise ValueError("Precision must be int or format string, not %r"%new) from e elif new: # otherwise, should be an int try: i = int(new) assert i >= 0 - except ValueError: - raise ValueError("Precision must be int or format string, not %r"%new) - except AssertionError: - raise ValueError("int precision must be non-negative, not %r"%i) + except ValueError as e: + raise ValueError("Precision must be int or format string, not %r"%new) from e + except AssertionError as e: + raise ValueError("int precision must be non-negative, not %r"%i) from e fmt = '%%.%if'%i if 'numpy' in sys.modules: @@ -678,6 +678,11 @@ class PlainTextFormatter(BaseFormatter): def _type_printers_default(self): d = pretty._type_pprinters.copy() d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) + # if NumPy is used, set precision for its float64 type + if "numpy" in sys.modules: + import numpy + + d[numpy.float64] = lambda obj, p, cycle: p.text(self.float_format % obj) return d @default('deferred_printers') @@ -823,7 +828,7 @@ class JSONFormatter(BaseFormatter): def _check_return(self, r, obj): """Check that a return value is appropriate - + Return the value if so, None otherwise, warning if invalid. 
""" if r is None: @@ -832,13 +837,11 @@ class JSONFormatter(BaseFormatter): if isinstance(r, tuple): # unpack data, metadata tuple for type checking on first element r, md = r - - # handle deprecated JSON-as-string form from IPython < 3 - if isinstance(r, str): - warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", - FormatterWarning) - r = json.loads(r) - + + assert not isinstance( + r, str + ), "JSON-as-string has been deprecated since IPython < 3" + if md is not None: # put the tuple back together r = (r, md) diff --git a/contrib/python/ipython/py3/IPython/core/getipython.py b/contrib/python/ipython/py3/IPython/core/getipython.py index e6d8a4c91d..5e9b13cf3c 100644 --- a/contrib/python/ipython/py3/IPython/core/getipython.py +++ b/contrib/python/ipython/py3/IPython/core/getipython.py @@ -16,7 +16,7 @@ def get_ipython(): """Get the global InteractiveShell instance. - + Returns None if no InteractiveShell instance is registered. """ from IPython.core.interactiveshell import InteractiveShell diff --git a/contrib/python/ipython/py3/IPython/core/history.py b/contrib/python/ipython/py3/IPython/core/history.py index 98373f279c..9b0b2cbd04 100644 --- a/contrib/python/ipython/py3/IPython/core/history.py +++ b/contrib/python/ipython/py3/IPython/core/history.py @@ -6,15 +6,9 @@ import atexit import datetime -import os +from pathlib import Path import re -try: - import sqlite3 -except ImportError: - try: - from pysqlite2 import dbapi2 as sqlite3 - except ImportError: - sqlite3 = None +import sqlite3 import threading from traitlets.config.configurable import LoggingConfigurable @@ -22,10 +16,18 @@ from decorator import decorator from IPython.utils.decorators import undoc from IPython.paths import locate_profile from traitlets import ( - Any, Bool, Dict, Instance, Integer, List, Unicode, TraitError, - default, observe, + Any, + Bool, + Dict, + Instance, + Integer, + List, + Unicode, + Union, + TraitError, + default, + observe, ) -from warnings import 
warn #----------------------------------------------------------------------------- # Classes and functions @@ -34,42 +36,30 @@ from warnings import warn @undoc class DummyDB(object): """Dummy DB that will act as a black hole for history. - + Only used in the absence of sqlite""" def execute(*args, **kwargs): return [] - + def commit(self, *args, **kwargs): pass - + def __enter__(self, *args, **kwargs): pass - + def __exit__(self, *args, **kwargs): pass @decorator -def needs_sqlite(f, self, *a, **kw): +def only_when_enabled(f, self, *a, **kw): """Decorator: return an empty list in the absence of sqlite.""" - if sqlite3 is None or not self.enabled: + if not self.enabled: return [] else: return f(self, *a, **kw) -if sqlite3 is not None: - DatabaseError = sqlite3.DatabaseError - OperationalError = sqlite3.OperationalError -else: - @undoc - class DatabaseError(Exception): - "Dummy exception when sqlite could not be imported. Should never occur." - - @undoc - class OperationalError(Exception): - "Dummy exception when sqlite could not be imported. Should never occur." 
- # use 16kB as threshold for whether a corrupt history db should be saved # that should be at least 100 entries or so _SAVE_DB_SIZE = 16384 @@ -85,24 +75,25 @@ def catch_corrupt_db(f, self, *a, **kw): """ try: return f(self, *a, **kw) - except (DatabaseError, OperationalError) as e: + except (sqlite3.DatabaseError, sqlite3.OperationalError) as e: self._corrupt_db_counter += 1 self.log.error("Failed to open SQLite history %s (%s).", self.hist_file, e) if self.hist_file != ':memory:': if self._corrupt_db_counter > self._corrupt_db_limit: self.hist_file = ':memory:' self.log.error("Failed to load history too many times, history will not be saved.") - elif os.path.isfile(self.hist_file): + elif self.hist_file.is_file(): # move the file out of the way - base, ext = os.path.splitext(self.hist_file) - size = os.stat(self.hist_file).st_size + base = str(self.hist_file.parent / self.hist_file.stem) + ext = self.hist_file.suffix + size = self.hist_file.stat().st_size if size >= _SAVE_DB_SIZE: # if there's significant content, avoid clobbering now = datetime.datetime.now().isoformat().replace(':', '.') newpath = base + '-corrupt-' + now + ext # don't clobber previous corrupt backups for i in range(100): - if not os.path.isfile(newpath): + if not Path(newpath).exists(): break else: newpath = base + '-corrupt-' + now + (u'-%i' % i) + ext @@ -110,14 +101,15 @@ def catch_corrupt_db(f, self, *a, **kw): # not much content, possibly empty; don't worry about clobbering # maybe we should just delete it? 
newpath = base + '-corrupt' + ext - os.rename(self.hist_file, newpath) + self.hist_file.rename(newpath) self.log.error("History file was moved to %s and a new file created.", newpath) self.init_db() return [] else: # Failed with :memory:, something serious is wrong raise - + + class HistoryAccessorBase(LoggingConfigurable): """An abstract class for History Accessors """ @@ -137,7 +129,7 @@ class HistoryAccessorBase(LoggingConfigurable): class HistoryAccessor(HistoryAccessorBase): """Access the history database without adding to it. - + This is intended for use by standalone history tools. IPython shells use HistoryManager, below, which is a subclass of this.""" @@ -147,37 +139,39 @@ class HistoryAccessor(HistoryAccessorBase): _corrupt_db_limit = 2 # String holding the path to the history file - hist_file = Unicode( + hist_file = Union( + [Instance(Path), Unicode()], help="""Path to file to use for SQLite history database. - + By default, IPython will put the history database in the IPython profile directory. If you would rather share one history among profiles, you can set this value in each, so that they are consistent. - + Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts. If you see IPython hanging, try setting this to something on a local disk, e.g:: - + ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite you can also use the specific value `:memory:` (including the colon at both end but not the back ticks), to avoid creating an history file. - - """).tag(config=True) - + + """, + ).tag(config=True) + enabled = Bool(True, help="""enable the SQLite history - + set enabled=False to disable the SQLite history, in which case there will be no stored history, no SQLite connection, and no background saving thread. This may be necessary in some threaded environments where IPython is embedded. 
- """ + """, ).tag(config=True) - + connection_options = Dict( help="""Options for configuring the SQLite connection - + These options are passed as keyword args to sqlite3.connect when establishing database connections. """ @@ -189,26 +183,24 @@ class HistoryAccessor(HistoryAccessorBase): def _db_changed(self, change): """validate the db, since it can be an Instance of two different types""" new = change['new'] - connection_types = (DummyDB,) - if sqlite3 is not None: - connection_types = (DummyDB, sqlite3.Connection) + connection_types = (DummyDB, sqlite3.Connection) if not isinstance(new, connection_types): msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \ (self.__class__.__name__, new) raise TraitError(msg) - - def __init__(self, profile='default', hist_file=u'', **traits): + + def __init__(self, profile="default", hist_file="", **traits): """Create a new history accessor. - + Parameters ---------- profile : str - The name of the profile from which to open history. + The name of the profile from which to open history. hist_file : str - Path to an SQLite history database stored by IPython. If specified, - hist_file overrides profile. + Path to an SQLite history database stored by IPython. If specified, + hist_file overrides profile. config : :class:`~traitlets.config.loader.Config` - Config object. hist_file can also be set through this. + Config object. hist_file can also be set through this. """ # We need a pointer back to the shell for various tasks. super(HistoryAccessor, self).__init__(**traits) @@ -217,53 +209,57 @@ class HistoryAccessor(HistoryAccessorBase): # set by config if hist_file: self.hist_file = hist_file - - if self.hist_file == u'': + + try: + self.hist_file + except TraitError: # No one has set the hist_file, yet. 
self.hist_file = self._get_hist_file_name(profile) - if sqlite3 is None and self.enabled: - warn("IPython History requires SQLite, your history will not be saved") - self.enabled = False - self.init_db() - + def _get_hist_file_name(self, profile='default'): """Find the history file for the given profile name. - + This is overridden by the HistoryManager subclass, to use the shell's active profile. - + Parameters ---------- profile : str - The name of a profile which has a history file. + The name of a profile which has a history file. """ - return os.path.join(locate_profile(profile), 'history.sqlite') - + return Path(locate_profile(profile)) / "history.sqlite" + @catch_corrupt_db def init_db(self): """Connect to the database, and create tables if necessary.""" if not self.enabled: self.db = DummyDB() return - + # use detect_types so that timestamps return datetime objects kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) kwargs.update(self.connection_options) - self.db = sqlite3.connect(self.hist_file, **kwargs) - self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer - primary key autoincrement, start timestamp, - end timestamp, num_cmds integer, remark text)""") - self.db.execute("""CREATE TABLE IF NOT EXISTS history - (session integer, line integer, source text, source_raw text, - PRIMARY KEY (session, line))""") - # Output history is optional, but ensure the table's there so it can be - # enabled later. 
- self.db.execute("""CREATE TABLE IF NOT EXISTS output_history - (session integer, line integer, output text, - PRIMARY KEY (session, line))""") - self.db.commit() + self.db = sqlite3.connect(str(self.hist_file), **kwargs) + with self.db: + self.db.execute( + """CREATE TABLE IF NOT EXISTS sessions (session integer + primary key autoincrement, start timestamp, + end timestamp, num_cmds integer, remark text)""" + ) + self.db.execute( + """CREATE TABLE IF NOT EXISTS history + (session integer, line integer, source text, source_raw text, + PRIMARY KEY (session, line))""" + ) + # Output history is optional, but ensure the table's there so it can be + # enabled later. + self.db.execute( + """CREATE TABLE IF NOT EXISTS output_history + (session integer, line integer, output text, + PRIMARY KEY (session, line))""" + ) # success! reset corrupt db count self._corrupt_db_counter = 0 @@ -275,17 +271,19 @@ class HistoryAccessor(HistoryAccessorBase): ## ------------------------------- ## Methods for retrieving history: ## ------------------------------- - def _run_sql(self, sql, params, raw=True, output=False): + def _run_sql(self, sql, params, raw=True, output=False, latest=False): """Prepares and runs an SQL query for the history database. Parameters ---------- sql : str - Any filtering expressions to go after SELECT ... FROM ... + Any filtering expressions to go after SELECT ... FROM ... 
params : tuple - Parameters passed to the SQL query (to replace "?") + Parameters passed to the SQL query (to replace "?") raw, output : bool - See :meth:`get_range` + See :meth:`get_range` + latest : bool + Select rows with max (session, line) Returns ------- @@ -296,36 +294,38 @@ class HistoryAccessor(HistoryAccessorBase): if output: sqlfrom = "history LEFT JOIN output_history USING (session, line)" toget = "history.%s, output_history.output" % toget - cur = self.db.execute("SELECT session, line, %s FROM %s " %\ - (toget, sqlfrom) + sql, params) + if latest: + toget += ", MAX(session * 128 * 1024 + line)" + this_querry = "SELECT session, line, %s FROM %s " % (toget, sqlfrom) + sql + cur = self.db.execute(this_querry, params) + if latest: + cur = (row[:-1] for row in cur) if output: # Regroup into 3-tuples, and parse JSON return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur) return cur - @needs_sqlite + @only_when_enabled @catch_corrupt_db def get_session_info(self, session): """Get info about a session. Parameters ---------- - session : int Session number to retrieve. Returns ------- - session_id : int - Session ID number + Session ID number start : datetime - Timestamp for the start of the session. + Timestamp for the start of the session. end : datetime - Timestamp for the end of the session, or None if IPython crashed. + Timestamp for the end of the session, or None if IPython crashed. num_cmds : int - Number of commands run, or None if IPython crashed. + Number of commands run, or None if IPython crashed. remark : unicode - A manually set description. + A manually set description. """ query = "SELECT * from sessions where session == ?" return self.db.execute(query, (session,)).fetchone() @@ -333,7 +333,7 @@ class HistoryAccessor(HistoryAccessorBase): @catch_corrupt_db def get_last_session_id(self): """Get the last session ID currently in the database. 
- + Within IPython, this should be the same as the value stored in :attr:`HistoryManager.session_number`. """ @@ -344,16 +344,21 @@ class HistoryAccessor(HistoryAccessorBase): def get_tail(self, n=10, raw=True, output=False, include_latest=False): """Get the last n lines from the history database. + Most recent entry last. + + Completion will be reordered so that that the last ones are when + possible from current session. + Parameters ---------- n : int - The number of lines to get + The number of lines to get raw, output : bool - See :meth:`get_range` + See :meth:`get_range` include_latest : bool - If False (default), n+1 lines are fetched, and the latest one - is discarded. This is intended to be used where the function - is called by a user command, which it should not return. + If False (default), n+1 lines are fetched, and the latest one + is discarded. This is intended to be used where the function + is called by a user command, which it should not return. Returns ------- @@ -362,11 +367,31 @@ class HistoryAccessor(HistoryAccessorBase): self.writeout_cache() if not include_latest: n += 1 - cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?", - (n,), raw=raw, output=output) + # cursor/line/entry + this_cur = list( + self._run_sql( + "WHERE session == ? ORDER BY line DESC LIMIT ? ", + (self.session_number, n), + raw=raw, + output=output, + ) + ) + other_cur = list( + self._run_sql( + "WHERE session != ? 
ORDER BY session DESC, line DESC LIMIT ?", + (self.session_number, n), + raw=raw, + output=output, + ) + ) + + everything = this_cur + other_cur + + everything = everything[:n] + if not include_latest: - return reversed(list(cur)[1:]) - return reversed(list(cur)) + return list(everything)[:0:-1] + return list(everything)[::-1] @catch_corrupt_db def search(self, pattern="*", raw=True, search_raw=True, @@ -377,16 +402,16 @@ class HistoryAccessor(HistoryAccessorBase): Parameters ---------- pattern : str - The wildcarded pattern to match when searching + The wildcarded pattern to match when searching search_raw : bool - If True, search the raw input, otherwise, the parsed input + If True, search the raw input, otherwise, the parsed input raw, output : bool - See :meth:`get_range` + See :meth:`get_range` n : None or int - If an integer is given, it defines the limit of - returned entries. + If an integer is given, it defines the limit of + returned entries. unique : bool - When it is true, return only unique entries. + When it is true, return only unique entries. Returns ------- @@ -405,11 +430,11 @@ class HistoryAccessor(HistoryAccessorBase): params += (n,) elif unique: sqlform += " ORDER BY session, line" - cur = self._run_sql(sqlform, params, raw=raw, output=output) + cur = self._run_sql(sqlform, params, raw=raw, output=output, latest=unique) if n is not None: return reversed(list(cur)) return cur - + @catch_corrupt_db def get_range(self, session, start=1, stop=None, raw=True,output=False): """Retrieve input by session. @@ -434,9 +459,9 @@ class HistoryAccessor(HistoryAccessorBase): Returns ------- entries - An iterator over the desired lines. Each line is a 3-tuple, either - (session, line, input) if output is False, or - (session, line, (input, output)) if output is True. + An iterator over the desired lines. Each line is a 3-tuple, either + (session, line, input) if output is False, or + (session, line, (input, output)) if output is True. 
""" if stop: lineclause = "line >= ? AND line < ?" @@ -455,10 +480,13 @@ class HistoryAccessor(HistoryAccessorBase): Parameters ---------- rangestr : str - A string specifying ranges, e.g. "5 ~2/1-4". See - :func:`magic_history` for full details. + A string specifying ranges, e.g. "5 ~2/1-4". If empty string is used, + this will return everything from current session's history. + + See the documentation of :func:`%history` for the full details. + raw, output : bool - As :meth:`get_range` + As :meth:`get_range` Returns ------- @@ -486,7 +514,7 @@ class HistoryManager(HistoryAccessor): @default('dir_hist') def _dir_hist_default(self): try: - return [os.getcwd()] + return [Path.cwd()] except OSError: return [] @@ -498,7 +526,7 @@ class HistoryManager(HistoryAccessor): # The number of the current session in the history database session_number = Integer() - + db_log_output = Bool(False, help="Should the history database include output? (default: no)" ).tag(config=True) @@ -509,12 +537,12 @@ class HistoryManager(HistoryAccessor): # The input and output caches db_input_cache = List() db_output_cache = List() - + # History saving in separate thread save_thread = Instance('IPython.core.history.HistorySavingThread', allow_none=True) save_flag = Instance(threading.Event, allow_none=True) - + # Private interface # Variables used to store the three last inputs from the user. On each new # history update, we populate the user's namespace with these, shifted as @@ -538,37 +566,37 @@ class HistoryManager(HistoryAccessor): self.save_flag = threading.Event() self.db_input_cache_lock = threading.Lock() self.db_output_cache_lock = threading.Lock() - + try: self.new_session() - except OperationalError: + except sqlite3.OperationalError: self.log.error("Failed to create history session in %s. 
History will not be saved.", self.hist_file, exc_info=True) self.hist_file = ':memory:' - + if self.enabled and self.hist_file != ':memory:': self.save_thread = HistorySavingThread(self) self.save_thread.start() def _get_hist_file_name(self, profile=None): """Get default history file name based on the Shell's profile. - + The profile parameter is ignored, but must exist for compatibility with the parent class.""" profile_dir = self.shell.profile_dir.location - return os.path.join(profile_dir, 'history.sqlite') - - @needs_sqlite + return Path(profile_dir) / "history.sqlite" + + @only_when_enabled def new_session(self, conn=None): """Get a new session number.""" if conn is None: conn = self.db - + with conn: cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL, NULL, "") """, (datetime.datetime.now(),)) self.session_number = cur.lastrowid - + def end_session(self): """Close the database session, filling in the end time and line count.""" self.writeout_cache() @@ -577,20 +605,20 @@ class HistoryManager(HistoryAccessor): session==?""", (datetime.datetime.now(), len(self.input_hist_parsed)-1, self.session_number)) self.session_number = 0 - + def name_session(self, name): """Give the current session a name in the history database.""" with self.db: self.db.execute("UPDATE sessions SET remark=? WHERE session==?", (name, self.session_number)) - + def reset(self, new_session=True): """Clear the session history, releasing all object references, and optionally open a new session.""" self.output_hist.clear() # The directory history can't be completely empty - self.dir_hist[:] = [os.getcwd()] - + self.dir_hist[:] = [Path.cwd()] + if new_session: if self.session_number: self.end_session() @@ -606,24 +634,22 @@ class HistoryManager(HistoryAccessor): Parameters ---------- - session : int Session number to retrieve. The current session is 0, and negative numbers count back from current session, so -1 is the previous session. 
Returns ------- - session_id : int - Session ID number + Session ID number start : datetime - Timestamp for the start of the session. + Timestamp for the start of the session. end : datetime - Timestamp for the end of the session, or None if IPython crashed. + Timestamp for the end of the session, or None if IPython crashed. num_cmds : int - Number of commands run, or None if IPython crashed. + Number of commands run, or None if IPython crashed. remark : unicode - A manually set description. + A manually set description. """ if session <= 0: session += self.session_number @@ -634,7 +660,7 @@ class HistoryManager(HistoryAccessor): """Get input and output history from the current session. Called by get_range, and takes similar parameters.""" input_hist = self.input_hist_raw if raw else self.input_hist_parsed - + n = len(input_hist) if start < 0: start += n @@ -642,17 +668,17 @@ class HistoryManager(HistoryAccessor): stop = n elif stop < 0: stop += n - + for i in range(start, stop): if output: line = (input_hist[i], self.output_hist_reprs.get(i)) else: line = input_hist[i] yield (0, i, line) - + def get_range(self, session=0, start=1, stop=None, raw=True,output=False): """Retrieve input by session. - + Parameters ---------- session : int @@ -670,13 +696,13 @@ class HistoryManager(HistoryAccessor): objects for the current session, or text reprs from previous sessions if db_log_output was enabled at the time. Where no output is found, None is used. - + Returns ------- entries - An iterator over the desired lines. Each line is a 3-tuple, either - (session, line, input) if output is False, or - (session, line, (input, output)) if output is True. + An iterator over the desired lines. Each line is a 3-tuple, either + (session, line, input) if output is False, or + (session, line, (input, output)) if output is True. 
""" if session <= 0: session += self.session_number @@ -695,14 +721,12 @@ class HistoryManager(HistoryAccessor): Parameters ---------- line_num : int - The prompt number of this input. - + The prompt number of this input. source : str - Python input. - + Python input. source_raw : str, optional - If given, this is the raw input without any IPython transformations - applied to it. If not given, ``source`` is used. + If given, this is the raw input without any IPython transformations + applied to it. If not given, ``source`` is used. """ if source_raw is None: source_raw = source @@ -734,7 +758,7 @@ class HistoryManager(HistoryAccessor): '_ii': self._ii, '_iii': self._iii, new_i : self._i00 } - + if self.shell is not None: self.shell.push(to_main, interactive=False) @@ -746,7 +770,7 @@ class HistoryManager(HistoryAccessor): Parameters ---------- line_num : int - The line number from which to save outputs + The line number from which to save outputs """ if (not self.db_log_output) or (line_num not in self.output_hist_reprs): return @@ -769,7 +793,7 @@ class HistoryManager(HistoryAccessor): conn.execute("INSERT INTO output_history VALUES (?, ?, ?)", (self.session_number,)+line) - @needs_sqlite + @only_when_enabled def writeout_cache(self, conn=None): """Write any entries in the cache to the database.""" if conn is None: @@ -818,12 +842,13 @@ class HistorySavingThread(threading.Thread): self.enabled = history_manager.enabled atexit.register(self.stop) - @needs_sqlite + @only_when_enabled def run(self): # We need a separate db connection per thread: try: - self.db = sqlite3.connect(self.history_manager.hist_file, - **self.history_manager.connection_options + self.db = sqlite3.connect( + str(self.history_manager.hist_file), + **self.history_manager.connection_options, ) while True: self.history_manager.save_flag.wait() @@ -860,11 +885,18 @@ $""", re.VERBOSE) def extract_hist_ranges(ranges_str): """Turn a string of history ranges into 3-tuples of (session, start, stop). 
+ Empty string results in a `[(0, 1, None)]`, i.e. "everything from current + session". + Examples -------- >>> list(extract_hist_ranges("~8/5-~7/4 2")) [(-8, 5, None), (-7, 1, 5), (0, 2, 3)] """ + if ranges_str == "": + yield (0, 1, None) # Everything from current session + return + for range_str in ranges_str.split(): rmatch = range_re.match(range_str) if not rmatch: diff --git a/contrib/python/ipython/py3/IPython/core/historyapp.py b/contrib/python/ipython/py3/IPython/core/historyapp.py index a6437eff26..01a55343f8 100644 --- a/contrib/python/ipython/py3/IPython/core/historyapp.py +++ b/contrib/python/ipython/py3/IPython/core/historyapp.py @@ -5,8 +5,8 @@ An application for managing IPython history. To be invoked as the `ipython history` subcommand. """ -import os import sqlite3 +from pathlib import Path from traitlets.config.application import Application from .application import BaseIPythonApplication @@ -52,8 +52,8 @@ class HistoryTrim(BaseIPythonApplication): )) def start(self): - profile_dir = self.profile_dir.location - hist_file = os.path.join(profile_dir, 'history.sqlite') + profile_dir = Path(self.profile_dir.location) + hist_file = profile_dir / "history.sqlite" con = sqlite3.connect(hist_file) # Grab the recent history from the current database. @@ -77,12 +77,12 @@ class HistoryTrim(BaseIPythonApplication): con.close() # Create the new history database. - new_hist_file = os.path.join(profile_dir, 'history.sqlite.new') + new_hist_file = profile_dir / "history.sqlite.new" i = 0 - while os.path.exists(new_hist_file): + while new_hist_file.exists(): # Make sure we don't interfere with an existing file. 
i += 1 - new_hist_file = os.path.join(profile_dir, 'history.sqlite.new'+str(i)) + new_hist_file = profile_dir / ("history.sqlite.new" + str(i)) new_db = sqlite3.connect(new_hist_file) new_db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer primary key autoincrement, start timestamp, @@ -106,16 +106,16 @@ class HistoryTrim(BaseIPythonApplication): if self.backup: i = 1 - backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) - while os.path.exists(backup_hist_file): + backup_hist_file = profile_dir / ("history.sqlite.old.%d" % i) + while backup_hist_file.exists(): i += 1 - backup_hist_file = os.path.join(profile_dir, 'history.sqlite.old.%d' % i) - os.rename(hist_file, backup_hist_file) + backup_hist_file = profile_dir / ("history.sqlite.old.%d" % i) + hist_file.rename(backup_hist_file) print("Backed up longer history file to", backup_hist_file) else: - os.remove(hist_file) + hist_file.unlink() - os.rename(new_hist_file, hist_file) + new_hist_file.rename(hist_file) class HistoryClear(HistoryTrim): description = clear_hist_help diff --git a/contrib/python/ipython/py3/IPython/core/hooks.py b/contrib/python/ipython/py3/IPython/core/hooks.py index fa732f7ba8..09b08d942e 100644 --- a/contrib/python/ipython/py3/IPython/core/hooks.py +++ b/contrib/python/ipython/py3/IPython/core/hooks.py @@ -44,10 +44,13 @@ from .error import TryNext # List here all the default hooks. For now it's just the editor functions # but over time we'll move here all the public API for user-accessible things. 
-__all__ = ['editor', 'synchronize_with_editor', - 'shutdown_hook', 'late_startup_hook', - 'show_in_pager','pre_prompt_hook', - 'pre_run_code_hook', 'clipboard_get'] +__all__ = [ + "editor", + "synchronize_with_editor", + "show_in_pager", + "pre_prompt_hook", + "clipboard_get", +] deprecated = {'pre_run_code_hook': "a callback for the 'pre_execute' or 'pre_run_cell' event", 'late_startup_hook': "a callback for the 'shell_initialized' event", @@ -132,23 +135,6 @@ class CommandChainDispatcher: return iter(self.chain) -def shutdown_hook(self): - """ default shutdown hook - - Typically, shutdown hooks should raise TryNext so all shutdown ops are done - """ - - #print "default shutdown hook ok" # dbg - return - - -def late_startup_hook(self): - """ Executed after ipython has been constructed and configured - - """ - #print "default startup hook ok" # dbg - - def show_in_pager(self, data, start, screen_lines): """ Run a string through pager """ # raising TryNext here will use the default paging functionality @@ -165,11 +151,6 @@ def pre_prompt_hook(self): return None -def pre_run_code_hook(self): - """ Executed before running the (prefiltered) code in IPython """ - return None - - def clipboard_get(self): """ Get text from the clipboard. """ diff --git a/contrib/python/ipython/py3/IPython/core/inputsplitter.py b/contrib/python/ipython/py3/IPython/core/inputsplitter.py index e7bc6e7f5a..01b9c8a5e6 100644 --- a/contrib/python/ipython/py3/IPython/core/inputsplitter.py +++ b/contrib/python/ipython/py3/IPython/core/inputsplitter.py @@ -210,7 +210,7 @@ def last_blank(src): Parameters ---------- src : string - A single or multiline string. + A single or multiline string. """ if not src: return False ll = src.splitlines()[-1] @@ -228,7 +228,7 @@ def last_two_blanks(src): Parameters ---------- src : string - A single or multiline string. + A single or multiline string. 
""" if not src: return False # The logic here is tricky: I couldn't get a regexp to work and pass all @@ -251,7 +251,7 @@ def remove_comments(src): Parameters ---------- src : string - A single or multiline input string. + A single or multiline input string. Returns ------- @@ -351,22 +351,22 @@ class InputSplitter(object): def check_complete(self, source): """Return whether a block of code is ready to execute, or should be continued - + This is a non-stateful API, and will reset the state of this InputSplitter. - + Parameters ---------- source : string - Python input code, which can be multiline. - + Python input code, which can be multiline. + Returns ------- status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. + One of 'complete', 'incomplete', or 'invalid' if source is not a + prefix of valid code. indent_spaces : int or None - The number of spaces by which to indent the next line of code. If - status is not 'incomplete', this is None. + The number of spaces by which to indent the next line of code. If + status is not 'incomplete', this is None. """ self.reset() try: @@ -397,15 +397,15 @@ class InputSplitter(object): Parameters ---------- lines : string - One or more lines of Python input. + One or more lines of Python input. Returns ------- is_complete : boolean - True if the current input source (the result of the current input - plus prior inputs) forms a complete Python execution block. Note that - this value is also stored as a private attribute (``_is_complete``), so it - can be queried at any time. + True if the current input source (the result of the current input + plus prior inputs) forms a complete Python execution block. Note that + this value is also stored as a private attribute (``_is_complete``), so it + can be queried at any time. 
""" assert isinstance(lines, str) self._store(lines) @@ -448,7 +448,7 @@ class InputSplitter(object): guess whether a block is complete or not based solely on prior and current input lines. The InputSplitter considers it has a complete interactive block and will not accept more input when either: - + * A SyntaxError is raised * The code is complete and consists of a single line or a single @@ -618,9 +618,9 @@ class IPythonInputSplitter(InputSplitter): def flush_transformers(self): def _flush(transform, outs): """yield transformed lines - + always strings, never None - + transform: the current transform outs: an iterable of previously transformed inputs. Each may be multiline, which will be passed @@ -690,15 +690,15 @@ class IPythonInputSplitter(InputSplitter): Parameters ---------- lines : string - One or more lines of Python input. + One or more lines of Python input. Returns ------- is_complete : boolean - True if the current input source (the result of the current input - plus prior inputs) forms a complete Python execution block. Note that - this value is also stored as a private attribute (_is_complete), so it - can be queried at any time. + True if the current input source (the result of the current input + plus prior inputs) forms a complete Python execution block. Note that + this value is also stored as a private attribute (_is_complete), so it + can be queried at any time. """ assert isinstance(lines, str) # We must ensure all input is pure unicode @@ -728,10 +728,10 @@ class IPythonInputSplitter(InputSplitter): def _transform_line(self, line): """Push a line of input code through the various transformers. - + Returns any output from the transformers, or None if a transformer is accumulating lines. - + Sets self.transformer_accumulating as a side effect. 
""" def _accumulating(dbg): diff --git a/contrib/python/ipython/py3/IPython/core/inputtransformer.py b/contrib/python/ipython/py3/IPython/core/inputtransformer.py index 14a351d40a..77f69f388f 100644 --- a/contrib/python/ipython/py3/IPython/core/inputtransformer.py +++ b/contrib/python/ipython/py3/IPython/core/inputtransformer.py @@ -46,7 +46,7 @@ class InputTransformer(metaclass=abc.ABCMeta): def push(self, line): """Send a line of input to the transformer, returning the transformed input or None if the transformer is waiting for more input. - + Must be overridden by subclasses. Implementations may raise ``SyntaxError`` if the input is invalid. No @@ -58,7 +58,7 @@ class InputTransformer(metaclass=abc.ABCMeta): def reset(self): """Return, transformed any lines that the transformer has accumulated, and reset its internal state. - + Must be overridden by subclasses. """ pass @@ -313,7 +313,7 @@ def has_comment(src): Parameters ---------- src : string - A single line input string. + A single line input string. Returns ------- @@ -325,11 +325,11 @@ def has_comment(src): def ends_in_comment_or_string(src): """Indicates whether or not an input line ends in a comment or within a multiline string. - + Parameters ---------- src : string - A single line input string. + A single line input string. Returns ------- @@ -356,7 +356,7 @@ def help_end(line): @CoroutineInputTransformer.wrap def cellmagic(end_on_blank_line=False): """Captures & transforms cell magics. - + After a cell magic is started, this stores up any lines it gets until it is reset (sent None). """ @@ -395,7 +395,7 @@ def cellmagic(end_on_blank_line=False): def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None): """Remove matching input prompts from a block of input. - + Parameters ---------- prompt_re : regular expression @@ -405,9 +405,11 @@ def _strip_prompts(prompt_re, initial_re=None, turnoff_re=None): If no initial expression is given, prompt_re will be used everywhere. 
Used mainly for plain Python prompts, where the continuation prompt ``...`` is a valid Python expression in Python 3, so shouldn't be stripped. - - If initial_re and prompt_re differ, - only initial_re will be tested against the first line. + + Notes + ----- + If `initial_re` and `prompt_re differ`, + only `initial_re` will be tested against the first line. If any prompt is found on the first two lines, prompts will be stripped from the rest of the block. """ @@ -473,7 +475,7 @@ def ipy_prompt(): @CoroutineInputTransformer.wrap def leading_indent(): """Remove leading indentation. - + If the first line starts with a spaces or tabs, the same whitespace will be removed from each following line until it is reset. """ diff --git a/contrib/python/ipython/py3/IPython/core/inputtransformer2.py b/contrib/python/ipython/py3/IPython/core/inputtransformer2.py index c0bb39979d..a8f676f495 100644 --- a/contrib/python/ipython/py3/IPython/core/inputtransformer2.py +++ b/contrib/python/ipython/py3/IPython/core/inputtransformer2.py @@ -15,7 +15,7 @@ import sys from codeop import CommandCompiler, Compile import re import tokenize -from typing import List, Tuple, Union +from typing import List, Tuple, Optional, Any import warnings _indent_re = re.compile(r'^[ \t]+') @@ -91,7 +91,30 @@ classic_prompt = PromptStripper( initial_re=re.compile(r'^>>>( |$)') ) -ipython_prompt = PromptStripper(re.compile(r'^(In \[\d+\]: |\s*\.{3,}: ?)')) +ipython_prompt = PromptStripper( + re.compile( + r""" + ^( # Match from the beginning of a line, either: + + # 1. First-line prompt: + ((\[nav\]|\[ins\])?\ )? # Vi editing mode prompt, if it's there + In\ # The 'In' of the prompt, with a space + \[\d+\]: # Command index, as displayed in the prompt + \ # With a mandatory trailing space + + | # ... or ... + + # 2. The three dots of the multiline prompt + \s* # All leading whitespace characters + \.{3,}: # The three (or more) dots + \ ? 
# With an optional trailing space + + ) + """, + re.VERBOSE, + ) +) + def cell_magic(lines): if not lines or not lines[0].startswith('%%'): @@ -105,7 +128,7 @@ def cell_magic(lines): % (magic_name, first_line, body)] -def _find_assign_op(token_line) -> Union[int, None]: +def _find_assign_op(token_line) -> Optional[int]: """Get the index of the first assignment in the line ('=' not inside brackets) Note: We don't try to support multiple special assignment (a = b = %foo) @@ -120,6 +143,7 @@ def _find_assign_op(token_line) -> Union[int, None]: elif s in {')', ']', '}'}: if paren_level > 0: paren_level -= 1 + return None def find_end_of_continued_line(lines, start_line: int): """Find the last line of a line explicitly extended using backslashes. @@ -472,10 +496,15 @@ def make_tokens_by_line(lines:List[str]): # lines or comments. This is intentional - see https://bugs.python.org/issue17061 # We want to group the former case together but split the latter, so we # track parentheses level, similar to the internals of tokenize. 
- NEWLINE, NL = tokenize.NEWLINE, tokenize.NL - tokens_by_line = [[]] - if len(lines) > 1 and not lines[0].endswith(('\n', '\r', '\r\n', '\x0b', '\x0c')): - warnings.warn("`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified") + + # reexported from token on 3.7+ + NEWLINE, NL = tokenize.NEWLINE, tokenize.NL # type: ignore + tokens_by_line: List[List[Any]] = [[]] + if len(lines) > 1 and not lines[0].endswith(("\n", "\r", "\r\n", "\x0b", "\x0c")): + warnings.warn( + "`make_tokens_by_line` received a list of lines which do not have lineending markers ('\\n', '\\r', '\\r\\n', '\\x0b', '\\x0c'), behavior will be unspecified", + stacklevel=2, + ) parenlev = 0 try: for token in tokenize.generate_tokens(iter(lines).__next__): @@ -499,6 +528,20 @@ def make_tokens_by_line(lines:List[str]): return tokens_by_line + +def has_sunken_brackets(tokens: List[tokenize.TokenInfo]): + """Check if the depth of brackets in the list of tokens drops below 0""" + parenlev = 0 + for token in tokens: + if token.string in {"(", "[", "{"}: + parenlev += 1 + elif token.string in {")", "]", "}"}: + parenlev -= 1 + if parenlev < 0: + return True + return False + + def show_linewise_tokens(s: str): """For investigation and debugging""" if not s.endswith('\n'): @@ -592,17 +635,17 @@ class TransformerManager: Parameters ---------- - source : string - Python input code, which can be multiline. + cell : string + Python input code, which can be multiline. Returns ------- status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. + One of 'complete', 'incomplete', or 'invalid' if source is not a + prefix of valid code. indent_spaces : int or None - The number of spaces by which to indent the next line of code. If - status is not 'incomplete', this is None. + The number of spaces by which to indent the next line of code. 
If + status is not 'incomplete', this is None. """ # Remember if the lines ends in a new line. ends_with_newline = False @@ -653,6 +696,15 @@ class TransformerManager: tokens_by_line = make_tokens_by_line(lines) + # Bail if we got one line and there are more closing parentheses than + # the opening ones + if ( + len(lines) == 1 + and tokens_by_line + and has_sunken_brackets(tokens_by_line[0]) + ): + return "invalid", None + if not tokens_by_line: return 'incomplete', find_last_indent(lines) @@ -660,7 +712,7 @@ class TransformerManager: # We're in a multiline string or expression return 'incomplete', find_last_indent(lines) - newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER} + newline_types = {tokenize.NEWLINE, tokenize.COMMENT, tokenize.ENDMARKER} # type: ignore # Pop the last line which only contains DEDENTs and ENDMARKER last_token_line = None @@ -726,19 +778,11 @@ class MaybeAsyncCompile(Compile): self.flags |= extra_flags - if sys.version_info < (3,8): - def __call__(self, *args, **kwds): - return compile(*args, **kwds) - - class MaybeAsyncCommandCompiler(CommandCompiler): def __init__(self, extra_flags=0): self.compiler = MaybeAsyncCompile(extra_flags=extra_flags) -if (sys.version_info.major, sys.version_info.minor) >= (3, 8): - _extra_flags = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT -else: - _extra_flags = ast.PyCF_ONLY_AST +_extra_flags = ast.PyCF_ALLOW_TOP_LEVEL_AWAIT compile_command = MaybeAsyncCommandCompiler(extra_flags=_extra_flags) diff --git a/contrib/python/ipython/py3/IPython/core/interactiveshell.py b/contrib/python/ipython/py3/IPython/core/interactiveshell.py index 8fd546b847..ea9f6310ba 100644 --- a/contrib/python/ipython/py3/IPython/core/interactiveshell.py +++ b/contrib/python/ipython/py3/IPython/core/interactiveshell.py @@ -21,34 +21,53 @@ import inspect import os import re import runpy +import subprocess import sys import tempfile import traceback import types -import subprocess import warnings +from ast import stmt from io 
import open as io_open - +from logging import error from pathlib import Path -from pickleshare import PickleShareDB +from typing import Callable +from typing import List as ListType +from typing import Optional, Tuple +from warnings import warn +from pickleshare import PickleShareDB +from tempfile import TemporaryDirectory +from traitlets import ( + Any, + Bool, + CaselessStrEnum, + Dict, + Enum, + Instance, + Integer, + List, + Type, + Unicode, + default, + observe, + validate, +) from traitlets.config.configurable import SingletonConfigurable from traitlets.utils.importstring import import_item -from IPython.core import oinspect -from IPython.core import magic -from IPython.core import page -from IPython.core import prefilter -from IPython.core import ultratb + +import IPython.core.hooks +from IPython.core import magic, oinspect, page, prefilter, ultratb from IPython.core.alias import Alias, AliasManager from IPython.core.autocall import ExitAutocall from IPython.core.builtin_trap import BuiltinTrap -from IPython.core.events import EventManager, available_events from IPython.core.compilerop import CachingCompiler, check_linecache_ipython from IPython.core.debugger import InterruptiblePdb from IPython.core.display_trap import DisplayTrap from IPython.core.displayhook import DisplayHook from IPython.core.displaypub import DisplayPublisher from IPython.core.error import InputRejected, UsageError +from IPython.core.events import EventManager, available_events from IPython.core.extensions import ExtensionManager from IPython.core.formatters import DisplayFormatter from IPython.core.history import HistoryManager @@ -60,36 +79,19 @@ from IPython.core.prefilter import PrefilterManager from IPython.core.profiledir import ProfileDir from IPython.core.usage import default_banner from IPython.display import display +from IPython.paths import get_ipython_dir from IPython.testing.skipdoctest import skip_doctest -from IPython.utils import PyColorize -from IPython.utils import 
io -from IPython.utils import py3compat -from IPython.utils import openpy +from IPython.utils import PyColorize, io, openpy, py3compat from IPython.utils.decorators import undoc from IPython.utils.io import ask_yes_no from IPython.utils.ipstruct import Struct -from IPython.paths import get_ipython_dir -from IPython.utils.path import get_home_dir, get_py_filename, ensure_dir_exists -from IPython.utils.process import system, getoutput +from IPython.utils.path import ensure_dir_exists, get_home_dir, get_py_filename +from IPython.utils.process import getoutput, system from IPython.utils.strdispatch import StrDispatch from IPython.utils.syspathcontext import prepended_to_syspath -from IPython.utils.text import format_screen, LSString, SList, DollarFormatter -from IPython.utils.tempdir import TemporaryDirectory -from traitlets import ( - Integer, Bool, CaselessStrEnum, Enum, List, Dict, Unicode, Instance, Type, - observe, default, validate, Any -) -from warnings import warn -from logging import error -import IPython.core.hooks +from IPython.utils.text import DollarFormatter, LSString, SList, format_screen -from typing import List as ListType, Tuple, Optional -from ast import AST - -# NoOpContext is deprecated, but ipykernel imports it from here. 
-# See https://github.com/ipython/ipykernel/issues/157 -# (2016, let's try to remove than in IPython 8.0) -from IPython.utils.contexts import NoOpContext +sphinxify: Optional[Callable] try: import docrepr.sphinxify as sphx @@ -115,118 +117,25 @@ class ProvisionalWarning(DeprecationWarning): """ pass -if sys.version_info > (3,8): - from ast import Module -else : - # mock the new API, ignore second argument - # see https://github.com/ipython/ipython/issues/11590 - from ast import Module as OriginalModule - Module = lambda nodelist, type_ignores: OriginalModule(nodelist) - -if sys.version_info > (3,6): - _assign_nodes = (ast.AugAssign, ast.AnnAssign, ast.Assign) - _single_targets_nodes = (ast.AugAssign, ast.AnnAssign) -else: - _assign_nodes = (ast.AugAssign, ast.Assign ) - _single_targets_nodes = (ast.AugAssign, ) +from ast import Module + +_assign_nodes = (ast.AugAssign, ast.AnnAssign, ast.Assign) +_single_targets_nodes = (ast.AugAssign, ast.AnnAssign) #----------------------------------------------------------------------------- # Await Helpers #----------------------------------------------------------------------------- -def removed_co_newlocals(function:types.FunctionType) -> types.FunctionType: - """Return a function that do not create a new local scope. - - Given a function, create a clone of this function where the co_newlocal flag - has been removed, making this function code actually run in the sourounding - scope. - - We need this in order to run asynchronous code in user level namespace. 
- """ - from types import CodeType, FunctionType - CO_NEWLOCALS = 0x0002 - code = function.__code__ - new_co_flags = code.co_flags & ~CO_NEWLOCALS - if sys.version_info > (3, 8, 0, 'alpha', 3): - new_code = code.replace(co_flags=new_co_flags) - else: - new_code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - new_co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - code.co_name, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars - ) - return FunctionType(new_code, globals(), function.__name__, function.__defaults__) - - # we still need to run things using the asyncio eventloop, but there is no # async integration -from .async_helpers import (_asyncio_runner, _asyncify, _pseudo_sync_runner) -from .async_helpers import _curio_runner, _trio_runner, _should_be_async - - -def _ast_asyncify(cell:str, wrapper_name:str) -> ast.Module: - """ - Parse a cell with top-level await and modify the AST to be able to run it later. - - Parameter - --------- - - cell: str - The code cell to asyncronify - wrapper_name: str - The name of the function to be used to wrap the passed `cell`. It is - advised to **not** use a python identifier in order to not pollute the - global namespace in which the function will be ran. - - Return - ------ - - A module object AST containing **one** function named `wrapper_name`. - - The given code is wrapped in a async-def function, parsed into an AST, and - the resulting function definition AST is modified to return the last - expression. - - The last expression or await node is moved into a return statement at the - end of the function, and removed from its original location. If the last - node is not Expr or Await nothing is done. 
- - The function `__code__` will need to be later modified (by - ``removed_co_newlocals``) in a subsequent step to not create new `locals()` - meaning that the local and global scope are the same, ie as if the body of - the function was at module level. - - Lastly a call to `locals()` is made just before the last expression of the - function, or just after the last assignment or statement to make sure the - global dict is updated as python function work with a local fast cache which - is updated only on `local()` calls. - """ +from .async_helpers import ( + _asyncio_runner, + _curio_runner, + _pseudo_sync_runner, + _should_be_async, + _trio_runner, +) - from ast import Expr, Await, Return - if sys.version_info >= (3,8): - return ast.parse(cell) - tree = ast.parse(_asyncify(cell)) - - function_def = tree.body[0] - function_def.name = wrapper_name - try_block = function_def.body[0] - lastexpr = try_block.body[-1] - if isinstance(lastexpr, (Expr, Await)): - try_block.body[-1] = Return(lastexpr.value) - ast.fix_missing_locations(tree) - return tree #----------------------------------------------------------------------------- # Globals #----------------------------------------------------------------------------- @@ -262,13 +171,6 @@ def no_op(*a, **kw): class SpaceInInput(Exception): pass -def get_default_colors(): - "DEPRECATED" - warn('get_default_color is deprecated since IPython 5.0, and returns `Neutral` on all platforms.', - DeprecationWarning, stacklevel=2) - return 'Neutral' - - class SeparateUnicode(Unicode): r"""A Unicode subclass to validate separate_in, separate_out, etc. 
@@ -329,7 +231,7 @@ class ExecutionResult(object): """ execution_count = None error_before_exec = None - error_in_exec = None + error_in_exec: Optional[BaseException] = None info = None result = None @@ -551,29 +453,6 @@ class InteractiveShell(SingletonConfigurable): will be displayed as regular output instead.""" ).tag(config=True) - # deprecated prompt traits: - - prompt_in1 = Unicode('In [\\#]: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompt_in2 = Unicode(' .\\D.: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompt_out = Unicode('Out[\\#]: ', - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - prompts_pad_left = Bool(True, - help="Deprecated since IPython 4.0 and ignored since 5.0, set TerminalInteractiveShell.prompts object directly." - ).tag(config=True) - - @observe('prompt_in1', 'prompt_in2', 'prompt_out', 'prompt_pad_left') - def _prompt_trait_changed(self, change): - name = change['name'] - warn("InteractiveShell.{name} is deprecated since IPython 4.0" - " and ignored since 5.0, set TerminalInteractiveShell.prompts" - " object directly.".format(name=name)) - - # protect against weird cases where self.config may not exist: show_rewritten_input = Bool(True, help="Show rewritten input, e.g. for autocall." @@ -707,8 +586,6 @@ class InteractiveShell(SingletonConfigurable): self.init_pdb() self.init_extension_manager() self.init_payload() - self.init_deprecation_warnings() - self.hooks.late_startup_hook() self.events.trigger('shell_initialized', self) atexit.register(self.atexit_operations) @@ -775,6 +652,7 @@ class InteractiveShell(SingletonConfigurable): self.meta = Struct() # Temporary files used for various purposes. Deleted at exit. 
+ # The files here are stored with Path from Pathlib self.tempfiles = [] self.tempdirs = [] @@ -832,16 +710,6 @@ class InteractiveShell(SingletonConfigurable): elif self.logstart: self.magic('logstart') - def init_deprecation_warnings(self): - """ - register default filter for deprecation warning. - - This will allow deprecation warning of function used interactively to show - warning to users, and still hide deprecation warning from libraries import. - """ - if sys.version_info < (3,7): - warnings.filterwarnings("default", category=DeprecationWarning, module=self.user_ns.get("__name__")) - def init_builtins(self): # A single, static flag that we set to True. Its presence indicates @@ -862,16 +730,9 @@ class InteractiveShell(SingletonConfigurable): self.object_info_string_level) def init_io(self): - # This will just use sys.stdout and sys.stderr. If you want to - # override sys.stdout and sys.stderr themselves, you need to do that - # *before* instantiating this class, because io holds onto - # references to the underlying streams. - # io.std* are deprecated, but don't show our own deprecation warnings - # during initialization of the deprecated API. - with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - io.stdout = io.IOStream(sys.stdout) - io.stderr = io.IOStream(sys.stderr) + # implemented in subclasses, TerminalInteractiveShell does call + # colorama.init(). + pass def init_prompts(self): # Set system prompts, so that scripts can decide if they are running @@ -907,13 +768,42 @@ class InteractiveShell(SingletonConfigurable): # the appropriate time. 
self.display_trap = DisplayTrap(hook=self.displayhook) + @staticmethod + def get_path_links(p: Path): + """Gets path links including all symlinks + + Examples + -------- + In [1]: from IPython.core.interactiveshell import InteractiveShell + + In [2]: import sys, pathlib + + In [3]: paths = InteractiveShell.get_path_links(pathlib.Path(sys.executable)) + + In [4]: len(paths) == len(set(paths)) + Out[4]: True + + In [5]: bool(paths) + Out[5]: True + """ + paths = [p] + while p.is_symlink(): + new_path = Path(os.readlink(p)) + if not new_path.is_absolute(): + new_path = p.parent / new_path + p = new_path + paths.append(p) + return paths + def init_virtualenv(self): """Add the current virtualenv to sys.path so the user can import modules from it. This isn't perfect: it doesn't use the Python interpreter with which the virtualenv was built, and it ignores the --no-site-packages option. A warning will appear suggesting the user installs IPython in the virtualenv, but for many cases, it probably works well enough. + Adapted from code snippets online. + http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv """ if 'VIRTUAL_ENV' not in os.environ: @@ -930,10 +820,7 @@ class InteractiveShell(SingletonConfigurable): # stdlib venv may symlink sys.executable, so we can't use realpath. # but others can symlink *to* the venv Python, so we can't just use sys.executable. # So we just check every item in the symlink tree (generally <= 3) - paths = [p] - while p.is_symlink(): - p = Path(os.readlink(p)) - paths.append(p.resolve()) + paths = self.get_path_links(p) # In Cygwin paths like "c:\..." and '\cygdrive\c\...' are possible if p_venv.parts[1] == "cygdrive": @@ -1029,13 +916,12 @@ class InteractiveShell(SingletonConfigurable): for hook_name in hooks.__all__: # default hooks have priority 100, i.e. 
low; user hooks should have # 0-100 priority - self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False) + self.set_hook(hook_name, getattr(hooks, hook_name), 100) if self.display_page: self.set_hook('show_in_pager', page.as_hook(page.display_page), 90) - def set_hook(self,name,hook, priority=50, str_key=None, re_key=None, - _warn_deprecated=True): + def set_hook(self, name, hook, priority=50, str_key=None, re_key=None): """set_hook(name,hook) -> sets an internal IPython hook. IPython exposes some of its internal API as user-modifiable hooks. By @@ -1065,9 +951,13 @@ class InteractiveShell(SingletonConfigurable): print("Warning! Hook '%s' is not one of %s" % \ (name, IPython.core.hooks.__all__ )) - if _warn_deprecated and (name in IPython.core.hooks.deprecated): + if name in IPython.core.hooks.deprecated: alternative = IPython.core.hooks.deprecated[name] - warn("Hook {} is deprecated. Use {} instead.".format(name, alternative), stacklevel=2) + raise ValueError( + "Hook {} has been deprecated since IPython 5.0. Use {} instead.".format( + name, alternative + ) + ) if not dp: dp = IPython.core.hooks.CommandChainDispatcher() @@ -1091,12 +981,13 @@ class InteractiveShell(SingletonConfigurable): def register_post_execute(self, func): """DEPRECATED: Use ip.events.register('post_run_cell', func) - + Register a function for calling after code execution. """ - warn("ip.register_post_execute is deprecated, use " - "ip.events.register('post_run_cell', func) instead.", stacklevel=2) - self.events.register('post_run_cell', func) + raise ValueError( + "ip.register_post_execute is deprecated since IPython 1.0, use " + "ip.events.register('post_run_cell', func) instead." + ) def _clear_warning_registry(self): # clear the warning registry, so that different code blocks with @@ -1111,14 +1002,14 @@ class InteractiveShell(SingletonConfigurable): def new_main_mod(self, filename, modname): """Return a new 'main' module object for user code execution. 
- + ``filename`` should be the path of the script which will be run in the module. Requests with the same filename will get the same module, with its namespace cleared. - + ``modname`` should be the module name - normally either '__main__' or the basename of the file without the extension. - + When scripts are executed via %run, we must keep a reference to their __main__ module around so that Python doesn't clear it, rendering references to module globals useless. @@ -1154,7 +1045,6 @@ class InteractiveShell(SingletonConfigurable): Examples -------- - In [15]: import IPython In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython') @@ -1304,10 +1194,10 @@ class InteractiveShell(SingletonConfigurable): def prepare_user_module(self, user_module=None, user_ns=None): """Prepare the module and namespace in which user code will be run. - + When IPython is started normally, both parameters are None: a new module is created automatically, and its __dict__ used as the namespace. - + If only user_module is provided, its __dict__ is used as the namespace. If only user_ns is provided, a dummy module is created, and user_ns becomes the global namespace. If both are provided (as they may be @@ -1428,7 +1318,7 @@ class InteractiveShell(SingletonConfigurable): def all_ns_refs(self): """Get a list of references to all the namespace dictionaries in which IPython might store a user-created object. 
- + Note that this does not include the displayhook, which also caches objects from the output.""" return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \ @@ -1524,8 +1414,8 @@ class InteractiveShell(SingletonConfigurable): else: # Delete by object try: obj = self.user_ns[varname] - except KeyError: - raise NameError("name '%s' is not defined" % varname) + except KeyError as e: + raise NameError("name '%s' is not defined" % varname) from e # Also check in output history ns_refs.append(self.history_manager.output_hist) for ns in ns_refs: @@ -1555,8 +1445,8 @@ class InteractiveShell(SingletonConfigurable): if regex is not None: try: m = re.compile(regex) - except TypeError: - raise TypeError('regex must be a string or compiled pattern') + except TypeError as e: + raise TypeError('regex must be a string or compiled pattern') from e # Search for keys in each namespace that match the given regex # If a match is found, delete the key/value pair. for ns in self.all_ns_refs: @@ -1615,15 +1505,15 @@ class InteractiveShell(SingletonConfigurable): def drop_by_id(self, variables): """Remove a dict of variables from the user namespace, if they are the same as the values in the dictionary. - + This is intended for use by extensions: variables that they've added can be taken back out if they are unloaded, without removing any that the user has overwritten. - + Parameters ---------- variables : dict - A dictionary mapping object names (as strings) to the objects. + A dictionary mapping object names (as strings) to the objects. 
""" for name, obj in variables.items(): if name in self.user_ns and self.user_ns[name] is obj: @@ -1820,7 +1710,7 @@ class InteractiveShell(SingletonConfigurable): """Get object info as formatted text""" return self.object_inspect_mime(oname, detail_level)['text/plain'] - def object_inspect_mime(self, oname, detail_level=0): + def object_inspect_mime(self, oname, detail_level=0, omit_sections=()): """Get object info as a mimebundle of formatted representations. A mimebundle is a dictionary, keyed by mime-type. @@ -1840,6 +1730,7 @@ class InteractiveShell(SingletonConfigurable): info=info, detail_level=detail_level, formatter=docformat, + omit_sections=omit_sections, ) else: raise KeyError(oname) @@ -1892,7 +1783,6 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - exc_tuple : tuple of exception classes A *tuple* of exception classes, for which to call the defined handler. It is very important that you use a tuple, and NOT A @@ -1922,10 +1812,11 @@ class InteractiveShell(SingletonConfigurable): Notes ----- - WARNING: by putting in your own exception handler into IPython's main execution loop, you run a very good chance of nasty crashes. This - facility should only be used if you really know what you are doing.""" + facility should only be used if you really know what you are doing. + """ + if not isinstance(exc_tuple, tuple): raise TypeError("The custom exceptions must be given as a tuple.") @@ -1937,10 +1828,10 @@ class InteractiveShell(SingletonConfigurable): def validate_stb(stb): """validate structured traceback return type - + return type of CustomTB *should* be a list of strings, but allow single strings or None, which are harmless. - + This function will *always* return a list of strings, and will raise a TypeError if stb is inappropriate. 
""" @@ -2095,10 +1986,19 @@ class InteractiveShell(SingletonConfigurable): # Exception classes can customise their traceback - we # use this in IPython.parallel for exceptions occurring # in the engines. This should return a list of strings. - stb = value._render_traceback_() + if hasattr(value, "_render_traceback_"): + stb = value._render_traceback_() + else: + stb = self.InteractiveTB.structured_traceback( + etype, value, tb, tb_offset=tb_offset + ) + except Exception: - stb = self.InteractiveTB.structured_traceback(etype, - value, tb, tb_offset=tb_offset) + print( + "Unexpected exception formatting exception. Falling back to standard exception" + ) + traceback.print_exc() + return None self._showtraceback(etype, value, stb) if self.call_pdb: @@ -2135,7 +2035,7 @@ class InteractiveShell(SingletonConfigurable): If the syntax error occurred when running a compiled code (i.e. running_compile_code=True), longer stack trace will be displayed. - """ + """ etype, value, last_traceback = self._get_exc_info() if filename and issubclass(etype, SyntaxError): @@ -2160,19 +2060,6 @@ class InteractiveShell(SingletonConfigurable): the %paste magic.""" self.showsyntaxerror() - #------------------------------------------------------------------------- - # Things related to readline - #------------------------------------------------------------------------- - - def init_readline(self): - """DEPRECATED - - Moved to terminal subclass, here only to simplify the init logic.""" - # Set a number of methods that depend on readline to be no-op - warnings.warn('`init_readline` is no-op since IPython 5.0 and is Deprecated', - DeprecationWarning, stacklevel=2) - self.set_custom_completer = no_op - @skip_doctest def set_next_input(self, s, replace=False): """ Sets the 'default' input string for the next command line. @@ -2201,8 +2088,12 @@ class InteractiveShell(SingletonConfigurable): (typically over the network by remote frontends). 
""" from IPython.core.completer import IPCompleter - from IPython.core.completerlib import (module_completer, - magic_run_completer, cd_completer, reset_completer) + from IPython.core.completerlib import ( + cd_completer, + magic_run_completer, + module_completer, + reset_completer, + ) self.Completer = IPCompleter(shell=self, namespace=self.user_ns, @@ -2229,26 +2120,24 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - - text : string - A string of text to be completed on. It can be given as empty and - instead a line/position pair are given. In this case, the - completer itself will split the line like readline does. - - line : string, optional - The complete line that text is part of. - - cursor_pos : int, optional - The position of the cursor on the input line. + text : string + A string of text to be completed on. It can be given as empty and + instead a line/position pair are given. In this case, the + completer itself will split the line like readline does. + line : string, optional + The complete line that text is part of. + cursor_pos : int, optional + The position of the cursor on the input line. Returns ------- - text : string + text : string The actual text that was completed. - - matches : list + matches : list A sorted list with all possible completions. + Notes + ----- The optional arguments allow the completion to take more context into account, and are part of the low-level completion API. @@ -2257,8 +2146,8 @@ class InteractiveShell(SingletonConfigurable): exposing it as a method, it can be used by other non-readline environments (such as GUIs) for text completion. 
- Simple usage example: - + Examples + -------- In [1]: x = 'hello' In [2]: _ip.complete('x.l') @@ -2341,7 +2230,7 @@ class InteractiveShell(SingletonConfigurable): func, magic_kind=magic_kind, magic_name=magic_name ) - def _find_with_lazy_load(self, type_, magic_name: str): + def _find_with_lazy_load(self, /, type_, magic_name: str): """ Try to find a magic potentially lazy-loading it. @@ -2374,14 +2263,12 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- magic_name : str - Name of the desired magic function, without '%' prefix. - + Name of the desired magic function, without '%' prefix. line : str - The rest of the input line as a single string. - + The rest of the input line as a single string. _stack_depth : int - If run_line_magic() is called from magic() then _stack_depth=2. - This is added to ensure backward compatibility for use of 'get_ipython().magic()' + If run_line_magic() is called from magic() then _stack_depth=2. + This is added to ensure backward compatibility for use of 'get_ipython().magic()' """ fn = self._find_with_lazy_load("line", magic_name) if fn is None: @@ -2423,7 +2310,7 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- stack_depth : int - Depth relative to calling frame + Depth relative to calling frame """ return sys._getframe(stack_depth + 1).f_locals @@ -2433,13 +2320,11 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- magic_name : str - Name of the desired magic function, without '%' prefix. - + Name of the desired magic function, without '%' prefix. line : str - The rest of the first input line as a single string. - + The rest of the first input line as a single string. cell : str - The body of the cell as a (possibly multiline) string. + The body of the cell as a (possibly multiline) string. 
""" fn = self._find_with_lazy_load("cell", magic_name) if fn is None: @@ -2491,7 +2376,11 @@ class InteractiveShell(SingletonConfigurable): return self.magics_manager.magics[magic_kind].get(magic_name) def magic(self, arg_s): - """DEPRECATED. Use run_line_magic() instead. + """ + DEPRECATED + + Deprecated since IPython 0.13 (warning added in + 8.1), use run_line_magic(magic_name, parameter_s). Call a magic function by name. @@ -2509,6 +2398,12 @@ class InteractiveShell(SingletonConfigurable): valid Python code you can type at the interpreter, including loops and compound statements. """ + warnings.warn( + "`magic(...)` is deprecated since IPython 0.13 (warning added in " + "8.1), use run_line_magic(magic_name, parameter_s).", + DeprecationWarning, + stacklevel=2, + ) # TODO: should we issue a loud deprecation warning here? magic_name, _, magic_arg_s = arg_s.partition(' ') magic_name = magic_name.lstrip(prefilter.ESC_MAGIC) @@ -2548,9 +2443,9 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- cmd : str - Command to execute (can not end in '&', as background processes are - not supported. Should not be a command that expects input - other than simple text. + Command to execute (can not end in '&', as background processes are + not supported. Should not be a command that expects input + other than simple text. """ if cmd.rstrip().endswith('&'): # this is *far* from a rigorous test @@ -2572,9 +2467,21 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- cmd : str - Command to execute. + Command to execute. """ cmd = self.var_expand(cmd, depth=1) + # warn if there is an IPython magic alternative. + main_cmd = cmd.split()[0] + has_magic_alternatives = ("pip", "conda", "cd") + + if main_cmd in has_magic_alternatives: + warnings.warn( + ( + "You executed the system command !{0} which may not work " + "as expected. Try the IPython magic %{0} instead." 
+ ).format(main_cmd) + ) + # protect os.system from UNC paths on Windows, which it can't handle: if sys.platform == 'win32': from IPython.utils._process_win32 import AvoidUNCPath @@ -2623,18 +2530,18 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- cmd : str - Command to execute (can not end in '&', as background processes are - not supported. + Command to execute (can not end in '&', as background processes are + not supported. split : bool, optional - If True, split the output into an IPython SList. Otherwise, an - IPython LSString is returned. These are objects similar to normal - lists and strings, with a few convenience attributes for easier - manipulation of line-based output. You can use '?' on them for - details. + If True, split the output into an IPython SList. Otherwise, an + IPython LSString is returned. These are objects similar to normal + lists and strings, with a few convenience attributes for easier + manipulation of line-based output. You can use '?' on them for + details. depth : int, optional - How many frames above the caller are the local variables which should - be expanded in the command string? The default (0) assumes that the - expansion variables are in the stack frame calling this function. + How many frames above the caller are the local variables which should + be expanded in the command string? The default (0) assumes that the + expansion variables are in the stack frame calling this function. 
""" if cmd.rstrip().endswith('&'): # this is *far* from a rigorous test @@ -2717,10 +2624,10 @@ class InteractiveShell(SingletonConfigurable): stb = self.InteractiveTB.get_exception_only(etype, evalue) exc_info = { - u'status' : 'error', - u'traceback' : stb, - u'ename' : etype.__name__, - u'evalue' : py3compat.safe_unicode(evalue), + "status": "error", + "traceback": stb, + "ename": etype.__name__, + "evalue": py3compat.safe_unicode(evalue), } return exc_info @@ -2745,9 +2652,9 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- expressions : dict - A dict with string keys and string values. The expression values - should be valid Python expressions, each of which will be evaluated - in the user namespace. + A dict with string keys and string values. The expression values + should be valid Python expressions, each of which will be evaluated + in the user namespace. Returns ------- @@ -2794,7 +2701,7 @@ class InteractiveShell(SingletonConfigurable): ---------- fname : string The name of the file to be executed. - where : tuple + *where : tuple One or two namespaces, passed to execfile() as (globals,locals). If only one is given, it is passed as both. exit_ignore : bool (False) @@ -2809,11 +2716,11 @@ class InteractiveShell(SingletonConfigurable): __future__ imports are not shared in either direction. """ - fname = os.path.abspath(os.path.expanduser(fname)) + fname = Path(fname).expanduser().resolve() # Make sure we can open the file try: - with open(fname): + with fname.open("rb"): pass except: warn('Could not open file <%s> for safe execution.' % fname) @@ -2822,7 +2729,7 @@ class InteractiveShell(SingletonConfigurable): # Find things also in current directory. 
This is needed to mimic the # behavior of running a script from the system command line, where # Python inserts the script's directory into sys.path - dname = os.path.dirname(fname) + dname = str(fname.parent) with prepended_to_syspath(dname), self.builtin_trap: try: @@ -2867,11 +2774,11 @@ class InteractiveShell(SingletonConfigurable): raise_exceptions : bool (False) If True raise exceptions everywhere. Meant for testing. """ - fname = os.path.abspath(os.path.expanduser(fname)) + fname = Path(fname).expanduser().resolve() # Make sure we can open the file try: - with open(fname): + with fname.open("rb"): pass except: warn('Could not open file <%s> for safe execution.' % fname) @@ -2880,11 +2787,11 @@ class InteractiveShell(SingletonConfigurable): # Find things also in current directory. This is needed to mimic the # behavior of running a script from the system command line, where # Python inserts the script's directory into sys.path - dname = os.path.dirname(fname) - + dname = str(fname.parent) + def get_cells(): """generator for sequence of code blocks to run""" - if fname.endswith('.ipynb'): + if fname.suffix == ".ipynb": from nbformat import read nb = read(fname, as_version=4) if not nb.cells: @@ -2893,8 +2800,7 @@ class InteractiveShell(SingletonConfigurable): if cell.cell_type == 'code': yield cell.source else: - with open(fname) as f: - yield f.read() + yield fname.read_text(encoding="utf-8") with prepended_to_syspath(dname): try: @@ -2951,19 +2857,19 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- raw_cell : str - The code (including IPython code such as %magic functions) to run. + The code (including IPython code such as %magic functions) to run. store_history : bool - If True, the raw and translated cell will be stored in IPython's - history. For user code calling back into IPython's machinery, this - should be set to False. + If True, the raw and translated cell will be stored in IPython's + history. 
For user code calling back into IPython's machinery, this + should be set to False. silent : bool - If True, avoid side-effects, such as implicit displayhooks and - and logging. silent=True forces store_history=False. + If True, avoid side-effects, such as implicit displayhooks and + and logging. silent=True forces store_history=False. shell_futures : bool - If True, the code will share future statements with the interactive - shell. It will both be affected by previous __future__ imports, and - any __future__ imports in the code will affect the shell. If False, - __future__ imports are not shared in either direction. + If True, the code will share future statements with the interactive + shell. It will both be affected by previous __future__ imports, and + any __future__ imports in the code will affect the shell. If False, + __future__ imports are not shared in either direction. Returns ------- @@ -3035,7 +2941,6 @@ class InteractiveShell(SingletonConfigurable): result.error_in_exec = e self.showtraceback(running_compiled_code=True) return result - return def should_run_async( self, raw_cell: str, *, transformed_cell=None, preprocessing_exc_tuple=None @@ -3044,14 +2949,13 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - raw_cell: str + raw_cell : str The code to be executed Returns ------- result: bool Whether the code needs to be run with a coroutine runner or not - .. 
versionadded:: 7.0 """ if not self.autoawait: @@ -3175,9 +3079,8 @@ class InteractiveShell(SingletonConfigurable): cell = raw_cell # Store raw and processed history - if store_history: - self.history_manager.store_inputs(self.execution_count, - cell, raw_cell) + if store_history and raw_cell.strip(" %") != "paste": + self.history_manager.store_inputs(self.execution_count, cell, raw_cell) if not silent: self.logger.log(cell, raw_cell) @@ -3196,35 +3099,12 @@ class InteractiveShell(SingletonConfigurable): _run_async = False with self.builtin_trap: - cell_name = self.compile.cache( - cell, self.execution_count, raw_code=raw_cell - ) + cell_name = compiler.cache(cell, self.execution_count, raw_code=raw_cell) with self.display_trap: # Compile to bytecode try: - if sys.version_info < (3,8) and self.autoawait: - if _should_be_async(cell): - # the code AST below will not be user code: we wrap it - # in an `async def`. This will likely make some AST - # transformer below miss some transform opportunity and - # introduce a small coupling to run_code (in which we - # bake some assumptions of what _ast_asyncify returns. - # they are ways around (like grafting part of the ast - # later: - # - Here, return code_ast.body[0].body[1:-1], as well - # as last expression in return statement which is - # the user code part. - # - Let it go through the AST transformers, and graft - # - it back after the AST transform - # But that seem unreasonable, at least while we - # do not need it. 
- code_ast = _ast_asyncify(cell, 'async-def-wrapper') - _run_async = True - else: - code_ast = compiler.ast_parse(cell, filename=cell_name) - else: - code_ast = compiler.ast_parse(cell, filename=cell_name) + code_ast = compiler.ast_parse(cell, filename=cell_name) except self.custom_exceptions as e: etype, value, tb = sys.exc_info() self.CustomTB(etype, value, tb) @@ -3250,8 +3130,6 @@ class InteractiveShell(SingletonConfigurable): # Execute the user code interactivity = "none" if silent else self.ast_node_interactivity - if _run_async: - interactivity = 'async' has_raised = await self.run_ast_nodes(code_ast.body, cell_name, interactivity=interactivity, compiler=compiler, result=result) @@ -3308,8 +3186,8 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- node : ast.Node - The root node to be transformed. Typically called with the ast.Module - produced by parsing user input. + The root node to be transformed. Typically called with the ast.Module + produced by parsing user input. Returns ------- @@ -3356,8 +3234,14 @@ class InteractiveShell(SingletonConfigurable): return code return code.replace(co_name="<cell line: %s>" % (first_real_line,)) - async def run_ast_nodes(self, nodelist:ListType[AST], cell_name:str, interactivity='last_expr', - compiler=compile, result=None): + async def run_ast_nodes( + self, + nodelist: ListType[stmt], + cell_name: str, + interactivity="last_expr", + compiler=compile, + result=None, + ): """Run a sequence of AST nodes. The execution mode depends on the interactivity parameter. @@ -3377,11 +3261,6 @@ class InteractiveShell(SingletonConfigurable): or the last assignment. Other values for this parameter will raise a ValueError. - Experimental value: 'async' Will try to run top level interactive - async/await code in default runner, this will not respect the - interactivity setting and will only run the last node if it is an - expression. 
- compiler : callable A function with the same interface as the built-in compile(), to turn the AST nodes into code objects. Default is the built-in compile(). @@ -3396,6 +3275,7 @@ class InteractiveShell(SingletonConfigurable): if not nodelist: return + if interactivity == 'last_expr_or_assign': if isinstance(nodelist[-1], _assign_nodes): asg = nodelist[-1] @@ -3424,53 +3304,38 @@ class InteractiveShell(SingletonConfigurable): to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:] elif interactivity == 'all': to_run_exec, to_run_interactive = [], nodelist - elif interactivity == 'async': - to_run_exec, to_run_interactive = [], nodelist - _async = True else: raise ValueError("Interactivity was %r" % interactivity) try: - if _async and sys.version_info > (3,8): - raise ValueError("This branch should never happen on Python 3.8 and above, " - "please try to upgrade IPython and open a bug report with your case.") - if _async: - # If interactivity is async the semantics of run_code are - # completely different Skip usual machinery. - mod = Module(nodelist, []) - async_wrapper_code = compiler(mod, cell_name, 'exec') - exec(async_wrapper_code, self.user_global_ns, self.user_ns) - async_code = removed_co_newlocals(self.user_ns.pop('async-def-wrapper')).__code__ - if (await self.run_code(async_code, result, async_=True)): + + def compare(code): + is_async = inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE + return is_async + + # refactor that to just change the mod constructor. 
+ to_run = [] + for node in to_run_exec: + to_run.append((node, "exec")) + + for node in to_run_interactive: + to_run.append((node, "single")) + + for node, mode in to_run: + if mode == "exec": + mod = Module([node], []) + elif mode == "single": + mod = ast.Interactive([node]) + with compiler.extra_flags( + getattr(ast, "PyCF_ALLOW_TOP_LEVEL_AWAIT", 0x0) + if self.autoawait + else 0x0 + ): + code = compiler(mod, cell_name, mode) + code = self._update_code_co_name(code) + asy = compare(code) + if await self.run_code(code, result, async_=asy): return True - else: - if sys.version_info > (3, 8): - def compare(code): - is_async = (inspect.CO_COROUTINE & code.co_flags == inspect.CO_COROUTINE) - return is_async - else: - def compare(code): - return _async - - # refactor that to just change the mod constructor. - to_run = [] - for node in to_run_exec: - to_run.append((node, 'exec')) - - for node in to_run_interactive: - to_run.append((node, 'single')) - - for node,mode in to_run: - if mode == 'exec': - mod = Module([node], []) - elif mode == 'single': - mod = ast.Interactive([node]) - with compiler.extra_flags(getattr(ast, 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 0x0) if self.autoawait else 0x0): - code = compiler(mod, cell_name, mode) - code = self._update_code_co_name(code) - asy = compare(code) - if (await self.run_code(code, result, async_=asy)): - return True # Flush softspace if softspace(sys.stdout, 0): @@ -3493,21 +3358,6 @@ class InteractiveShell(SingletonConfigurable): return False - def _async_exec(self, code_obj: types.CodeType, user_ns: dict): - """ - Evaluate an asynchronous code object using a code runner - - Fake asynchronous execution of code_object in a namespace via a proxy namespace. - - Returns coroutine object, which can be executed via async loop runner - - WARNING: The semantics of `async_exec` are quite different from `exec`, - in particular you can only pass a single namespace. 
It also return a - handle to the value of the last things returned by code_object. - """ - - return eval(code_obj, user_ns) - async def run_code(self, code_obj, result=None, *, async_=False): """Execute a code object. @@ -3541,12 +3391,7 @@ class InteractiveShell(SingletonConfigurable): outflag = True # happens in more places, so it's easier as default try: try: - self.hooks.pre_run_code_hook() - if async_ and sys.version_info < (3,8): - last_expr = (await self._async_exec(code_obj, self.user_ns)) - code = compile('last_expr', 'fake', "single") - exec(code, {'last_expr': last_expr}) - elif async_ : + if async_: await eval(code_obj, self.user_global_ns, self.user_ns) else: exec(code_obj, self.user_global_ns, self.user_ns) @@ -3579,17 +3424,17 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - source : string - Python input code, which can be multiline. + code : string + Python input code, which can be multiline. Returns ------- status : str - One of 'complete', 'incomplete', or 'invalid' if source is not a - prefix of valid code. + One of 'complete', 'incomplete', or 'invalid' if source is not a + prefix of valid code. indent : str - When status is 'incomplete', this is some whitespace to insert on - the next line of the prompt. + When status is 'incomplete', this is some whitespace to insert on + the next line of the prompt. """ status, nspaces = self.input_transformer_manager.check_complete(code) return status, ' ' * (nspaces or 0) @@ -3616,16 +3461,17 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- gui : optional, string - If given, dictates the choice of matplotlib GUI backend to use - (should be one of IPython's supported backends, 'qt', 'osx', 'tk', - 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by - matplotlib (as dictated by the matplotlib build-time options plus the - user's matplotlibrc configuration file). 
Note that not all backends - make sense in all contexts, for example a terminal ipython can't - display figures inline. + If given, dictates the choice of matplotlib GUI backend to use + (should be one of IPython's supported backends, 'qt', 'osx', 'tk', + 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by + matplotlib (as dictated by the matplotlib build-time options plus the + user's matplotlibrc configuration file). Note that not all backends + make sense in all contexts, for example a terminal ipython can't + display figures inline. """ - from IPython.core import pylabtools as pt from matplotlib_inline.backend_inline import configure_inline_support + + from IPython.core import pylabtools as pt gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select) if gui != 'inline': @@ -3662,18 +3508,18 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- gui : optional, string - If given, dictates the choice of matplotlib GUI backend to use - (should be one of IPython's supported backends, 'qt', 'osx', 'tk', - 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by - matplotlib (as dictated by the matplotlib build-time options plus the - user's matplotlibrc configuration file). Note that not all backends - make sense in all contexts, for example a terminal ipython can't - display figures inline. + If given, dictates the choice of matplotlib GUI backend to use + (should be one of IPython's supported backends, 'qt', 'osx', 'tk', + 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by + matplotlib (as dictated by the matplotlib build-time options plus the + user's matplotlibrc configuration file). Note that not all backends + make sense in all contexts, for example a terminal ipython can't + display figures inline. import_all : optional, bool, default: True - Whether to do `from numpy import *` and `from pylab import *` - in addition to module imports. 
+ Whether to do `from numpy import *` and `from pylab import *` + in addition to module imports. welcome_message : deprecated - This argument is ignored, no welcome message will be displayed. + This argument is ignored, no welcome message will be displayed. """ from IPython.core.pylabtools import import_pylab @@ -3738,32 +3584,19 @@ class InteractiveShell(SingletonConfigurable): - data(None): if data is given, it gets written out to the temp file immediately, and the file is closed again.""" - dirname = tempfile.mkdtemp(prefix=prefix) - self.tempdirs.append(dirname) + dir_path = Path(tempfile.mkdtemp(prefix=prefix)) + self.tempdirs.append(dir_path) - handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname) + handle, filename = tempfile.mkstemp(".py", prefix, dir=str(dir_path)) os.close(handle) # On Windows, there can only be one open handle on a file - self.tempfiles.append(filename) + + file_path = Path(filename) + self.tempfiles.append(file_path) if data: - with open(filename, 'w') as tmp_file: - tmp_file.write(data) + file_path.write_text(data, encoding="utf-8") return filename - @undoc - def write(self,data): - """DEPRECATED: Write a string to the default output""" - warn('InteractiveShell.write() is deprecated, use sys.stdout instead', - DeprecationWarning, stacklevel=2) - sys.stdout.write(data) - - @undoc - def write_err(self,data): - """DEPRECATED: Write a string to the default error output""" - warn('InteractiveShell.write_err() is deprecated, use sys.stderr instead', - DeprecationWarning, stacklevel=2) - sys.stderr.write(data) - def ask_yes_no(self, prompt, default=None, interrupt=None): if self.quiet: return True @@ -3778,26 +3611,37 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - range_str : string + range_str : str The set of slices is given as a string, like "~5/6-~4/2 4:8 9", since this function is for use by magic functions which get their arguments as strings. 
The number before the / is the session number: ~n goes n back from the current session. + If empty string is given, returns history of current session + without the last input. + raw : bool, optional By default, the processed input is used. If this is true, the raw input history is used instead. Notes ----- - Slices can be described with two notations: * ``N:M`` -> standard python form, means including items N...(M-1). * ``N-M`` -> include items N..M (closed endpoint). """ lines = self.history_manager.get_range_by_str(range_str, raw=raw) - return "\n".join(x for _, _, x in lines) + text = "\n".join(x for _, _, x in lines) + + # Skip the last line, as it's probably the magic that called this + if not range_str: + if "\n" not in text: + text = "" + else: + text = text[: text.rfind("\n")] + + return text def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False): """Get a code string from history, file, url, or a string or macro. @@ -3806,26 +3650,26 @@ class InteractiveShell(SingletonConfigurable): Parameters ---------- - target : str + A string specifying code to retrieve. This will be tried respectively + as: ranges of input history (see %history for syntax), url, + corresponding .py file, filename, or an expression evaluating to a + string or Macro in the user namespace. - A string specifying code to retrieve. This will be tried respectively - as: ranges of input history (see %history for syntax), url, - corresponding .py file, filename, or an expression evaluating to a - string or Macro in the user namespace. + If empty string is given, returns complete history of current + session, without the last line. raw : bool - If true (default), retrieve raw history. Has no effect on the other - retrieval mechanisms. + If true (default), retrieve raw history. Has no effect on the other + retrieval mechanisms. 
py_only : bool (default False) - Only try to fetch python code, do not try alternative methods to decode file - if unicode fails. + Only try to fetch python code, do not try alternative methods to decode file + if unicode fails. Returns ------- A string of code. - ValueError is raised if nothing is found, and TypeError if it evaluates to an object of another type. In each case, .args[0] is a printable message. @@ -3836,13 +3680,13 @@ class InteractiveShell(SingletonConfigurable): try: if target.startswith(('http://', 'https://')): return openpy.read_py_url(target, skip_encoding_cookie=skip_encoding_cookie) - except UnicodeDecodeError: + except UnicodeDecodeError as e: if not py_only : # Deferred import from urllib.request import urlopen response = urlopen(target) return response.read().decode('latin1') - raise ValueError(("'%s' seem to be unreadable.") % target) + raise ValueError(("'%s' seem to be unreadable.") % target) from e potential_target = [target] try : @@ -3854,11 +3698,11 @@ class InteractiveShell(SingletonConfigurable): if os.path.isfile(tgt): # Read file try : return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie) - except UnicodeDecodeError : + except UnicodeDecodeError as e: if not py_only : with io_open(tgt,'r', encoding='latin1') as f : return f.read() - raise ValueError(("'%s' seem to be unreadable.") % target) + raise ValueError(("'%s' seem to be unreadable.") % target) from e elif os.path.isdir(os.path.expanduser(tgt)): raise ValueError("'%s' is a directory, not a regular file." 
% target) @@ -3870,9 +3714,9 @@ class InteractiveShell(SingletonConfigurable): try: # User namespace codeobj = eval(target, self.user_ns) - except Exception: + except Exception as e: raise ValueError(("'%s' was not found in history, as a file, url, " - "nor in the user namespace.") % target) + "nor in the user namespace.") % target) from e if isinstance(codeobj, str): return codeobj @@ -3882,6 +3726,22 @@ class InteractiveShell(SingletonConfigurable): raise TypeError("%s is neither a string nor a macro." % target, codeobj) + def _atexit_once(self): + """ + At exist operation that need to be called at most once. + Second call to this function per instance will do nothing. + """ + + if not getattr(self, "_atexit_once_called", False): + self._atexit_once_called = True + # Clear all user namespaces to release all references cleanly. + self.reset(new_session=False) + # Close the history session (this stores the end time and line count) + # this must be *before* the tempfile cleanup, in case of temporary + # history db + self.history_manager.end_session() + self.history_manager = None + #------------------------------------------------------------------------- # Things related to IPython exiting #------------------------------------------------------------------------- @@ -3896,29 +3756,28 @@ class InteractiveShell(SingletonConfigurable): code that has the appropriate information, rather than trying to clutter """ - # Close the history session (this stores the end time and line count) - # this must be *before* the tempfile cleanup, in case of temporary - # history db - self.history_manager.end_session() + self._atexit_once() # Cleanup all tempfiles and folders left around for tfile in self.tempfiles: try: - os.unlink(tfile) - except OSError: + tfile.unlink() + self.tempfiles.remove(tfile) + except FileNotFoundError: pass - + del self.tempfiles for tdir in self.tempdirs: try: - os.rmdir(tdir) - except OSError: + tdir.rmdir() + self.tempdirs.remove(tdir) + except 
FileNotFoundError: pass + del self.tempdirs - # Clear all user namespaces to release all references cleanly. - self.reset(new_session=False) - - # Run user hooks - self.hooks.shutdown_hook() + # Restore user's cursor + if hasattr(self, "editing_mode") and self.editing_mode == "vi": + sys.stdout.write("\x1b[0 q") + sys.stdout.flush() def cleanup(self): self.restore_sys_module_state() diff --git a/contrib/python/ipython/py3/IPython/core/magic.py b/contrib/python/ipython/py3/IPython/core/magic.py index b41a651f50..cedba61937 100644 --- a/contrib/python/ipython/py3/IPython/core/magic.py +++ b/contrib/python/ipython/py3/IPython/core/magic.py @@ -20,7 +20,6 @@ from traitlets.config.configurable import Configurable from . import oinspect from .error import UsageError from .inputtransformer2 import ESC_MAGIC, ESC_MAGIC2 -from decorator import decorator from ..utils.ipstruct import Struct from ..utils.process import arg_split from ..utils.text import dedent @@ -115,16 +114,13 @@ def record_magic(dct, magic_kind, magic_name, func): Parameters ---------- dct : dict - A dictionary with 'line' and 'cell' subdicts. - + A dictionary with 'line' and 'cell' subdicts. magic_kind : str - Kind of magic to be stored. - + Kind of magic to be stored. magic_name : str - Key to store the magic as. - + Key to store the magic as. func : function - Callable object to store. + Callable object to store. """ if magic_kind == 'line_cell': dct['line'][magic_name] = dct['cell'][magic_name] = func @@ -184,20 +180,18 @@ def _method_magic_marker(magic_kind): # This is a closure to capture the magic_kind. We could also use a class, # but it's overkill for just that one bit of state. 
def magic_deco(arg): - call = lambda f, *a, **k: f(*a, **k) - if callable(arg): # "Naked" decorator call (just @foo, no args) func = arg name = func.__name__ - retval = decorator(call, func) + retval = arg record_magic(magics, magic_kind, name, name) elif isinstance(arg, str): # Decorator called with arguments (@foo('bar')) name = arg def mark(func, *a, **kw): record_magic(magics, magic_kind, name, func.__name__) - return decorator(call, func) + return func retval = mark else: raise TypeError("Decorator can only be called with " @@ -217,8 +211,6 @@ def _function_magic_marker(magic_kind): # This is a closure to capture the magic_kind. We could also use a class, # but it's overkill for just that one bit of state. def magic_deco(arg): - call = lambda f, *a, **k: f(*a, **k) - # Find get_ipython() in the caller's namespace caller = sys._getframe(1) for ns in ['f_locals', 'f_globals', 'f_builtins']: @@ -236,13 +228,13 @@ def _function_magic_marker(magic_kind): func = arg name = func.__name__ ip.register_magic_function(func, magic_kind, name) - retval = decorator(call, func) + retval = arg elif isinstance(arg, str): # Decorator called with arguments (@foo('bar')) name = arg def mark(func, *a, **kw): ip.register_magic_function(func, magic_kind, name) - return decorator(call, func) + return func retval = mark else: raise TypeError("Decorator can only be called with " @@ -423,7 +415,7 @@ class MagicsManager(Configurable): def register(self, *magic_objects): """Register one or more instances of Magics. - Take one or more classes or instances of classes that subclass the main + Take one or more classes or instances of classes that subclass the main `core.Magic` class, and register them with IPython to use the magic functions they provide. 
The registration process will then ensure that any methods that have decorated to provide line and/or cell magics will @@ -438,7 +430,7 @@ class MagicsManager(Configurable): Parameters ---------- - magic_objects : one or more classes or instances + *magic_objects : one or more classes or instances """ # Start by validating them to ensure they have all had their magic # methods registered at the instance level @@ -461,7 +453,7 @@ class MagicsManager(Configurable): This will create an IPython magic (line, cell or both) from a standalone function. The functions should have the following - signatures: + signatures: * For line magics: `def f(line)` * For cell magics: `def f(line, cell)` @@ -473,14 +465,12 @@ class MagicsManager(Configurable): Parameters ---------- func : callable - Function to be registered as a magic. - + Function to be registered as a magic. magic_kind : str - Kind of magic, one of 'line', 'cell' or 'line_cell' - + Kind of magic, one of 'line', 'cell' or 'line_cell' magic_name : optional str - If given, the name the magic will have in the IPython namespace. By - default, the name of the function itself is used. + If given, the name the magic will have in the IPython namespace. By + default, the name of the function itself is used. """ # Create the new method in the user_magics and register it in the @@ -501,13 +491,11 @@ class MagicsManager(Configurable): Parameters ---------- alias_name : str - The name of the magic to be registered. - + The name of the magic to be registered. magic_name : str - The name of an existing magic. - + The name of an existing magic. magic_kind : str - Kind of magic, one of 'line' or 'cell' + Kind of magic, one of 'line' or 'cell' """ # `validate_type` is too permissive, as it allows 'line_cell' @@ -631,25 +619,20 @@ class Magics(Configurable): Parameters ---------- - arg_str : str - The arguments to parse. - + The arguments to parse. opt_str : str - The options specification. - + The options specification. 
mode : str, default 'string' - If given as 'list', the argument string is returned as a list (split - on whitespace) instead of a string. - + If given as 'list', the argument string is returned as a list (split + on whitespace) instead of a string. list_all : bool, default False - Put all option values in lists. Normally only options - appearing more than once are put in a list. - + Put all option values in lists. Normally only options + appearing more than once are put in a list. posix : bool, default True - Whether to split the input line in POSIX mode or not, as per the - conventions outlined in the :mod:`shlex` module from the standard - library. + Whether to split the input line in POSIX mode or not, as per the + conventions outlined in the :mod:`shlex` module from the standard + library. """ # inject default options at the beginning of the input line @@ -664,6 +647,9 @@ class Magics(Configurable): posix = kw.get('posix', os.name == 'posix') strict = kw.get('strict', True) + preserve_non_opts = kw.get("preserve_non_opts", False) + remainder_arg_str = arg_str + # Check if we have more than one argument to warrant extra processing: odict = {} # Dictionary with options args = arg_str.split() @@ -675,10 +661,18 @@ class Magics(Configurable): try: opts,args = getopt(argv, opt_str, long_opts) except GetoptError as e: - raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str, - " ".join(long_opts))) - for o,a in opts: - if o.startswith('--'): + raise UsageError( + '%s ( allowed: "%s" %s)' % (e.msg, opt_str, " ".join(long_opts)) + ) from e + for o, a in opts: + if mode == "string" and preserve_non_opts: + # remove option-parts from the original args-string and preserve remaining-part. + # This relies on the arg_split(...) and getopt(...)'s impl spec, that the parsed options are + # returned in the original order. 
+ remainder_arg_str = remainder_arg_str.replace(o, "", 1).replace( + a, "", 1 + ) + if o.startswith("--"): o = o[2:] else: o = o[1:] @@ -695,7 +689,10 @@ class Magics(Configurable): # Prepare opts,args for return opts = Struct(odict) if mode == 'string': - args = ' '.join(args) + if preserve_non_opts: + args = remainder_arg_str.lstrip() + else: + args = " ".join(args) return opts,args diff --git a/contrib/python/ipython/py3/IPython/core/magic_arguments.py b/contrib/python/ipython/py3/IPython/core/magic_arguments.py index 9231609572..568abd82ae 100644 --- a/contrib/python/ipython/py3/IPython/core/magic_arguments.py +++ b/contrib/python/ipython/py3/IPython/core/magic_arguments.py @@ -37,6 +37,38 @@ arguments:: -o OPTION, --option OPTION An optional argument. +Here is an elaborated example that uses default parameters in `argument` and calls the `args` in the cell magic:: + + from IPython.core.magic import register_cell_magic + from IPython.core.magic_arguments import (argument, magic_arguments, + parse_argstring) + + + @magic_arguments() + @argument( + "--option", + "-o", + help=("Add an option here"), + ) + @argument( + "--style", + "-s", + default="foo", + help=("Add some style arguments"), + ) + @register_cell_magic + def my_cell_magic(line, cell): + args = parse_argstring(my_cell_magic, line) + print(f"{args.option=}") + print(f"{args.style=}") + print(f"{cell=}") + +In a jupyter notebook, this cell magic can be executed like this:: + + %%my_cell_magic -o Hello + print("bar") + i = 42 + Inheritance diagram: .. 
inheritance-diagram:: IPython.core.magic_arguments diff --git a/contrib/python/ipython/py3/IPython/core/magics/auto.py b/contrib/python/ipython/py3/IPython/core/magics/auto.py index a18542f43d..56aa4f72eb 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/auto.py +++ b/contrib/python/ipython/py3/IPython/core/magics/auto.py @@ -104,16 +104,32 @@ class AutoMagics(Magics): # all-random (note for auto-testing) """ + valid_modes = { + 0: "Off", + 1: "Smart", + 2: "Full", + } + + def errorMessage() -> str: + error = "Valid modes: " + for k, v in valid_modes.items(): + error += str(k) + "->" + v + ", " + error = error[:-2] # remove tailing `, ` after last element + return error + if parameter_s: + if not parameter_s in map(str, valid_modes.keys()): + error(errorMessage()) + return arg = int(parameter_s) else: arg = 'toggle' - if not arg in (0, 1, 2, 'toggle'): - error('Valid modes: (0->Off, 1->Smart, 2->Full') + if not arg in (*list(valid_modes.keys()), "toggle"): + error(errorMessage()) return - if arg in (0, 1, 2): + if arg in (valid_modes.keys()): self.shell.autocall = arg else: # toggle if self.shell.autocall: @@ -125,4 +141,4 @@ class AutoMagics(Magics): except AttributeError: self.shell.autocall = self._magic_state.autocall_save = 1 - print("Automatic calling is:",['OFF','Smart','Full'][self.shell.autocall]) + print("Automatic calling is:", list(valid_modes.values())[self.shell.autocall]) diff --git a/contrib/python/ipython/py3/IPython/core/magics/basic.py b/contrib/python/ipython/py3/IPython/core/magics/basic.py index 72cfc80414..c1f6945110 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/basic.py +++ b/contrib/python/ipython/py3/IPython/core/magics/basic.py @@ -4,6 +4,7 @@ import argparse from logging import error import io +import os from pprint import pformat import sys from warnings import warn @@ -45,7 +46,7 @@ class MagicsDisplay(object): def _jsonable(self): """turn magics dict into jsonable dict of the same structure - + replaces object 
instances with their class names as strings """ magic_dict = {} @@ -74,6 +75,7 @@ class BasicMagics(Magics): These are various magics that don't fit into specific categories but that are all part of the base 'IPython experience'.""" + @skip_doctest @magic_arguments.magic_arguments() @magic_arguments.argument( '-l', '--line', action='store_true', @@ -122,7 +124,7 @@ class BasicMagics(Magics): In [6]: %whereami Out[6]: u'/home/testuser' - + In [7]: %alias_magic h history "-p -l 30" --line Created `%h` as an alias for `%history -l 30`. """ @@ -366,7 +368,7 @@ Currently the magic system has the following functions:""", If called without arguments, acts as a toggle. - When in verbose mode the value --show (and --hide) + When in verbose mode the value --show (and --hide) will respectively show (or hide) frames with ``__tracebackhide__ = True`` value set. """ @@ -560,10 +562,6 @@ Currently the magic system has the following functions:""", @magic_arguments.magic_arguments() @magic_arguments.argument( - '-e', '--export', action='store_true', default=False, - help=argparse.SUPPRESS - ) - @magic_arguments.argument( 'filename', type=str, help='Notebook name or filename' ) @@ -573,11 +571,9 @@ Currently the magic system has the following functions:""", This function can export the current IPython history to a notebook file. For example, to export the history to "foo.ipynb" do "%notebook foo.ipynb". - - The -e or --export flag is deprecated in IPython 5.2, and will be - removed in the future. 
""" args = magic_arguments.parse_argstring(self.notebook, s) + outfname = os.path.expanduser(args.filename) from nbformat import write, v4 @@ -591,7 +587,7 @@ Currently the magic system has the following functions:""", source=source )) nb = v4.new_notebook(cells=cells) - with io.open(args.filename, 'w', encoding='utf-8') as f: + with io.open(outfname, "w", encoding="utf-8") as f: write(nb, f, version=4) @magics_class @@ -622,12 +618,11 @@ class AsyncMagics(BasicMagics): If the passed parameter does not match any of the above and is a python identifier, get said object from user namespace and set it as the - runner, and activate autoawait. + runner, and activate autoawait. If the object is a fully qualified object name, attempt to import it and set it as the runner, and activate autoawait. - - + The exact behavior of autoawait is experimental and subject to change across version of IPython and Python. """ diff --git a/contrib/python/ipython/py3/IPython/core/magics/code.py b/contrib/python/ipython/py3/IPython/core/magics/code.py index d446d35ac6..65ba52b8bb 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/code.py +++ b/contrib/python/ipython/py3/IPython/core/magics/code.py @@ -22,6 +22,7 @@ import ast from itertools import chain from urllib.request import Request, urlopen from urllib.parse import urlencode +from pathlib import Path # Our own packages from IPython.core.error import TryNext, StdinNotImplementedError, UsageError @@ -184,7 +185,7 @@ class CodeMagics(Magics): """Save a set of lines or a macro to a given filename. Usage:\\ - %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ... + %save [options] filename [history] Options: @@ -198,9 +199,12 @@ class CodeMagics(Magics): -a: append to the file instead of overwriting it. - This function uses the same syntax as %history for input ranges, + The history argument uses the same syntax as %history for input ranges, then saves the lines to the filename you specify. 
+ If no ranges are specified, saves history of the current session up to + this point. + It adds a '.py' extension to the file if you don't do so yourself, and it asks for confirmation before overwriting existing files. @@ -218,6 +222,7 @@ class CodeMagics(Magics): fname, codefrom = args[0], " ".join(args[1:]) if not fname.endswith(('.py','.ipy')): fname += ext + fname = os.path.expanduser(fname) file_exists = os.path.isfile(fname) if file_exists and not force and not append: try: @@ -253,6 +258,9 @@ class CodeMagics(Magics): The argument can be an input history range, a filename, or the name of a string or macro. + If no arguments are given, uploads the history of this session up to + this point. + Options: -d: Pass a custom description. The default will say @@ -314,6 +322,9 @@ class CodeMagics(Magics): where source can be a filename, URL, input history range, macro, or element in the user namespace + If no arguments are given, loads the history of this session up to this + point. + Options: -r <lines>: Specify lines or ranges of lines to load from the source. 
@@ -332,6 +343,7 @@ class CodeMagics(Magics): confirmation before loading source with more than 200 000 characters, unless -y flag is passed or if the frontend does not support raw_input:: + %load %load myscript.py %load 7-27 %load myMacro @@ -343,13 +355,7 @@ class CodeMagics(Magics): %load -n my_module.wonder_function """ opts,args = self.parse_options(arg_s,'yns:r:') - - if not args: - raise UsageError('Missing filename, URL, input history range, ' - 'macro, or element in the user namespace.') - search_ns = 'n' in opts - contents = self.shell.find_user_code(args, search_ns=search_ns) if 's' in opts: @@ -460,10 +466,10 @@ class CodeMagics(Magics): return (None, None, None) use_temp = False - except DataIsObject: + except DataIsObject as e: # macros have a special edit function if isinstance(data, Macro): - raise MacroToEdit(data) + raise MacroToEdit(data) from e # For objects, try to edit the file where they are defined filename = find_file(data) @@ -487,8 +493,8 @@ class CodeMagics(Magics): m = ipython_input_pat.match(os.path.basename(filename)) if m: - raise InteractivelyDefined(int(m.groups()[0])) - + raise InteractivelyDefined(int(m.groups()[0])) from e + datafile = 1 if filename is None: filename = make_filename(args) @@ -532,8 +538,7 @@ class CodeMagics(Magics): self.shell.hooks.editor(filename) # and make a new macro object, to replace the old one - with open(filename) as mfile: - mvalue = mfile.read() + mvalue = Path(filename).read_text(encoding="utf-8") self.shell.user_ns[mname] = Macro(mvalue) @skip_doctest @@ -708,20 +713,22 @@ class CodeMagics(Magics): # do actual editing here print('Editing...', end=' ') sys.stdout.flush() + filepath = Path(filename) try: - # Quote filenames that may have spaces in them - if ' ' in filename: - filename = "'%s'" % filename - self.shell.hooks.editor(filename,lineno) + # Quote filenames that may have spaces in them when opening + # the editor + quoted = filename = str(filepath.absolute()) + if " " in quoted: + quoted = 
"'%s'" % quoted + self.shell.hooks.editor(quoted, lineno) except TryNext: warn('Could not open editor') return # XXX TODO: should this be generalized for all string vars? # For now, this is special-cased to blocks created by cpaste - if args.strip() == 'pasted_block': - with open(filename, 'r') as f: - self.shell.user_ns['pasted_block'] = f.read() + if args.strip() == "pasted_block": + self.shell.user_ns["pasted_block"] = filepath.read_text(encoding="utf-8") if 'x' in opts: # -x prevents actual execution print() @@ -729,10 +736,9 @@ class CodeMagics(Magics): print('done. Executing edited code...') with preserve_keys(self.shell.user_ns, '__file__'): if not is_temp: - self.shell.user_ns['__file__'] = filename - if 'r' in opts: # Untranslated IPython code - with open(filename, 'r') as f: - source = f.read() + self.shell.user_ns["__file__"] = filename + if "r" in opts: # Untranslated IPython code + source = filepath.read_text(encoding="utf-8") self.shell.run_cell(source, store_history=False) else: self.shell.safe_execfile(filename, self.shell.user_ns, @@ -740,10 +746,9 @@ class CodeMagics(Magics): if is_temp: try: - with open(filename) as f: - return f.read() + return filepath.read_text(encoding="utf-8") except IOError as msg: - if msg.filename == filename: + if Path(msg.filename) == filepath: warn('File not found. 
Did you forget to save?') return else: diff --git a/contrib/python/ipython/py3/IPython/core/magics/config.py b/contrib/python/ipython/py3/IPython/core/magics/config.py index 97b13df02e..c1387b601b 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/config.py +++ b/contrib/python/ipython/py3/IPython/core/magics/config.py @@ -54,44 +54,73 @@ class ConfigMagics(Magics): In [1]: %config Available objects for config: - TerminalInteractiveShell - HistoryManager - PrefilterManager AliasManager - IPCompleter DisplayFormatter + HistoryManager + IPCompleter + LoggingMagics + MagicsManager + OSMagics + PrefilterManager + ScriptMagics + TerminalInteractiveShell To view what is configurable on a given class, just pass the class name:: In [2]: %config IPCompleter - IPCompleter options - ----------------- - IPCompleter.omit__names=<Enum> - Current: 2 - Choices: (0, 1, 2) - Instruct the completer to omit private method names - Specifically, when completing on ``object.<tab>``. - When 2 [default]: all names that start with '_' will be excluded. - When 1: all 'magic' names (``__foo__``) will be excluded. - When 0: nothing will be excluded. - IPCompleter.merge_completions=<CBool> + IPCompleter(Completer) options + ---------------------------- + IPCompleter.backslash_combining_completions=<Bool> + Enable unicode completions, e.g. \\alpha<tab> . Includes completion of latex + commands, unicode names, and expanding unicode characters back to latex + commands. Current: True - Whether to merge completion results into a single list - If False, only the completion results from the first non-empty - completer will be returned. - IPCompleter.limit_to__all__=<CBool> + IPCompleter.debug=<Bool> + Enable debug for the Completer. Mostly print extra information for + experimental jedi integration. + Current: False + IPCompleter.greedy=<Bool> + Activate greedy completion + PENDING DEPRECATION. this is now mostly taken care of with Jedi. 
+ This will enable completion on elements of lists, results of function calls, etc., + but can be unsafe because the code is actually evaluated on TAB. Current: False + IPCompleter.jedi_compute_type_timeout=<Int> + Experimental: restrict time (in milliseconds) during which Jedi can compute types. + Set to 0 to stop computing types. Non-zero value lower than 100ms may hurt + performance by preventing jedi to build its cache. + Current: 400 + IPCompleter.limit_to__all__=<Bool> + DEPRECATED as of version 5.0. Instruct the completer to use __all__ for the completion Specifically, when completing on ``object.<tab>``. When True: only those names in obj.__all__ will be included. When False [default]: the __all__ attribute is ignored - IPCompleter.greedy=<CBool> Current: False - Activate greedy completion - This will enable completion on elements of lists, results of - function calls, etc., but can be unsafe because the code is - actually evaluated on TAB. + IPCompleter.merge_completions=<Bool> + Whether to merge completion results into a single list + If False, only the completion results from the first non-empty + completer will be returned. + Current: True + IPCompleter.omit__names=<Enum> + Instruct the completer to omit private method names + Specifically, when completing on ``object.<tab>``. + When 2 [default]: all names that start with '_' will be excluded. + When 1: all 'magic' names (``__foo__``) will be excluded. + When 0: nothing will be excluded. + Choices: any of [0, 1, 2] + Current: 2 + IPCompleter.profile_completions=<Bool> + If True, emit profiling data for completion subsystem using cProfile. + Current: False + IPCompleter.profiler_output_dir=<Unicode> + Template for path at which to output profile data for completions. + Current: '.completion_profiles' + IPCompleter.use_jedi=<Bool> + Experimental: Use Jedi to generate autocompletions. Default to True if jedi + is installed. 
+ Current: True but the real use is in setting values:: @@ -118,7 +147,7 @@ class ConfigMagics(Magics): # print available configurable names print("Available objects for config:") for name in classnames: - print(" ", name) + print(" ", name) return elif line in classnames: # `%config TerminalInteractiveShell` will print trait info for @@ -149,7 +178,7 @@ class ConfigMagics(Magics): # leave quotes on args when splitting, because we want # unquoted args to eval in user_ns cfg = Config() - exec("cfg."+line, locals(), self.shell.user_ns) + exec("cfg."+line, self.shell.user_ns, locals()) for configurable in configurables: try: diff --git a/contrib/python/ipython/py3/IPython/core/magics/display.py b/contrib/python/ipython/py3/IPython/core/magics/display.py index 0785394471..6c0eff6884 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/display.py +++ b/contrib/python/ipython/py3/IPython/core/magics/display.py @@ -12,7 +12,7 @@ #----------------------------------------------------------------------------- # Our own packages -from IPython.core.display import display, Javascript, Latex, SVG, HTML, Markdown +from IPython.display import display, Javascript, Latex, SVG, HTML, Markdown from IPython.core.magic import ( Magics, magics_class, cell_magic ) @@ -36,22 +36,33 @@ class DisplayMagics(Magics): """Run the cell block of Javascript code Alias of `%%javascript` + + Starting with IPython 8.0 %%javascript is pending deprecation to be replaced + by a more flexible system + + Please See https://github.com/ipython/ipython/issues/13376 """ self.javascript(line, cell) @cell_magic def javascript(self, line, cell): - """Run the cell block of Javascript code""" + """Run the cell block of Javascript code + + Starting with IPython 8.0 %%javascript is pending deprecation to be replaced + by a more flexible system + + Please See https://github.com/ipython/ipython/issues/13376 + """ display(Javascript(cell)) @cell_magic def latex(self, line, cell): - """Render the cell as a block 
of latex + """Render the cell as a block of LaTeX - The subset of latex which is support depends on the implementation in + The subset of LaTeX which is supported depends on the implementation in the client. In the Jupyter Notebook, this magic only renders the subset - of latex defined by MathJax + of LaTeX defined by MathJax [here](https://docs.mathjax.org/en/v2.5-latest/tex.html).""" display(Latex(cell)) diff --git a/contrib/python/ipython/py3/IPython/core/magics/execution.py b/contrib/python/ipython/py3/IPython/core/magics/execution.py index 6b651939f8..da7f780b9c 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/execution.py +++ b/contrib/python/ipython/py3/IPython/core/magics/execution.py @@ -8,55 +8,44 @@ import ast import bdb import builtins as builtin_mod +import cProfile as profile import gc import itertools +import math import os +import pstats +import re import shlex import sys import time import timeit -import math -import re +from ast import Module +from io import StringIO +from logging import error +from pathlib import Path from pdb import Restart +from warnings import warn -# cProfile was added in Python2.5 -try: - import cProfile as profile - import pstats -except ImportError: - # profile isn't bundled by default in Debian for license reasons - try: - import profile, pstats - except ImportError: - profile = pstats = None - -from IPython.core import oinspect -from IPython.core import magic_arguments -from IPython.core import page +from IPython.core import magic_arguments, oinspect, page from IPython.core.error import UsageError from IPython.core.macro import Macro -from IPython.core.magic import (Magics, magics_class, line_magic, cell_magic, - line_cell_magic, on_off, needs_local_scope, - no_var_expand) +from IPython.core.magic import ( + Magics, + cell_magic, + line_cell_magic, + line_magic, + magics_class, + needs_local_scope, + no_var_expand, + on_off, +) from IPython.testing.skipdoctest import skip_doctest -from IPython.utils.contexts 
import preserve_keys from IPython.utils.capture import capture_output +from IPython.utils.contexts import preserve_keys from IPython.utils.ipstruct import Struct from IPython.utils.module_paths import find_mod from IPython.utils.path import get_py_filename, shellglob from IPython.utils.timing import clock, clock2 -from warnings import warn -from logging import error -from io import StringIO - -if sys.version_info > (3,8): - from ast import Module -else : - # mock the new API, ignore second argument - # see https://github.com/ipython/ipython/issues/11590 - from ast import Module as OriginalModule - Module = lambda nodelist, type_ignores: OriginalModule(nodelist) - #----------------------------------------------------------------------------- # Magic implementation classes @@ -103,17 +92,15 @@ class TimeitResult(object): pm = u'\xb1' except: pass - return ( - u"{mean} {pm} {std} per loop (mean {pm} std. dev. of {runs} run{run_plural}, {loops} loop{loop_plural} each)" - .format( - pm = pm, - runs = self.repeat, - loops = self.loops, - loop_plural = "" if self.loops == 1 else "s", - run_plural = "" if self.repeat == 1 else "s", - mean = _format_time(self.average, self._precision), - std = _format_time(self.stdev, self._precision)) - ) + return "{mean} {pm} {std} per loop (mean {pm} std. dev. of {runs} run{run_plural}, {loops:,} loop{loop_plural} each)".format( + pm=pm, + runs=self.repeat, + loops=self.loops, + loop_plural="" if self.loops == 1 else "s", + run_plural="" if self.repeat == 1 else "s", + mean=_format_time(self.average, self._precision), + std=_format_time(self.stdev, self._precision), + ) def _repr_pretty_(self, p , cycle): unic = self.__str__() @@ -181,17 +168,9 @@ class ExecutionMagics(Magics): def __init__(self, shell): super(ExecutionMagics, self).__init__(shell) - if profile is None: - self.prun = self.profile_missing_notice # Default execution function used to actually run user code. 
self.default_runner = None - def profile_missing_notice(self, *args, **kwargs): - error("""\ -The profile module could not be found. It has been removed from the standard -python packages because of its non-free license. To use profiling, install the -python-profiler package from non-free.""") - @skip_doctest @no_var_expand @line_cell_magic @@ -375,18 +354,22 @@ python-profiler package from non-free.""") text_file = opts.T[0] if dump_file: prof.dump_stats(dump_file) - print('\n*** Profile stats marshalled to file',\ - repr(dump_file)+'.',sys_exit) + print( + f"\n*** Profile stats marshalled to file {repr(dump_file)}.{sys_exit}" + ) if text_file: - with open(text_file, 'w') as pfile: - pfile.write(output) - print('\n*** Profile printout saved to text file',\ - repr(text_file)+'.',sys_exit) + pfile = Path(text_file) + pfile.touch(exist_ok=True) + pfile.write_text(output, encoding="utf-8") + + print( + f"\n*** Profile printout saved to text file {repr(text_file)}.{sys_exit}" + ) if 'r' in opts: return stats - else: - return None + + return None @line_magic def pdb(self, parameter_s=''): @@ -423,7 +406,6 @@ python-profiler package from non-free.""") self.shell.call_pdb = new_pdb print('Automatic pdb calling has been turned',on_off(new_pdb)) - @skip_doctest @magic_arguments.magic_arguments() @magic_arguments.argument('--breakpoint', '-b', metavar='FILE:LINE', help=""" @@ -529,7 +511,7 @@ python-profiler package from non-free.""") """Run the named file inside IPython as a program. Usage:: - + %run [-n -i -e -G] [( -t [-N<N>] | -d [-b<N>] | -p [profile options] )] ( -m mod | filename ) [args] @@ -570,7 +552,7 @@ python-profiler package from non-free.""") *two* back slashes (e.g. ``\\\\*``) to suppress expansions. To completely disable these expansions, you can use -G flag. - On Windows systems, the use of single quotes `'` when specifying + On Windows systems, the use of single quotes `'` when specifying a file is not supported. Use double quotes `"`. 
Options: @@ -712,9 +694,9 @@ python-profiler package from non-free.""") fpath = None # initialize to make sure fpath is in scope later fpath = arg_lst[0] filename = file_finder(fpath) - except IndexError: + except IndexError as e: msg = 'you must provide at least a filename.' - raise Exception(msg) + raise Exception(msg) from e except IOError as e: try: msg = str(e) @@ -722,7 +704,7 @@ python-profiler package from non-free.""") msg = e.message if os.name == 'nt' and re.match(r"^'.*'$",fpath): warn('For Windows, use double quotes to wrap a filename: %run "mypath\\myfile.py"') - raise Exception(msg) + raise Exception(msg) from e except TypeError: if fpath in sys.meta_path: filename = "" @@ -751,7 +733,7 @@ python-profiler package from non-free.""") sys.argv = [filename] + args # put in the proper filename if 'n' in opts: - name = os.path.splitext(os.path.basename(filename))[0] + name = Path(filename).stem else: name = '__main__' @@ -1085,7 +1067,6 @@ python-profiler package from non-free.""") In [6]: %timeit -n1 time.sleep(2) - The times reported by %timeit will be slightly higher than those reported by the timeit.py script when variables are accessed. This is due to the fact that %timeit executes the statement in the namespace @@ -1094,8 +1075,9 @@ python-profiler package from non-free.""") does not matter as long as results from timeit.py are not mixed with those from %timeit.""" - opts, stmt = self.parse_options(line,'n:r:tcp:qo', - posix=False, strict=False) + opts, stmt = self.parse_options( + line, "n:r:tcp:qo", posix=False, strict=False, preserve_non_opts=True + ) if stmt == "" and cell is None: return @@ -1218,7 +1200,7 @@ python-profiler package from non-free.""") The CPU and wall clock times are printed, and the value of the expression (if any) is returned. Note that under Win32, system time is always reported as 0, since it can not be measured. 
- + This function can be used both as a line and cell magic: - In line mode you can time a single-line statement (though multiple @@ -1255,7 +1237,6 @@ python-profiler package from non-free.""") CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s Wall time: 0.00 - .. note:: The time needed by Python to compile the given expression will be reported if it is more than 0.1s. @@ -1345,19 +1326,22 @@ python-profiler package from non-free.""") wall_end = wtime() # Compute actual times and report - wall_time = wall_end-wall_st - cpu_user = end[0]-st[0] - cpu_sys = end[1]-st[1] - cpu_tot = cpu_user+cpu_sys - # On windows cpu_sys is always zero, so no new information to the next print - if sys.platform != 'win32': - print("CPU times: user %s, sys: %s, total: %s" % \ - (_format_time(cpu_user),_format_time(cpu_sys),_format_time(cpu_tot))) - print("Wall time: %s" % _format_time(wall_time)) + wall_time = wall_end - wall_st + cpu_user = end[0] - st[0] + cpu_sys = end[1] - st[1] + cpu_tot = cpu_user + cpu_sys + # On windows cpu_sys is always zero, so only total is displayed + if sys.platform != "win32": + print( + f"CPU times: user {_format_time(cpu_user)}, sys: {_format_time(cpu_sys)}, total: {_format_time(cpu_tot)}" + ) + else: + print(f"CPU times: total: {_format_time(cpu_tot)}") + print(f"Wall time: {_format_time(wall_time)}") if tc > tc_min: - print("Compiler : %s" % _format_time(tc)) + print(f"Compiler : {_format_time(tc)}") if tp > tp_min: - print("Parser : %s" % _format_time(tp)) + print(f"Parser : {_format_time(tp)}") return out @skip_doctest diff --git a/contrib/python/ipython/py3/IPython/core/magics/extension.py b/contrib/python/ipython/py3/IPython/core/magics/extension.py index ba93b3be75..2bc76b2d55 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/extension.py +++ b/contrib/python/ipython/py3/IPython/core/magics/extension.py @@ -41,7 +41,7 @@ class ExtensionMagics(Magics): @line_magic def unload_ext(self, module_str): """Unload an IPython extension by its 
module name. - + Not all extensions can be unloaded, only those which define an ``unload_ipython_extension`` function. """ diff --git a/contrib/python/ipython/py3/IPython/core/magics/history.py b/contrib/python/ipython/py3/IPython/core/magics/history.py index 5af09e5ce1..faa4335faa 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/history.py +++ b/contrib/python/ipython/py3/IPython/core/magics/history.py @@ -16,6 +16,7 @@ import os import sys from io import open as io_open +import fnmatch # Our own packages from IPython.core.error import StdinNotImplementedError @@ -104,7 +105,7 @@ class HistoryMagics(Magics): By default, all input history from the current session is displayed. Ranges of history can be indicated using the syntax: - + ``4`` Line 4, current session ``4-6`` @@ -116,7 +117,7 @@ class HistoryMagics(Magics): ``~8/1-~6/5`` From the first line of 8 sessions ago, to the fifth line of 6 sessions ago. - + Multiple ranges can be entered, separated by spaces The same syntax is used by %macro, %save, %edit, %rerun @@ -150,6 +151,7 @@ class HistoryMagics(Magics): # We don't want to close stdout at the end! close_at_end = False else: + outfname = os.path.expanduser(outfname) if os.path.exists(outfname): try: ans = io.ask_yes_no("File %r exists. Overwrite?" 
% outfname) @@ -170,7 +172,8 @@ class HistoryMagics(Magics): pattern = None limit = None if args.limit is _unspecified else args.limit - if args.pattern is not None: + range_pattern = False + if args.pattern is not None and not args.range: if args.pattern: pattern = "*" + " ".join(args.pattern) + "*" else: @@ -182,11 +185,12 @@ class HistoryMagics(Magics): n = 10 if limit is None else limit hist = history_manager.get_tail(n, raw=raw, output=get_output) else: - if args.range: # Get history by ranges - hist = history_manager.get_range_by_str(" ".join(args.range), - raw, get_output) - else: # Just get history for the current session - hist = history_manager.get_range(raw=raw, output=get_output) + if args.pattern: + range_pattern = "*" + " ".join(args.pattern) + "*" + print_nums = True + hist = history_manager.get_range_by_str( + " ".join(args.range), raw, get_output + ) # We could be displaying the entire history, so let's not try to pull # it into a list in memory. Anything that needs more space will just @@ -200,6 +204,9 @@ class HistoryMagics(Magics): # into an editor. 
if get_output: inline, output = inline + if range_pattern: + if not fnmatch.fnmatch(inline, range_pattern): + continue inline = inline.expandtabs(4).rstrip() multiline = "\n" in inline @@ -293,7 +300,19 @@ class HistoryMagics(Magics): """ opts, args = self.parse_options(parameter_s, 'l:g:', mode='string') if "l" in opts: # Last n lines - n = int(opts['l']) + try: + n = int(opts["l"]) + except ValueError: + print("Number of lines must be an integer") + return + + if n == 0: + print("Requested 0 last lines - nothing to run") + return + elif n < 0: + print("Number of lines to rerun cannot be negative") + return + hist = self.shell.history_manager.get_tail(n) elif "g" in opts: # Search p = "*"+opts['g']+"*" diff --git a/contrib/python/ipython/py3/IPython/core/magics/namespace.py b/contrib/python/ipython/py3/IPython/core/magics/namespace.py index 5cc2d81ca2..c86d3de9b6 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/namespace.py +++ b/contrib/python/ipython/py3/IPython/core/magics/namespace.py @@ -173,7 +173,7 @@ class NamespaceMagics(Magics): 'builtin', 'user', 'user_global','internal', 'alias', where 'builtin' and 'user' are the search defaults. Note that you should not use quotes when specifying namespaces. - + -l: List all available object types for object matching. This function can be used without arguments. 
@@ -203,9 +203,9 @@ class NamespaceMagics(Magics): Show objects beginning with a single _:: %psearch -a _* list objects beginning with a single underscore - + List available objects:: - + %psearch -l list all available object types """ # default namespaces to be searched @@ -252,7 +252,6 @@ class NamespaceMagics(Magics): Examples -------- - Define two variables and list them with who_ls:: In [1]: alpha = 123 @@ -367,7 +366,6 @@ class NamespaceMagics(Magics): Examples -------- - Define two variables and list them with whos:: In [1]: alpha = 123 @@ -484,24 +482,26 @@ class NamespaceMagics(Magics): Parameters ---------- - -f : force reset without asking for confirmation. - - -s : 'Soft' reset: Only clears your namespace, leaving history intact. + -f + force reset without asking for confirmation. + -s + 'Soft' reset: Only clears your namespace, leaving history intact. References to objects may be kept. By default (without this option), we do a 'hard' reset, giving you a new session and removing all references to objects from the current session. - - --aggressive: Try to aggressively remove modules from sys.modules ; this + --aggressive + Try to aggressively remove modules from sys.modules ; this may allow you to reimport Python modules that have been updated and pick up changes, but can have unattended consequences. 
- in : reset input history - - out : reset output history - - dhist : reset directory history - - array : reset only variables that are NumPy arrays + in + reset input history + out + reset output history + dhist + reset directory history + array + reset only variables that are NumPy arrays See Also -------- @@ -624,7 +624,6 @@ class NamespaceMagics(Magics): Examples -------- - We first fully reset the namespace so your output looks identical to this example for pedagogical reasons; in practice you do not need a full reset:: @@ -687,8 +686,8 @@ class NamespaceMagics(Magics): else: try: m = re.compile(regex) - except TypeError: - raise TypeError('regex must be a string or compiled pattern') + except TypeError as e: + raise TypeError('regex must be a string or compiled pattern') from e for i in self.who_ls(): if m.search(i): del(user_ns[i]) diff --git a/contrib/python/ipython/py3/IPython/core/magics/osm.py b/contrib/python/ipython/py3/IPython/core/magics/osm.py index 90da7e2280..41957a2850 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/osm.py +++ b/contrib/python/ipython/py3/IPython/core/magics/osm.py @@ -63,10 +63,9 @@ class OSMagics(Magics): super().__init__(shell=shell, **kwargs) - @skip_doctest def _isexec_POSIX(self, file): """ - Test for executable on a POSIX system + Test for executable on a POSIX system """ if os.access(file.path, os.X_OK): # will fail on maxOS if access is not X_OK @@ -75,17 +74,15 @@ class OSMagics(Magics): - @skip_doctest def _isexec_WIN(self, file): """ - Test for executable file on non POSIX system + Test for executable file on non POSIX system """ return file.is_file() and self.execre.match(file.name) is not None - @skip_doctest def isexec(self, file): """ - Test for executable file on non POSIX system + Test for executable file on non POSIX system """ if self.is_posix: return self._isexec_POSIX(file) @@ -130,7 +127,7 @@ class OSMagics(Magics): Aliases expand Python variables just like system calls using ! or !! 
do: all expressions prefixed with '$' get expanded. For details of the semantic rules, see PEP-215: - http://www.python.org/peps/pep-0215.html. This is the library used by + https://peps.python.org/pep-0215/. This is the library used by IPython for variable expansion. If you want to access a true shell variable, an extra $ is necessary to prevent its expansion by IPython:: @@ -293,8 +290,8 @@ class OSMagics(Magics): """ try: return os.getcwd() - except FileNotFoundError: - raise UsageError("CWD no longer exists - please use %cd to change directory.") + except FileNotFoundError as e: + raise UsageError("CWD no longer exists - please use %cd to change directory.") from e @skip_doctest @line_magic @@ -302,33 +299,34 @@ class OSMagics(Magics): """Change the current working directory. This command automatically maintains an internal list of directories - you visit during your IPython session, in the variable _dh. The - command %dhist shows this history nicely formatted. You can also - do 'cd -<tab>' to see directory history conveniently. - + you visit during your IPython session, in the variable ``_dh``. The + command :magic:`%dhist` shows this history nicely formatted. You can + also do ``cd -<tab>`` to see directory history conveniently. Usage: - cd 'dir': changes to directory 'dir'. - - cd -: changes to the last visited directory. - - cd -<n>: changes to the n-th directory in the directory history. - - cd --foo: change to directory that matches 'foo' in history + - ``cd 'dir'``: changes to directory 'dir'. + - ``cd -``: changes to the last visited directory. + - ``cd -<n>``: changes to the n-th directory in the directory history. + - ``cd --foo``: change to directory that matches 'foo' in history + - ``cd -b <bookmark_name>``: jump to a bookmark set by %bookmark + - Hitting a tab key after ``cd -b`` allows you to tab-complete + bookmark names. 
- cd -b <bookmark_name>: jump to a bookmark set by %bookmark - (note: cd <bookmark_name> is enough if there is no - directory <bookmark_name>, but a bookmark with the name exists.) - 'cd -b <tab>' allows you to tab-complete bookmark names. + .. note:: + ``cd <bookmark_name>`` is enough if there is no directory + ``<bookmark_name>``, but a bookmark with the name exists. Options: - -q: quiet. Do not print the working directory after the cd command is - executed. By default IPython's cd command does print this directory, - since the default prompts do not display path information. + -q Be quiet. Do not print the working directory after the + cd command is executed. By default IPython's cd + command does print this directory, since the default + prompts do not display path information. - Note that !cd doesn't work for this purpose because the shell where - !command runs is immediately discarded after executing 'command'. + .. note:: + Note that ``!cd`` doesn't work for this purpose because the shell + where ``!command`` runs is immediately discarded after executing + 'command'. 
Examples -------- @@ -386,8 +384,8 @@ class OSMagics(Magics): if ps == '-': try: ps = self.shell.user_ns['_dh'][-2] - except IndexError: - raise UsageError('%cd -: No previous directory to change to.') + except IndexError as e: + raise UsageError('%cd -: No previous directory to change to.') from e # jump to bookmark if needed else: if not os.path.isdir(ps) or 'b' in opts: @@ -436,11 +434,11 @@ class OSMagics(Magics): Usage:\\ - %env: lists all environment variables/values - %env var: get value for var - %env var val: set value for var - %env var=val: set value for var - %env var=$val: set value for var, using python expansion if possible + :``%env``: lists all environment variables/values + :``%env var``: get value for var + :``%env var val``: set value for var + :``%env var=val``: set value for var + :``%env var=$val``: set value for var, using python expansion if possible """ if parameter_s.strip(): split = '=' if '=' in parameter_s else ' ' @@ -506,7 +504,7 @@ class OSMagics(Magics): if tgt: self.cd(parameter_s) dir_s.insert(0,cwd) - return self.shell.magic('dirs') + return self.shell.run_line_magic('dirs', '') @line_magic def popd(self, parameter_s=''): @@ -630,8 +628,8 @@ class OSMagics(Magics): # while the list form is useful to loop over: In [6]: for f in a.l: - ...: !wc -l $f - ...: + ...: !wc -l $f + ...: 146 setup.py 130 win32_manual_post_install.py @@ -764,15 +762,15 @@ class OSMagics(Magics): if 'd' in opts: try: todel = args[0] - except IndexError: + except IndexError as e: raise UsageError( - "%bookmark -d: must provide a bookmark to delete") + "%bookmark -d: must provide a bookmark to delete") from e else: try: del bkms[todel] - except KeyError: + except KeyError as e: raise UsageError( - "%%bookmark -d: Can't delete bookmark '%s'" % todel) + "%%bookmark -d: Can't delete bookmark '%s'" % todel) from e elif 'r' in opts: bkms = {} @@ -803,18 +801,17 @@ class OSMagics(Magics): to be Python source and will show it with syntax highlighting. 
This magic command can either take a local filename, an url, - an history range (see %history) or a macro as argument :: + an history range (see %history) or a macro as argument. + + If no parameter is given, prints out history of current session up to + this point. :: %pycat myscript.py %pycat 7-27 %pycat myMacro %pycat http://www.example.com/myscript.py """ - if not parameter_s: - raise UsageError('Missing filename, URL, input history range, ' - 'or macro.') - - try : + try: cont = self.shell.find_user_code(parameter_s, skip_encoding_cookie=False) except (ValueError, IOError): print("Error: no such file, variable, URL, history range or macro") @@ -835,7 +832,7 @@ class OSMagics(Magics): @cell_magic def writefile(self, line, cell): """Write the contents of the cell to a file. - + The file will be overwritten unless the -a (--append) flag is specified. """ args = magic_arguments.parse_argstring(self.writefile, line) diff --git a/contrib/python/ipython/py3/IPython/core/magics/packaging.py b/contrib/python/ipython/py3/IPython/core/magics/packaging.py index 04bde051ae..2f7652c169 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/packaging.py +++ b/contrib/python/ipython/py3/IPython/core/magics/packaging.py @@ -8,10 +8,10 @@ # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- -import os import re import shlex import sys +from pathlib import Path from IPython.core.magic import Magics, magics_class, line_magic @@ -19,27 +19,28 @@ from IPython.core.magic import Magics, magics_class, line_magic def _is_conda_environment(): """Return True if the current Python executable is in a conda env""" # TODO: does this need to change on windows? 
- conda_history = os.path.join(sys.prefix, 'conda-meta', 'history') - return os.path.exists(conda_history) + return Path(sys.prefix, "conda-meta", "history").exists() def _get_conda_executable(): """Find the path to the conda executable""" # Check if there is a conda executable in the same directory as the Python executable. # This is the case within conda's root environment. - conda = os.path.join(os.path.dirname(sys.executable), 'conda') - if os.path.isfile(conda): - return conda + conda = Path(sys.executable).parent / "conda" + if conda.is_file(): + return str(conda) # Otherwise, attempt to extract the executable from conda history. # This applies in any conda environment. - R = re.compile(r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]") - with open(os.path.join(sys.prefix, 'conda-meta', 'history')) as f: - for line in f: - match = R.match(line) - if match: - return match.groupdict()['command'] - + history = Path(sys.prefix, "conda-meta", "history").read_text(encoding="utf-8") + match = re.search( + r"^#\s*cmd:\s*(?P<command>.*conda)\s[create|install]", + history, + flags=re.MULTILINE, + ) + if match: + return match.groupdict()["command"] + # Fallback: assume conda is available on the system path. return "conda" @@ -78,18 +79,19 @@ class PackagingMagics(Magics): @line_magic def conda(self, line): """Run the conda package manager within the current kernel. - + Usage: %conda install [pkgs] """ if not _is_conda_environment(): raise ValueError("The python kernel does not appear to be a conda environment. 
" "Please use ``%pip install`` instead.") - + conda = _get_conda_executable() args = shlex.split(line) - command = args[0] - args = args[1:] + command = args[0] if len(args) > 0 else "" + args = args[1:] if len(args) > 1 else [""] + extra_args = [] # When the subprocess does not allow us to respond "yes" during the installation, diff --git a/contrib/python/ipython/py3/IPython/core/magics/pylab.py b/contrib/python/ipython/py3/IPython/core/magics/pylab.py index 9ec441a3e2..0f3fff62fa 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/pylab.py +++ b/contrib/python/ipython/py3/IPython/core/magics/pylab.py @@ -154,6 +154,9 @@ class PylabMagics(Magics): gui, backend, clobbered = self.shell.enable_pylab(args.gui, import_all=import_all) self._show_matplotlib_backend(args.gui, backend) + print( + "%pylab is deprecated, use %matplotlib inline and import the required libraries." + ) print("Populating the interactive namespace from numpy and matplotlib") if clobbered: warn("pylab import has clobbered these variables: %s" % clobbered + diff --git a/contrib/python/ipython/py3/IPython/core/magics/script.py b/contrib/python/ipython/py3/IPython/core/magics/script.py index 8b7f6f94e0..9fd2fc6c0d 100644 --- a/contrib/python/ipython/py3/IPython/core/magics/script.py +++ b/contrib/python/ipython/py3/IPython/core/magics/script.py @@ -3,22 +3,22 @@ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
+import asyncio +import atexit import errno import os -import sys import signal +import sys import time -from subprocess import Popen, PIPE, CalledProcessError -import atexit +from subprocess import CalledProcessError +from threading import Thread + +from traitlets import Any, Dict, List, default from IPython.core import magic_arguments -from IPython.core.magic import ( - Magics, magics_class, line_magic, cell_magic -) -from IPython.lib.backgroundjobs import BackgroundJobManager -from IPython.utils import py3compat +from IPython.core.async_helpers import _AsyncIOProxy +from IPython.core.magic import Magics, cell_magic, line_magic, magics_class from IPython.utils.process import arg_split -from traitlets import List, Dict, default #----------------------------------------------------------------------------- # Magic implementation classes @@ -56,15 +56,16 @@ def script_args(f): ), magic_arguments.argument( '--no-raise-error', action="store_false", dest='raise_error', - help="""Whether you should raise an error message in addition to + help="""Whether you should raise an error message in addition to a stream on stderr if you get a nonzero exit code. - """ - ) + """, + ), ] for arg in args: f = arg(f) return f + @magics_class class ScriptMagics(Magics): """Magics for talking to scripts @@ -73,6 +74,17 @@ class ScriptMagics(Magics): with a program in a subprocess, and registers a few top-level magics that call %%script with common interpreters. """ + + event_loop = Any( + help=""" + The event loop on which to run subprocesses + + Not the main event loop, + because we want to be able to make blocking calls + and have certain requirements we don't want to impose on the main loop. 
+ """ + ) + script_magics = List( help="""Extra script cell magics to define @@ -114,7 +126,6 @@ class ScriptMagics(Magics): def __init__(self, shell=None): super(ScriptMagics, self).__init__(shell=shell) self._generate_script_magics() - self.job_manager = BackgroundJobManager() self.bg_processes = [] atexit.register(self.kill_bg_processes) @@ -136,7 +147,7 @@ class ScriptMagics(Magics): def named_script_magic(line, cell): # if line, add it as cl-flags if line: - line = "%s %s" % (script, line) + line = "%s %s" % (script, line) else: line = script return self.shebang(line, cell) @@ -157,16 +168,16 @@ class ScriptMagics(Magics): @cell_magic("script") def shebang(self, line, cell): """Run a cell via a shell command - + The `%%script` line is like the #! line of script, specifying a program (bash, perl, ruby, etc.) with which to run. - + The rest of the cell is run by that program. - + Examples -------- :: - + In [1]: %%script bash ...: for i in 1 2 3; do ...: echo $i @@ -175,18 +186,70 @@ class ScriptMagics(Magics): 2 3 """ - argv = arg_split(line, posix = not sys.platform.startswith('win')) + + # Create the event loop in which to run script magics + # this operates on a background thread + if self.event_loop is None: + if sys.platform == "win32": + # don't override the current policy, + # just create an event loop + event_loop = asyncio.WindowsProactorEventLoopPolicy().new_event_loop() + else: + event_loop = asyncio.new_event_loop() + self.event_loop = event_loop + + # start the loop in a background thread + asyncio_thread = Thread(target=event_loop.run_forever, daemon=True) + asyncio_thread.start() + else: + event_loop = self.event_loop + + def in_thread(coro): + """Call a coroutine on the asyncio thread""" + return asyncio.run_coroutine_threadsafe(coro, event_loop).result() + + async def _handle_stream(stream, stream_arg, file_object): + while True: + line = (await stream.readline()).decode("utf8") + if not line: + break + if stream_arg: + 
self.shell.user_ns[stream_arg] = line + else: + file_object.write(line) + file_object.flush() + + async def _stream_communicate(process, cell): + process.stdin.write(cell) + process.stdin.close() + stdout_task = asyncio.create_task( + _handle_stream(process.stdout, args.out, sys.stdout) + ) + stderr_task = asyncio.create_task( + _handle_stream(process.stderr, args.err, sys.stderr) + ) + await asyncio.wait([stdout_task, stderr_task]) + await process.wait() + + argv = arg_split(line, posix=not sys.platform.startswith("win")) args, cmd = self.shebang.parser.parse_known_args(argv) - + try: - p = Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=PIPE) + p = in_thread( + asyncio.create_subprocess_exec( + *cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + stdin=asyncio.subprocess.PIPE, + ) + ) except OSError as e: if e.errno == errno.ENOENT: print("Couldn't find program: %r" % cmd[0]) return else: raise - + if not cell.endswith('\n'): cell += '\n' cell = cell.encode('utf8', 'replace') @@ -195,30 +258,35 @@ class ScriptMagics(Magics): self._gc_bg_processes() to_close = [] if args.out: - self.shell.user_ns[args.out] = p.stdout + self.shell.user_ns[args.out] = _AsyncIOProxy(p.stdout, event_loop) else: to_close.append(p.stdout) if args.err: - self.shell.user_ns[args.err] = p.stderr + self.shell.user_ns[args.err] = _AsyncIOProxy(p.stderr, event_loop) else: to_close.append(p.stderr) - self.job_manager.new(self._run_script, p, cell, to_close, daemon=True) + event_loop.call_soon_threadsafe( + lambda: asyncio.Task(self._run_script(p, cell, to_close)) + ) if args.proc: - self.shell.user_ns[args.proc] = p + proc_proxy = _AsyncIOProxy(p, event_loop) + proc_proxy.stdout = _AsyncIOProxy(p.stdout, event_loop) + proc_proxy.stderr = _AsyncIOProxy(p.stderr, event_loop) + self.shell.user_ns[args.proc] = proc_proxy return - + try: - out, err = p.communicate(cell) + in_thread(_stream_communicate(p, cell)) except KeyboardInterrupt: try: p.send_signal(signal.SIGINT) - 
time.sleep(0.1) - if p.poll() is not None: + in_thread(asyncio.wait_for(p.wait(), timeout=0.1)) + if p.returncode is not None: print("Process is interrupted.") return p.terminate() - time.sleep(0.1) - if p.poll() is not None: + in_thread(asyncio.wait_for(p.wait(), timeout=0.1)) + if p.returncode is not None: print("Process is terminated.") return p.kill() @@ -226,31 +294,31 @@ class ScriptMagics(Magics): except OSError: pass except Exception as e: - print("Error while terminating subprocess (pid=%i): %s" \ - % (p.pid, e)) + print("Error while terminating subprocess (pid=%i): %s" % (p.pid, e)) return - out = py3compat.decode(out) - err = py3compat.decode(err) - if args.out: - self.shell.user_ns[args.out] = out - else: - sys.stdout.write(out) - sys.stdout.flush() - if args.err: - self.shell.user_ns[args.err] = err - else: - sys.stderr.write(err) - sys.stderr.flush() - if args.raise_error and p.returncode!=0: - raise CalledProcessError(p.returncode, cell, output=out, stderr=err) - - def _run_script(self, p, cell, to_close): + + if args.raise_error and p.returncode != 0: + # If we get here and p.returncode is still None, we must have + # killed it but not yet seen its return code. We don't wait for it, + # in case it's stuck in uninterruptible sleep. 
-9 = SIGKILL + rc = p.returncode or -9 + raise CalledProcessError(rc, cell) + + shebang.__skip_doctest__ = os.name != "posix" + + async def _run_script(self, p, cell, to_close): """callback for running the script in the background""" + p.stdin.write(cell) + await p.stdin.drain() p.stdin.close() + await p.stdin.wait_closed() + await p.wait() + # asyncio read pipes have no close + # but we should drain the data anyway for s in to_close: - s.close() - p.wait() + await s.read() + self._gc_bg_processes() @line_magic("killbgscripts") def killbgscripts(self, _nouse_=''): @@ -263,7 +331,7 @@ class ScriptMagics(Magics): if not self.bg_processes: return for p in self.bg_processes: - if p.poll() is None: + if p.returncode is None: try: p.send_signal(signal.SIGINT) except: @@ -273,7 +341,7 @@ class ScriptMagics(Magics): if not self.bg_processes: return for p in self.bg_processes: - if p.poll() is None: + if p.returncode is None: try: p.terminate() except: @@ -283,7 +351,7 @@ class ScriptMagics(Magics): if not self.bg_processes: return for p in self.bg_processes: - if p.poll() is None: + if p.returncode is None: try: p.kill() except: @@ -291,4 +359,4 @@ class ScriptMagics(Magics): self._gc_bg_processes() def _gc_bg_processes(self): - self.bg_processes = [p for p in self.bg_processes if p.poll() is None] + self.bg_processes = [p for p in self.bg_processes if p.returncode is None] diff --git a/contrib/python/ipython/py3/IPython/core/oinspect.py b/contrib/python/ipython/py3/IPython/core/oinspect.py index 272916c966..1a5c0ae070 100644 --- a/contrib/python/ipython/py3/IPython/core/oinspect.py +++ b/contrib/python/ipython/py3/IPython/core/oinspect.py @@ -222,7 +222,7 @@ def format_argspec(argspec): This takes a dict instead of ordered arguments and calls inspect.format_argspec with the arguments in the necessary order. - DEPRECATED: Do not use; will be removed in future versions. + DEPRECATED (since 7.10): Do not use; will be removed in future versions. 
""" warnings.warn('`format_argspec` function is deprecated as of IPython 7.10' @@ -234,10 +234,13 @@ def format_argspec(argspec): @undoc def call_tip(oinfo, format_call=True): - """DEPRECATED. Extract call tip data from an oinfo dict. - """ - warnings.warn('`call_tip` function is deprecated as of IPython 6.0' - 'and will be removed in future versions.', DeprecationWarning, stacklevel=2) + """DEPRECATED since 6.0. Extract call tip data from an oinfo dict.""" + warnings.warn( + "`call_tip` function is deprecated as of IPython 6.0" + "and will be removed in future versions.", + DeprecationWarning, + stacklevel=2, + ) # Get call definition argspec = oinfo.get('argspec') if argspec is None: @@ -299,7 +302,7 @@ def find_file(obj) -> str: Returns ------- fname : str - The absolute path to the file where the object was defined. + The absolute path to the file where the object was defined. """ obj = _get_wrapped(obj) @@ -334,7 +337,7 @@ def find_source_lines(obj): Returns ------- lineno : int - The line number where the object definition starts. + The line number where the object definition starts. """ obj = _get_wrapped(obj) @@ -425,7 +428,6 @@ class Inspector(Colorable): Examples -------- - In [1]: class NoInit: ...: pass @@ -516,12 +518,12 @@ class Inspector(Colorable): """Return a mime bundle representation of the input text. - if `formatter` is None, the returned mime bundle has - a `text/plain` field, with the input text. - a `text/html` field with a `<pre>` tag containing the input text. + a ``text/plain`` field, with the input text. + a ``text/html`` field with a ``<pre>`` tag containing the input text. - - if `formatter` is not None, it must be a callable transforming the - input text into a mime bundle. Default values for `text/plain` and - `text/html` representations are the ones described above. + - if ``formatter`` is not None, it must be a callable transforming the + input text into a mime bundle. 
Default values for ``text/plain`` and + ``text/html`` representations are the ones described above. Note: @@ -566,24 +568,27 @@ class Inspector(Colorable): bundle['text/plain'] = text return bundle - def _get_info(self, obj, oname='', formatter=None, info=None, detail_level=0): + def _get_info( + self, obj, oname="", formatter=None, info=None, detail_level=0, omit_sections=() + ): """Retrieve an info dict and format it. Parameters - ========== - - obj: any + ---------- + obj : any Object to inspect and return info from - oname: str (default: ''): + oname : str (default: ''): Name of the variable pointing to `obj`. - formatter: callable - info: + formatter : callable + info already computed information - detail_level: integer + detail_level : integer Granularity of detail level, if set to 1, give more information. + omit_sections : container[str] + Titles or keys to omit from output (can be set, tuple, etc., anything supporting `in`) """ - info = self._info(obj, oname=oname, info=info, detail_level=detail_level) + info = self.info(obj, oname=oname, info=info, detail_level=detail_level) _mime = { 'text/plain': [], @@ -591,6 +596,8 @@ class Inspector(Colorable): } def append_field(bundle, title:str, key:str, formatter=None): + if title in omit_sections or key in omit_sections: + return field = info[key] if field is not None: formatted_field = self._mime_format(field, formatter) @@ -655,7 +662,16 @@ class Inspector(Colorable): return self.format_mime(_mime) - def pinfo(self, obj, oname='', formatter=None, info=None, detail_level=0, enable_html_pager=True): + def pinfo( + self, + obj, + oname="", + formatter=None, + info=None, + detail_level=0, + enable_html_pager=True, + omit_sections=(), + ): """Show detailed information about an object. Optional arguments: @@ -676,40 +692,48 @@ class Inspector(Colorable): precomputed already. - detail_level: if set to 1, more information is given. 
+ + - omit_sections: set of section keys and titles to omit """ - info = self._get_info(obj, oname, formatter, info, detail_level) + info = self._get_info( + obj, oname, formatter, info, detail_level, omit_sections=omit_sections + ) if not enable_html_pager: del info['text/html'] page.page(info) - def info(self, obj, oname='', formatter=None, info=None, detail_level=0): - """DEPRECATED. Compute a dict with detailed information about an object. + def _info(self, obj, oname="", info=None, detail_level=0): """ - if formatter is not None: - warnings.warn('The `formatter` keyword argument to `Inspector.info`' - 'is deprecated as of IPython 5.0 and will have no effects.', - DeprecationWarning, stacklevel=2) - return self._info(obj, oname=oname, info=info, detail_level=detail_level) + Inspector.info() was likely improperly marked as deprecated + while only a parameter was deprecated. We "un-deprecate" it. + """ + + warnings.warn( + "The `Inspector.info()` method has been un-deprecated as of 8.0 " + "and the `formatter=` keyword removed. `Inspector._info` is now " + "an alias, and you can just call `.info()` directly.", + DeprecationWarning, + stacklevel=2, + ) + return self.info(obj, oname=oname, info=info, detail_level=detail_level) - def _info(self, obj, oname='', info=None, detail_level=0) -> dict: + def info(self, obj, oname="", info=None, detail_level=0) -> dict: """Compute a dict with detailed information about an object. Parameters - ========== - - obj: any + ---------- + obj : any An object to find information about - oname: str (default: ''): + oname : str (default: '') Name of the variable pointing to `obj`. - info: (default: None) + info : (default: None) A struct (dict like with attr access) with some information fields which may have been precomputed already. - detail_level: int (default:0) + detail_level : int (default:0) If set to 1, more information is given. Returns - ======= - + ------- An object info dict with known fields from `info_fields`. 
Keys are strings, values are string or None. """ @@ -941,7 +965,7 @@ class Inspector(Colorable): - show_all(False): show all names, including those starting with underscores. - + - list_types(False): list all available object types for object matching. """ #print 'ps pattern:<%r>' % pattern # dbg diff --git a/contrib/python/ipython/py3/IPython/core/page.py b/contrib/python/ipython/py3/IPython/core/page.py index ed16b61781..d3e6a9eef5 100644 --- a/contrib/python/ipython/py3/IPython/core/page.py +++ b/contrib/python/ipython/py3/IPython/core/page.py @@ -22,9 +22,10 @@ import tempfile import subprocess from io import UnsupportedOperation +from pathlib import Path from IPython import get_ipython -from IPython.core.display import display +from IPython.display import display from IPython.core.error import TryNext from IPython.utils.data import chop from IPython.utils.process import system @@ -45,7 +46,7 @@ def display_page(strng, start=0, screen_lines=25): def as_hook(page_func): """Wrap a pager func to strip the `self` arg - + so it can be called as a hook. """ return lambda self, *args, **kwargs: page_func(*args, **kwargs) @@ -106,7 +107,7 @@ def _detect_screen_size(screen_lines_def): term_flags = termios.tcgetattr(sys.stdout) except termios.error as err: # can fail on Linux 2.6, pager_page will catch the TypeError - raise TypeError('termios error: {0}'.format(err)) + raise TypeError('termios error: {0}'.format(err)) from err try: scr = curses.initscr() @@ -126,7 +127,7 @@ def _detect_screen_size(screen_lines_def): def pager_page(strng, start=0, screen_lines=0, pager_cmd=None): """Display a string, piping through a pager after a certain length. - + strng can be a mime-bundle dict, supplying multiple representations, keyed by mime-type. 
@@ -195,28 +196,32 @@ def pager_page(strng, start=0, screen_lines=0, pager_cmd=None): retval = 1 else: fd, tmpname = tempfile.mkstemp('.txt') + tmppath = Path(tmpname) try: os.close(fd) - with open(tmpname, 'wt') as tmpfile: + with tmppath.open("wt", encoding="utf-8") as tmpfile: tmpfile.write(strng) - cmd = "%s < %s" % (pager_cmd, tmpname) + cmd = "%s < %s" % (pager_cmd, tmppath) # tmpfile needs to be closed for windows if os.system(cmd): retval = 1 else: retval = None finally: - os.remove(tmpname) + Path.unlink(tmppath) else: try: retval = None # Emulate os.popen, but redirect stderr - proc = subprocess.Popen(pager_cmd, - shell=True, - stdin=subprocess.PIPE, - stderr=subprocess.DEVNULL - ) - pager = os._wrap_close(io.TextIOWrapper(proc.stdin), proc) + proc = subprocess.Popen( + pager_cmd, + shell=True, + stdin=subprocess.PIPE, + stderr=subprocess.DEVNULL, + ) + pager = os._wrap_close( + io.TextIOWrapper(proc.stdin, encoding="utf-8"), proc + ) try: pager_encoding = pager.encoding or sys.stdout.encoding pager.write(strng) @@ -236,10 +241,10 @@ def pager_page(strng, start=0, screen_lines=0, pager_cmd=None): def page(data, start=0, screen_lines=0, pager_cmd=None): """Display content in a pager, piping through a pager after a certain length. - + data can be a mime-bundle dict, supplying multiple representations, keyed by mime-type, or text. - + Pager is dispatched via the `show_in_pager` IPython hook. If no hook is registered, `pager_page` will be used. 
""" @@ -275,7 +280,7 @@ def page_file(fname, start=0, pager_cmd=None): try: if start > 0: start -= 1 - page(open(fname).read(),start) + page(open(fname, encoding="utf-8").read(), start) except: print('Unable to show file',repr(fname)) diff --git a/contrib/python/ipython/py3/IPython/core/payloadpage.py b/contrib/python/ipython/py3/IPython/core/payloadpage.py index eb613445dd..4958108076 100644 --- a/contrib/python/ipython/py3/IPython/core/payloadpage.py +++ b/contrib/python/ipython/py3/IPython/core/payloadpage.py @@ -17,10 +17,9 @@ def page(strng, start=0, screen_lines=0, pager_cmd=None): Parameters ---------- strng : str or mime-dict - Text to page, or a mime-type keyed dict of already formatted data. - + Text to page, or a mime-type keyed dict of already formatted data. start : int - Starting line at which to place the display. + Starting line at which to place the display. """ # Some routines may auto-compute start offsets incorrectly and pass a @@ -42,7 +41,7 @@ def page(strng, start=0, screen_lines=0, pager_cmd=None): def install_payload_page(): """DEPRECATED, use show_in_pager hook - + Install this version of page as IPython.core.page.page. """ warnings.warn("""install_payload_page is deprecated. 
diff --git a/contrib/python/ipython/py3/IPython/core/prefilter.py b/contrib/python/ipython/py3/IPython/core/prefilter.py index bf801f999c..0038e5c673 100644 --- a/contrib/python/ipython/py3/IPython/core/prefilter.py +++ b/contrib/python/ipython/py3/IPython/core/prefilter.py @@ -120,7 +120,7 @@ class PrefilterManager(Configurable): def __init__(self, shell=None, **kwargs): super(PrefilterManager, self).__init__(shell=shell, **kwargs) self.shell = shell - self.init_transformers() + self._transformers = [] self.init_handlers() self.init_checkers() @@ -128,14 +128,6 @@ class PrefilterManager(Configurable): # API for managing transformers #------------------------------------------------------------------------- - def init_transformers(self): - """Create the default transformers.""" - self._transformers = [] - for transformer_cls in _default_transformers: - transformer_cls( - shell=self.shell, prefilter_manager=self, parent=self - ) - def sort_transformers(self): """Sort the transformers by priority. 
@@ -687,9 +679,6 @@ class EmacsHandler(PrefilterHandler): #----------------------------------------------------------------------------- -_default_transformers = [ -] - _default_checkers = [ EmacsChecker, MacroChecker, diff --git a/contrib/python/ipython/py3/IPython/core/profiledir.py b/contrib/python/ipython/py3/IPython/core/profiledir.py index ba8f82b7d9..cb4d39339a 100644 --- a/contrib/python/ipython/py3/IPython/core/profiledir.py +++ b/contrib/python/ipython/py3/IPython/core/profiledir.py @@ -7,6 +7,7 @@ import os import shutil import errno +from pathlib import Path from traitlets.config.configurable import LoggingConfigurable from ..paths import get_ipython_package_dir @@ -131,19 +132,20 @@ class ProfileDir(LoggingConfigurable): self.check_pid_dir() self.check_startup_dir() - def copy_config_file(self, config_file, path=None, overwrite=False): + def copy_config_file(self, config_file: str, path: Path, overwrite=False) -> bool: """Copy a default config file into the active profile directory. Default configuration files are kept in :mod:`IPython.core.profile`. This function moves these from that location to the working profile directory. 
""" - dst = os.path.join(self.location, config_file) - if os.path.isfile(dst) and not overwrite: + dst = Path(os.path.join(self.location, config_file)) + if dst.exists() and not overwrite: return False if path is None: path = os.path.join(get_ipython_package_dir(), u'core', u'profile', u'default') - src = os.path.join(path, config_file) + assert isinstance(path, Path) + src = path / config_file shutil.copy(src, dst) return True diff --git a/contrib/python/ipython/py3/IPython/core/pylabtools.py b/contrib/python/ipython/py3/IPython/core/pylabtools.py index c9c8e14aa2..68e100f7d0 100644 --- a/contrib/python/ipython/py3/IPython/core/pylabtools.py +++ b/contrib/python/ipython/py3/IPython/core/pylabtools.py @@ -185,8 +185,8 @@ def mpl_runner(safe_execfile): Parameters ---------- safe_execfile : function - This must be a function with the same interface as the - :meth:`safe_execfile` method of IPython. + This must be a function with the same interface as the + :meth:`safe_execfile` method of IPython. Returns ------- @@ -231,8 +231,8 @@ def _reshow_nbagg_figure(fig): """reshow an nbagg figure""" try: reshow = fig.canvas.manager.reshow - except AttributeError: - raise NotImplementedError() + except AttributeError as e: + raise NotImplementedError() from e else: reshow() @@ -241,7 +241,7 @@ def select_figure_formats(shell, formats, **kwargs): """Select figure formats for the inline backend. Parameters - ========== + ---------- shell : InteractiveShell The main IPython instance. 
formats : str or set @@ -391,7 +391,7 @@ def import_pylab(user_ns, import_all=True): # IPython symbols to add user_ns['figsize'] = figsize - from IPython.core.display import display + from IPython.display import display # Add display and getfigs to the user's namespace user_ns['display'] = display user_ns['getfigs'] = getfigs @@ -408,7 +408,6 @@ def configure_inline_support(shell, backend): Parameters ---------- shell : InteractiveShell instance - backend : matplotlib backend """ warnings.warn( @@ -418,6 +417,8 @@ def configure_inline_support(shell, backend): stacklevel=2, ) - from matplotlib_inline.backend_inline import configure_inline_support as configure_inline_support_orig + from matplotlib_inline.backend_inline import ( + configure_inline_support as configure_inline_support_orig, + ) configure_inline_support_orig(shell, backend) diff --git a/contrib/python/ipython/py3/IPython/core/release.py b/contrib/python/ipython/py3/IPython/core/release.py index 879d8cc4f8..4e8c82cdb7 100644 --- a/contrib/python/ipython/py3/IPython/core/release.py +++ b/contrib/python/ipython/py3/IPython/core/release.py @@ -12,18 +12,14 @@ # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- -# Name of the package for release purposes. This is the name which labels -# the tarballs and RPMs made by distutils, so it's best to lowercase it. -name = 'ipython' - # IPython version information. An empty _version_extra corresponds to a full # release. 'dev' as a _version_extra string means this is a development # version -_version_major = 7 -_version_minor = 33 +_version_major = 8 +_version_minor = 3 _version_patch = 0 -_version_extra = '.dev' -# _version_extra = 'b1' +_version_extra = ".dev" +# _version_extra = "rc1" _version_extra = "" # Uncomment this for full releases # Construct full version string from these. 
@@ -40,49 +36,6 @@ version_info = (_version_major, _version_minor, _version_patch, _version_extra) kernel_protocol_version_info = (5, 0) kernel_protocol_version = "%i.%i" % kernel_protocol_version_info -description = "IPython: Productive Interactive Computing" - -long_description = \ -""" -IPython provides a rich toolkit to help you make the most out of using Python -interactively. Its main components are: - -* A powerful interactive Python shell -* A `Jupyter <https://jupyter.org/>`_ kernel to work with Python code in Jupyter - notebooks and other interactive frontends. - -The enhanced interactive Python shells have the following main features: - -* Comprehensive object introspection. - -* Input history, persistent across sessions. - -* Caching of output results during a session with automatically generated - references. - -* Extensible tab completion, with support by default for completion of python - variables and keywords, filenames and function keywords. - -* Extensible system of 'magic' commands for controlling the environment and - performing many tasks related either to IPython or the operating system. - -* A rich configuration system with easy switching between different setups - (simpler than changing $PYTHONSTARTUP environment variables every time). - -* Session logging and reloading. - -* Extensible syntax processing for special purpose situations. - -* Access to the system shell with user-extensible alias system. - -* Easily embeddable in other Python programs and GUIs. - -* Integrated access to the pdb debugger and the Python profiler. - -The latest development version is always available from IPython's `GitHub -site <http://github.com/ipython>`_. 
-""" - license = 'BSD' authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'), @@ -99,21 +52,3 @@ authors = {'Fernando' : ('Fernando Perez','fperez.net@gmail.com'), author = 'The IPython Development Team' author_email = 'ipython-dev@python.org' - -url = 'https://ipython.org' - - -platforms = ['Linux','Mac OSX','Windows'] - -keywords = ['Interactive','Interpreter','Shell', 'Embedding'] - -classifiers = [ - 'Framework :: IPython', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: System :: Shells' - ] diff --git a/contrib/python/ipython/py3/IPython/core/shellapp.py b/contrib/python/ipython/py3/IPython/core/shellapp.py index c442658ae7..f737bcb56b 100644 --- a/contrib/python/ipython/py3/IPython/core/shellapp.py +++ b/contrib/python/ipython/py3/IPython/core/shellapp.py @@ -98,11 +98,6 @@ shell_aliases = dict( ) shell_aliases['cache-size'] = 'InteractiveShell.cache_size' -if traitlets.version_info < (5, 0): - # traitlets 4 doesn't handle lists on CLI - shell_aliases["ext"] = "InteractiveShellApp.extra_extension" - - #----------------------------------------------------------------------------- # Main classes and functions #----------------------------------------------------------------------------- @@ -126,17 +121,6 @@ class InteractiveShellApp(Configurable): help="A list of dotted module names of IPython extensions to load." ).tag(config=True) - extra_extension = Unicode( - "", - help=""" - DEPRECATED. Dotted module name of a single extra IPython extension to load. - - Only one extension can be added this way. - - Only used with traitlets < 5.0, plural extra_extensions list is used in traitlets 5. 
- """, - ).tag(config=True) - extra_extensions = List( DottedObjectName(), help=""" @@ -293,8 +277,6 @@ class InteractiveShellApp(Configurable): extensions = ( self.default_extensions + self.extensions + self.extra_extensions ) - if self.extra_extension: - extensions.append(self.extra_extension) for ext in extensions: try: self.log.info("Loading IPython extension: %s" % ext) diff --git a/contrib/python/ipython/py3/IPython/core/ultratb.py b/contrib/python/ipython/py3/IPython/core/ultratb.py index de85a1f8ea..4447080f35 100644 --- a/contrib/python/ipython/py3/IPython/core/ultratb.py +++ b/contrib/python/ipython/py3/IPython/core/ultratb.py @@ -89,39 +89,28 @@ Inheritance diagram: #***************************************************************************** -import dis import inspect -import keyword import linecache -import os import pydoc -import re import sys import time -import tokenize import traceback +from types import TracebackType +from typing import Tuple, List, Any, Optional -from tokenize import generate_tokens - -# For purposes of monkeypatching inspect to fix a bug in it. 
-from inspect import getsourcefile, getfile, getmodule, \ - ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode +import stack_data +from pygments.formatters.terminal256 import Terminal256Formatter +from pygments.styles import get_style_by_name # IPython's own modules from IPython import get_ipython from IPython.core import debugger from IPython.core.display_trap import DisplayTrap from IPython.core.excolors import exception_colors -from IPython.utils import PyColorize from IPython.utils import path as util_path from IPython.utils import py3compat -from IPython.utils.data import uniq_stable from IPython.utils.terminal import get_terminal_size -from logging import info, error, debug - -from importlib.util import source_from_cache - import IPython.utils.colorable as colorable # Globals @@ -134,276 +123,81 @@ INDENT_SIZE = 8 # to users of ultratb who are NOT running inside ipython. DEFAULT_SCHEME = 'NoColor' - -# Number of frame above which we are likely to have a recursion and will -# **attempt** to detect it. Made modifiable mostly to speedup test suite -# as detecting recursion is one of our slowest test -_FRAME_RECURSION_LIMIT = 500 - # --------------------------------------------------------------------------- # Code begins -# Utility functions -def inspect_error(): - """Print a message about internal inspect errors. - - These are unfortunately quite common.""" - - error('Internal Python error in the inspect module.\n' - 'Below is the traceback from this internal error.\n') - - -# This function is a monkeypatch we apply to the Python inspect module. We have -# now found when it's needed (see discussion on issue gh-1456), and we have a -# test case (IPython.core.tests.test_ultratb.ChangedPyFileTest) that fails if -# the monkeypatch is not applied. TK, Aug 2012. -def findsource(object): - """Return the entire source file and starting line number for an object. 
- - The argument may be a module, class, method, function, traceback, frame, - or code object. The source code is returned as a list of all the lines - in the file and the line number indexes a line in that list. An IOError - is raised if the source code cannot be retrieved. - - FIXED version with which we monkeypatch the stdlib to work around a bug.""" - - file = getsourcefile(object) or getfile(object) - # If the object is a frame, then trying to get the globals dict from its - # module won't work. Instead, the frame object itself has the globals - # dictionary. - globals_dict = None - if inspect.isframe(object): - # XXX: can this ever be false? - globals_dict = object.f_globals - else: - module = getmodule(object, file) - if module: - globals_dict = module.__dict__ - lines = linecache.getlines(file, globals_dict) - if not lines: - raise IOError('could not get source code') - - if ismodule(object): - return lines, 0 - - if isclass(object): - name = object.__name__ - pat = re.compile(r'^(\s*)class\s*' + name + r'\b') - # make some effort to find the best matching class definition: - # use the one with the least indentation, which is the one - # that's most probably not inside a function definition. 
- candidates = [] - for i, line in enumerate(lines): - match = pat.match(line) - if match: - # if it's at toplevel, it's already the best one - if line[0] == 'c': - return lines, i - # else add whitespace to candidate list - candidates.append((match.group(1), i)) - if candidates: - # this will sort by whitespace, and by line number, - # less whitespace first - candidates.sort() - return lines, candidates[0][1] - else: - raise IOError('could not find class definition') - - if ismethod(object): - object = object.__func__ - if isfunction(object): - object = object.__code__ - if istraceback(object): - object = object.tb_frame - if isframe(object): - object = object.f_code - if iscode(object): - if not hasattr(object, 'co_firstlineno'): - raise IOError('could not find function definition') - pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') - pmatch = pat.match - # fperez - fix: sometimes, co_firstlineno can give a number larger than - # the length of lines, which causes an error. Safeguard against that. - lnum = min(object.co_firstlineno, len(lines)) - 1 - while lnum > 0: - if pmatch(lines[lnum]): - break - lnum -= 1 - - return lines, lnum - raise IOError('could not find code object') - - -# Monkeypatch inspect to apply our bugfix. -def with_patch_inspect(f): - """ - Deprecated since IPython 6.0 - decorator for monkeypatching inspect.findsource - """ - - def wrapped(*args, **kwargs): - save_findsource = inspect.findsource - inspect.findsource = findsource - try: - return f(*args, **kwargs) - finally: - inspect.findsource = save_findsource - - return wrapped - - -def fix_frame_records_filenames(records): - """Try to fix the filenames in each record from inspect.getinnerframes(). - - Particularly, modules loaded from within zip files have useless filenames - attached to their code object, and inspect.getinnerframes() just uses it. 
- """ - fixed_records = [] - for frame, filename, line_no, func_name, lines, index in records: - # Look inside the frame's globals dictionary for __file__, - # which should be better. However, keep Cython filenames since - # we prefer the source filenames over the compiled .so file. - if not filename.endswith(('.pyx', '.pxd', '.pxi')): - better_fn = frame.f_globals.get('__file__', None) - if isinstance(better_fn, str): - # Check the type just in case someone did something weird with - # __file__. It might also be None if the error occurred during - # import. - filename = better_fn - fixed_records.append((frame, filename, line_no, func_name, lines, index)) - return fixed_records - - -@with_patch_inspect -def _fixed_getinnerframes(etb, context=1, tb_offset=0): - LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 - - records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) - # If the error is at the console, don't build any context, since it would - # otherwise produce 5 blank lines printed out (there is no file at the - # console) - rec_check = records[tb_offset:] - try: - rname = rec_check[0][1] - if rname == '<ipython console>' or rname.endswith('<string>'): - return rec_check - except IndexError: - pass - - aux = traceback.extract_tb(etb) - assert len(records) == len(aux) - for i, (file, lnum, _, _) in enumerate(aux): - maybeStart = lnum - 1 - context // 2 - start = max(maybeStart, 0) - end = start + context - lines = linecache.getlines(file)[start:end] - buf = list(records[i]) - buf[LNUM_POS] = lnum - buf[INDEX_POS] = lnum - 1 - start - buf[LINES_POS] = lines - records[i] = tuple(buf) - return records[tb_offset:] - # Helper function -- largely belongs to VerboseTB, but we need the same # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they # can be recognized properly by ipython.el's py-traceback-line-re # (SyntaxErrors have to be treated specially because they have no traceback) -def _format_traceback_lines(lnum, index, lines, 
Colors, lvals, _line_format): +def _format_traceback_lines(lines, Colors, has_colors: bool, lvals): """ Format tracebacks lines with pointing arrow, leading numbers... Parameters - ========== - - lnum: int - index: int - lines: list[string] - Colors: + ---------- + lines : list[Line] + Colors ColorScheme used. - lvals: bytes + lvals : str Values of local variables, already colored, to inject just after the error line. - _line_format: f (str) -> (str, bool) - return (colorized version of str, failure to do so) """ numbers_width = INDENT_SIZE - 1 res = [] - for i,line in enumerate(lines, lnum-index): - line = py3compat.cast_unicode(line) + for stack_line in lines: + if stack_line is stack_data.LINE_GAP: + res.append('%s (...)%s\n' % (Colors.linenoEm, Colors.Normal)) + continue - new_line, err = _line_format(line, 'str') - if not err: - line = new_line - - if i == lnum: + line = stack_line.render(pygmented=has_colors).rstrip('\n') + '\n' + lineno = stack_line.lineno + if stack_line.is_current: # This is the line with the error - pad = numbers_width - len(str(i)) - num = '%s%s' % (debugger.make_arrow(pad), str(lnum)) - line = '%s%s%s %s%s' % (Colors.linenoEm, num, - Colors.line, line, Colors.Normal) + pad = numbers_width - len(str(lineno)) + num = '%s%s' % (debugger.make_arrow(pad), str(lineno)) + start_color = Colors.linenoEm else: - num = '%*s' % (numbers_width, i) - line = '%s%s%s %s' % (Colors.lineno, num, - Colors.Normal, line) + num = '%*s' % (numbers_width, lineno) + start_color = Colors.lineno + + line = '%s%s%s %s' % (start_color, num, Colors.Normal, line) res.append(line) - if lvals and i == lnum: + if lvals and stack_line.is_current: res.append(lvals + '\n') return res -def is_recursion_error(etype, value, records): - try: - # RecursionError is new in Python 3.5 - recursion_error_type = RecursionError - except NameError: - recursion_error_type = RuntimeError - - # The default recursion limit is 1000, but some of that will be taken up - # by stack frames in 
IPython itself. >500 frames probably indicates - # a recursion error. - return (etype is recursion_error_type) \ - and "recursion" in str(value).lower() \ - and len(records) > _FRAME_RECURSION_LIMIT - -def find_recursion(etype, value, records): - """Identify the repeating stack frames from a RecursionError traceback - 'records' is a list as returned by VerboseTB.get_records() +def _format_filename(file, ColorFilename, ColorNormal, *, lineno=None): + """ + Format filename lines with `In [n]` if it's the nth code cell or `File *.py` if it's a module. - Returns (last_unique, repeat_length) + Parameters + ---------- + file : str + ColorFilename + ColorScheme's filename coloring to be used. + ColorNormal + ColorScheme's normal coloring to be used. """ - # This involves a bit of guesswork - we want to show enough of the traceback - # to indicate where the recursion is occurring. We guess that the innermost - # quarter of the traceback (250 frames by default) is repeats, and find the - # first frame (from in to out) that looks different. 
- if not is_recursion_error(etype, value, records): - return len(records), 0 - - # Select filename, lineno, func_name to track frames with - records = [r[1:4] for r in records] - inner_frames = records[-(len(records)//4):] - frames_repeated = set(inner_frames) - - last_seen_at = {} - longest_repeat = 0 - i = len(records) - for frame in reversed(records): - i -= 1 - if frame not in frames_repeated: - last_unique = i - break - - if frame in last_seen_at: - distance = last_seen_at[frame] - i - longest_repeat = max(longest_repeat, distance) - - last_seen_at[frame] = i + ipinst = get_ipython() + + if ipinst is not None and file in ipinst.compile._filename_map: + file = "[%s]" % ipinst.compile._filename_map[file] + tpl_link = f"Input {ColorFilename}In {{file}}{ColorNormal}" else: - last_unique = 0 # The whole traceback was recursion + file = util_path.compress_user( + py3compat.cast_unicode(file, util_path.fs_encoding) + ) + if lineno is None: + tpl_link = f"File {ColorFilename}{{file}}{ColorNormal}" + else: + tpl_link = f"File {ColorFilename}{{file}}:{{lineno}}{ColorNormal}" - return last_unique, longest_repeat + return tpl_link.format(file=file, lineno=lineno) #--------------------------------------------------------------------------- # Module classes @@ -413,7 +207,16 @@ class TBTools(colorable.Colorable): # Number of frames to skip when reporting tracebacks tb_offset = 0 - def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): + def __init__( + self, + color_scheme="NoColor", + call_pdb=False, + ostream=None, + parent=None, + config=None, + *, + debugger_cls=None, + ): # Whether to call the interactive pdb debugger after printing # tracebacks or not super(TBTools, self).__init__(parent=parent, config=config) @@ -433,9 +236,10 @@ class TBTools(colorable.Colorable): self.set_colors(color_scheme) self.old_scheme = color_scheme # save initial value for toggles + self.debugger_cls = debugger_cls or debugger.Pdb if call_pdb: - 
self.pdb = debugger.Pdb() + self.pdb = self.debugger_cls() else: self.pdb = None @@ -458,21 +262,26 @@ class TBTools(colorable.Colorable): ostream = property(_get_ostream, _set_ostream) - def get_parts_of_chained_exception(self, evalue): - def get_chained_exception(exception_value): - cause = getattr(exception_value, '__cause__', None) - if cause: - return cause - if getattr(exception_value, '__suppress_context__', False): - return None - return getattr(exception_value, '__context__', None) + @staticmethod + def _get_chained_exception(exception_value): + cause = getattr(exception_value, "__cause__", None) + if cause: + return cause + if getattr(exception_value, "__suppress_context__", False): + return None + return getattr(exception_value, "__context__", None) + + def get_parts_of_chained_exception( + self, evalue + ) -> Optional[Tuple[type, BaseException, TracebackType]]: - chained_evalue = get_chained_exception(evalue) + chained_evalue = self._get_chained_exception(evalue) if chained_evalue: return chained_evalue.__class__, chained_evalue, chained_evalue.__traceback__ + return None - def prepare_chained_exception_message(self, cause): + def prepare_chained_exception_message(self, cause) -> List[Any]: direct_cause = "\nThe above exception was the direct cause of the following exception:\n" exception_during_handling = "\nDuring handling of the above exception, another exception occurred:\n" @@ -482,6 +291,10 @@ class TBTools(colorable.Colorable): message = [[exception_during_handling]] return message + @property + def has_colors(self) -> bool: + return self.color_scheme_table.active_scheme_name.lower() != "nocolor" + def set_colors(self, *args, **kw): """Shorthand access to the color table scheme selector method.""" @@ -508,7 +321,7 @@ class TBTools(colorable.Colorable): """Convert a structured traceback (a list) to a string.""" return '\n'.join(stb) - def text(self, etype, value, tb, tb_offset=None, context=5): + def text(self, etype, value, tb, tb_offset: 
Optional[int] = None, context=5): """Return formatted traceback. Subclasses may override this if they add extra arguments. @@ -517,8 +330,9 @@ class TBTools(colorable.Colorable): tb_offset, context) return self.stb2text(tb_list) - def structured_traceback(self, etype, evalue, tb, tb_offset=None, - context=5, mode=None): + def structured_traceback( + self, etype, evalue, tb, tb_offset: Optional[int] = None, context=5, mode=None + ): """Return a list of traceback frames. Must be implemented by each class. @@ -532,7 +346,7 @@ class ListTB(TBTools): Calling requires 3 arguments: (etype, evalue, elist) as would be obtained by:: - + etype, evalue, tb = sys.exc_info() if tb: elist = traceback.extract_tb(tb) @@ -546,9 +360,6 @@ class ListTB(TBTools): Because they are meant to be called without a full traceback (only a list), instances of this class can't call the interactive pdb debugger.""" - def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None, parent=None, config=None): - TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, - ostream=ostream, parent=parent,config=config) def __call__(self, etype, value, elist): self.ostream.flush() @@ -561,28 +372,30 @@ class ListTB(TBTools): else: return None - def structured_traceback(self, etype, evalue, etb=None, tb_offset=None, - context=5): + def structured_traceback( + self, + etype: type, + evalue: BaseException, + etb: Optional[TracebackType] = None, + tb_offset: Optional[int] = None, + context=5, + ): """Return a color formatted string with the traceback info. Parameters ---------- etype : exception type - Type of the exception raised. - + Type of the exception raised. evalue : object - Data stored in the exception - - etb : object - If list: List of frames, see class docstring for details. - If Traceback: Traceback of the exception. - + Data stored in the exception + etb : list | TracebackType | None + If list: List of frames, see class docstring for details. 
+ If Traceback: Traceback of the exception. tb_offset : int, optional - Number of frames in the traceback to skip. If not given, the - instance evalue is used (set in constructor). - + Number of frames in the traceback to skip. If not given, the + instance evalue is used (set in constructor). context : int, optional - Number of lines of context information to print. + Number of lines of context information to print. Returns ------- @@ -602,6 +415,7 @@ class ListTB(TBTools): else: elist = [] tb_offset = self.tb_offset if tb_offset is None else tb_offset + assert isinstance(tb_offset, int) Colors = self.Colors out_list = [] if elist: @@ -650,21 +464,29 @@ class ListTB(TBTools): Colors = self.Colors list = [] for filename, lineno, name, line in extracted_list[:-1]: - item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ - (Colors.filename, filename, Colors.Normal, - Colors.lineno, lineno, Colors.Normal, - Colors.name, name, Colors.Normal) + item = " %s in %s%s%s\n" % ( + _format_filename( + filename, Colors.filename, Colors.Normal, lineno=lineno + ), + Colors.name, + name, + Colors.Normal, + ) if line: item += ' %s\n' % line.strip() list.append(item) # Emphasize the last entry filename, lineno, name, line = extracted_list[-1] - item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ - (Colors.normalEm, - Colors.filenameEm, filename, Colors.normalEm, - Colors.linenoEm, lineno, Colors.normalEm, - Colors.nameEm, name, Colors.normalEm, - Colors.Normal) + item = "%s %s in %s%s%s%s\n" % ( + Colors.normalEm, + _format_filename( + filename, Colors.filenameEm, Colors.normalEm, lineno=lineno + ), + Colors.nameEm, + name, + Colors.normalEm, + Colors.Normal, + ) if line: item += '%s %s%s\n' % (Colors.line, line.strip(), Colors.Normal) @@ -699,13 +521,22 @@ class ListTB(TBTools): lineno = value.lineno textline = linecache.getline(value.filename, value.lineno) else: - lineno = 'unknown' - textline = '' - list.append('%s File %s"%s"%s, line %s%s%s\n' % \ - (Colors.normalEm, - 
Colors.filenameEm, py3compat.cast_unicode(value.filename), Colors.normalEm, - Colors.linenoEm, lineno, Colors.Normal )) - if textline == '': + lineno = "unknown" + textline = "" + list.append( + "%s %s%s\n" + % ( + Colors.normalEm, + _format_filename( + value.filename, + Colors.filenameEm, + Colors.normalEm, + lineno=(None if lineno == "unknown" else lineno), + ), + Colors.Normal, + ) + ) + if textline == "": textline = py3compat.cast_unicode(value.text, "utf-8") if textline is not None: @@ -759,7 +590,7 @@ class ListTB(TBTools): Parameters ---------- etype : exception type - value : exception value + evalue : exception value """ # This method needs to use __call__ from *this* class, not the one from # a subclass whose signature or behavior may be different @@ -785,18 +616,34 @@ class VerboseTB(TBTools): traceback, to be used with alternate interpreters (because their own code would appear in the traceback).""" - def __init__(self, color_scheme='Linux', call_pdb=False, ostream=None, - tb_offset=0, long_header=False, include_vars=True, - check_cache=None, debugger_cls = None, - parent=None, config=None): + def __init__( + self, + color_scheme: str = "Linux", + call_pdb: bool = False, + ostream=None, + tb_offset: int = 0, + long_header: bool = False, + include_vars: bool = True, + check_cache=None, + debugger_cls=None, + parent=None, + config=None, + ): """Specify traceback offset, headers and color scheme. Define how many frames to drop from the tracebacks. 
Calling it with tb_offset=1 allows use of this handler in interpreters which will have their own code at the top of the traceback (VerboseTB will first remove that frame before printing the traceback info).""" - TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, - ostream=ostream, parent=parent, config=config) + TBTools.__init__( + self, + color_scheme=color_scheme, + call_pdb=call_pdb, + ostream=ostream, + parent=parent, + config=config, + debugger_cls=debugger_cls, + ) self.tb_offset = tb_offset self.long_header = long_header self.include_vars = include_vars @@ -809,97 +656,46 @@ class VerboseTB(TBTools): check_cache = linecache.checkcache self.check_cache = check_cache - self.debugger_cls = debugger_cls or debugger.Pdb self.skip_hidden = True - def format_records(self, records, last_unique, recursion_repeat): - """Format the stack frames of the traceback""" - frames = [] - - skipped = 0 - lastrecord = len(records) - 1 - for i, r in enumerate(records[: last_unique + recursion_repeat + 1]): - if self.skip_hidden: - if r[0].f_locals.get("__tracebackhide__", 0) and i != lastrecord: - skipped += 1 - continue - if skipped: - Colors = self.Colors # just a shorthand + quicker name lookup - ColorsNormal = Colors.Normal # used a lot - frames.append( - " %s[... skipping hidden %s frame]%s\n" - % (Colors.excName, skipped, ColorsNormal) - ) - skipped = 0 - - frames.append(self.format_record(*r)) - - if skipped: - Colors = self.Colors # just a shorthand + quicker name lookup - ColorsNormal = Colors.Normal # used a lot - frames.append( - " %s[... skipping hidden %s frame]%s\n" - % (Colors.excName, skipped, ColorsNormal) - ) - - if recursion_repeat: - frames.append('... 
last %d frames repeated, from the frame below ...\n' % recursion_repeat) - frames.append(self.format_record(*records[last_unique+recursion_repeat+1])) - - return frames - - def format_record(self, frame, file, lnum, func, lines, index): + def format_record(self, frame_info): """Format a single stack frame""" Colors = self.Colors # just a shorthand + quicker name lookup ColorsNormal = Colors.Normal # used a lot - col_scheme = self.color_scheme_table.active_scheme_name - indent = ' ' * INDENT_SIZE - em_normal = '%s\n%s%s' % (Colors.valEm, indent, ColorsNormal) - undefined = '%sundefined%s' % (Colors.em, ColorsNormal) - tpl_link = '%s%%s%s' % (Colors.filenameEm, ColorsNormal) - tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, - ColorsNormal) - tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ - (Colors.vName, Colors.valEm, ColorsNormal) - tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) - tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, - Colors.vName, ColorsNormal) - tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) - - if not file: - file = '?' - elif file.startswith(str("<")) and file.endswith(str(">")): - # Not a real filename, no problem... - pass - elif not os.path.isabs(file): - # Try to make the filename absolute by trying all - # sys.path entries (which is also what linecache does) - for dirname in sys.path: - try: - fullname = os.path.join(dirname, file) - if os.path.isfile(fullname): - file = os.path.abspath(fullname) - break - except Exception: - # Just in case that sys.path contains very - # strange entries... - pass - - file = py3compat.cast_unicode(file, util_path.fs_encoding) - link = tpl_link % util_path.compress_user(file) - args, varargs, varkw, locals_ = inspect.getargvalues(frame) - - if func == '?': - call = '' - elif func == '<module>': - call = tpl_call % (func, '') + + if isinstance(frame_info, stack_data.RepeatedFrames): + return ' %s[... 
skipping similar frames: %s]%s\n' % ( + Colors.excName, frame_info.description, ColorsNormal) + + indent = " " * INDENT_SIZE + em_normal = "%s\n%s%s" % (Colors.valEm, indent, ColorsNormal) + tpl_call = f"in {Colors.vName}{{file}}{Colors.valEm}{{scope}}{ColorsNormal}" + tpl_call_fail = "in %s%%s%s(***failed resolving arguments***)%s" % ( + Colors.vName, + Colors.valEm, + ColorsNormal, + ) + tpl_name_val = "%%s %s= %%s%s" % (Colors.valEm, ColorsNormal) + + link = _format_filename( + frame_info.filename, + Colors.filenameEm, + ColorsNormal, + lineno=frame_info.lineno, + ) + args, varargs, varkw, locals_ = inspect.getargvalues(frame_info.frame) + + func = frame_info.executing.code_qualname() + if func == "<module>": + call = tpl_call.format(file=func, scope="") else: # Decide whether to include variable details or not var_repr = eqrepr if self.include_vars else nullrepr try: - call = tpl_call % (func, inspect.formatargvalues(args, - varargs, varkw, - locals_, formatvalue=var_repr)) + scope = inspect.formatargvalues( + args, varargs, varkw, locals_, formatvalue=var_repr + ) + call = tpl_call.format(file=func, scope=scope) except KeyError: # This happens in situations like errors inside generator # expressions, where local variables are listed in the @@ -919,111 +715,26 @@ class VerboseTB(TBTools): # disabled. call = tpl_call_fail % func - # Don't attempt to tokenize binary files. - if file.endswith(('.so', '.pyd', '.dll')): - return '%s %s\n' % (link, call) - - elif file.endswith(('.pyc', '.pyo')): - # Look up the corresponding source file. - try: - file = source_from_cache(file) - except ValueError: - # Failed to get the source file for some reason - # E.g. https://github.com/ipython/ipython/issues/9486 - return '%s %s\n' % (link, call) - - def linereader(file=file, lnum=[lnum], getline=linecache.getline): - line = getline(file, lnum[0]) - lnum[0] += 1 - return line - - # Build the list of names on this line of code where the exception - # occurred. 
- try: - names = [] - name_cont = False - - for token_type, token, start, end, line in generate_tokens(linereader): - # build composite names - if token_type == tokenize.NAME and token not in keyword.kwlist: - if name_cont: - # Continuation of a dotted name - try: - names[-1].append(token) - except IndexError: - names.append([token]) - name_cont = False - else: - # Regular new names. We append everything, the caller - # will be responsible for pruning the list later. It's - # very tricky to try to prune as we go, b/c composite - # names can fool us. The pruning at the end is easy - # to do (or the caller can print a list with repeated - # names if so desired. - names.append([token]) - elif token == '.': - name_cont = True - elif token_type == tokenize.NEWLINE: - break - - except (IndexError, UnicodeDecodeError, SyntaxError): - # signals exit of tokenizer - # SyntaxError can occur if the file is not actually Python - # - see gh-6300 - pass - except tokenize.TokenError as msg: - # Tokenizing may fail for various reasons, many of which are - # harmless. (A good example is when the line in question is the - # close of a triple-quoted string, cf gh-6864). We don't want to - # show this to users, but want make it available for debugging - # purposes. - _m = ("An unexpected error occurred while tokenizing input\n" - "The following traceback may be corrupted or invalid\n" - "The error message is: %s\n" % msg) - debug(_m) - - # Join composite names (e.g. 
"dict.fromkeys") - names = ['.'.join(n) for n in names] - # prune names list of duplicates, but keep the right order - unique_names = uniq_stable(names) - - # Start loop over vars lvals = '' lvals_list = [] if self.include_vars: - for name_full in unique_names: - name_base = name_full.split('.', 1)[0] - if name_base in frame.f_code.co_varnames: - if name_base in locals_: - try: - value = repr(eval(name_full, locals_)) - except: - value = undefined - else: - value = undefined - name = tpl_local_var % name_full - else: - if name_base in frame.f_globals: - try: - value = repr(eval(name_full, frame.f_globals)) - except: - value = undefined - else: - value = undefined - name = tpl_global_var % name_full - lvals_list.append(tpl_name_val % (name, value)) + try: + # we likely want to fix stackdata at some point, but + # still need a workaround. + fibp = frame_info.variables_in_executing_piece + for var in fibp: + lvals_list.append(tpl_name_val % (var.name, repr(var.value))) + except Exception: + lvals_list.append( + "Exception trying to inspect frame. No more locals available." 
+ ) if lvals_list: lvals = '%s%s' % (indent, em_normal.join(lvals_list)) - level = '%s %s\n' % (link, call) + result = "%s, %s\n" % (link, call) - if index is None: - return level - else: - _line_format = PyColorize.Parser(style=col_scheme, parent=self).format2 - return '%s%s' % (level, ''.join( - _format_traceback_lines(lnum, index, lines, Colors, lvals, - _line_format))) + result += ''.join(_format_traceback_lines(frame_info.lines, Colors, self.has_colors, lvals)) + return result def prepare_header(self, etype, long_version=False): colors = self.Colors # just a shorthand + quicker name lookup @@ -1061,7 +772,14 @@ class VerboseTB(TBTools): return ['%s%s%s: %s' % (colors.excName, etype_str, colorsnormal, py3compat.cast_unicode(evalue_str))] - def format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset): + def format_exception_as_a_whole( + self, + etype: type, + evalue: BaseException, + etb: Optional[TracebackType], + number_of_lines_of_context, + tb_offset: Optional[int], + ): """Formats the header, traceback and exception message for a single exception. 
This may be called multiple times by Python 3 exception chaining @@ -1075,52 +793,75 @@ class VerboseTB(TBTools): pass tb_offset = self.tb_offset if tb_offset is None else tb_offset + assert isinstance(tb_offset, int) head = self.prepare_header(etype, self.long_header) - records = self.get_records(etb, number_of_lines_of_context, tb_offset) + records = ( + self.get_records(etb, number_of_lines_of_context, tb_offset) if etb else [] + ) - - last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records) - - frames = self.format_records(records, last_unique, recursion_repeat) + frames = [] + skipped = 0 + lastrecord = len(records) - 1 + for i, r in enumerate(records): + if not isinstance(r, stack_data.RepeatedFrames) and self.skip_hidden: + if r.frame.f_locals.get("__tracebackhide__", 0) and i != lastrecord: + skipped += 1 + continue + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) + skipped = 0 + frames.append(self.format_record(r)) + if skipped: + Colors = self.Colors # just a shorthand + quicker name lookup + ColorsNormal = Colors.Normal # used a lot + frames.append( + " %s[... 
skipping hidden %s frame]%s\n" + % (Colors.excName, skipped, ColorsNormal) + ) formatted_exception = self.format_exception(etype, evalue) if records: - filepath, lnum = records[-1][1:3] - filepath = os.path.abspath(filepath) + frame_info = records[-1] ipinst = get_ipython() if ipinst is not None: - ipinst.hooks.synchronize_with_editor(filepath, lnum, 0) + ipinst.hooks.synchronize_with_editor(frame_info.filename, frame_info.lineno, 0) return [[head] + frames + [''.join(formatted_exception[0])]] - def get_records(self, etb, number_of_lines_of_context, tb_offset): - try: - # Try the default getinnerframes and Alex's: Alex's fixes some - # problems, but it generates empty tracebacks for console errors - # (5 blanks lines) where none should be returned. - return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset) - except UnicodeDecodeError: - # This can occur if a file's encoding magic comment is wrong. - # I can't see a way to recover without duplicating a bunch of code - # from the stdlib traceback module. --TK - error('\nUnicodeDecodeError while processing traceback.\n') - return None - except: - # FIXME: I've been getting many crash reports from python 2.3 - # users, traceable to inspect.py. If I can find a small test-case - # to reproduce this, I should either write a better workaround or - # file a bug report against inspect (if that's the real problem). - # So far, I haven't been able to find an isolated example to - # reproduce the problem. 
- inspect_error() - traceback.print_exc(file=self.ostream) - info('\nUnfortunately, your original traceback can not be constructed.\n') - return None - - def structured_traceback(self, etype, evalue, etb, tb_offset=None, - number_of_lines_of_context=5): + def get_records( + self, etb: TracebackType, number_of_lines_of_context: int, tb_offset: int + ): + assert etb is not None + context = number_of_lines_of_context - 1 + after = context // 2 + before = context - after + if self.has_colors: + style = get_style_by_name("default") + style = stack_data.style_with_executing_node(style, "") + formatter = Terminal256Formatter(style=style) + else: + formatter = None + options = stack_data.Options( + before=before, + after=after, + pygments_formatter=formatter, + ) + return list(stack_data.FrameInfo.stack_data(etb, options=options))[tb_offset:] + + def structured_traceback( + self, + etype: type, + evalue: Optional[BaseException], + etb: Optional[TracebackType], + tb_offset: Optional[int] = None, + number_of_lines_of_context: int = 5, + ): """Return a nice text document describing the traceback.""" - formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context, tb_offset) @@ -1133,6 +874,7 @@ class VerboseTB(TBTools): formatted_exceptions = formatted_exception exception = self.get_parts_of_chained_exception(evalue) if exception: + assert evalue is not None formatted_exceptions += self.prepare_chained_exception_message(evalue.__cause__) etype, evalue, etb = exception else: @@ -1157,7 +899,7 @@ class VerboseTB(TBTools): return structured_traceback_parts - def debugger(self, force=False): + def debugger(self, force: bool = False): """Call up the pdb debugger if desired, always clean up the tb reference. 
@@ -1191,6 +933,7 @@ class VerboseTB(TBTools): else: etb = self.tb = sys.last_traceback while self.tb is not None and self.tb.tb_next is not None: + assert self.tb.tb_next is not None self.tb = self.tb.tb_next if etb and etb.tb_next: etb = etb.tb_next @@ -1236,6 +979,8 @@ class FormattedTB(VerboseTB, ListTB): occurs with python programs that themselves execute other python code, like Python shells). """ + mode: str + def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False, ostream=None, tb_offset=0, long_header=False, include_vars=False, @@ -1282,8 +1027,7 @@ class FormattedTB(VerboseTB, ListTB): """Convert a structured traceback (a list) to a string.""" return self.tb_join_char.join(stb) - - def set_mode(self, mode=None): + def set_mode(self, mode: Optional[str] = None): """Switch to the desired mode. If mode is not specified, cycles through the available modes.""" @@ -1293,9 +1037,12 @@ class FormattedTB(VerboseTB, ListTB): len(self.valid_modes) self.mode = self.valid_modes[new_idx] elif mode not in self.valid_modes: - raise ValueError('Unrecognized mode in FormattedTB: <' + mode + '>\n' - 'Valid modes: ' + str(self.valid_modes)) + raise ValueError( + "Unrecognized mode in FormattedTB: <" + mode + ">\n" + "Valid modes: " + str(self.valid_modes) + ) else: + assert isinstance(mode, str) self.mode = mode # include variable details only in 'Verbose' mode self.include_vars = (self.mode == self.valid_modes[2]) @@ -1340,7 +1087,7 @@ class AutoFormattedTB(FormattedTB): - tb_offset: the number of frames to skip over in the stack, on a per-call basis (this overrides temporarily the instance's tb_offset - given at initialization time. """ + given at initialization time.""" if out is None: out = self.ostream @@ -1357,6 +1104,10 @@ class AutoFormattedTB(FormattedTB): def structured_traceback(self, etype=None, value=None, tb=None, tb_offset=None, number_of_lines_of_context=5): + + etype: type + value: BaseException + # tb: TracebackType or tupleof tb types ? 
if etype is None: etype, value, tb = sys.exc_info() if isinstance(tb, tuple): diff --git a/contrib/python/ipython/py3/IPython/display.py b/contrib/python/ipython/py3/IPython/display.py index 7d248ba023..b7f64f25c9 100644 --- a/contrib/python/ipython/py3/IPython/display.py +++ b/contrib/python/ipython/py3/IPython/display.py @@ -1,16 +1,44 @@ """Public API for display tools in IPython. """ -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Imports -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- -from IPython.core.display import * +from IPython.core.display_functions import * +from IPython.core.display import ( + display_pretty, + display_html, + display_markdown, + display_svg, + display_png, + display_jpeg, + display_latex, + display_json, + display_javascript, + display_pdf, + DisplayObject, + TextDisplayObject, + Pretty, + HTML, + Markdown, + Math, + Latex, + SVG, + ProgressBar, + JSON, + GeoJSON, + Javascript, + Image, + set_matplotlib_formats, + set_matplotlib_close, + Video, +) from IPython.lib.display import * diff --git a/contrib/python/ipython/py3/IPython/extensions/autoreload.py b/contrib/python/ipython/py3/IPython/extensions/autoreload.py index ada680fcf0..816d2f35ea 100644 --- a/contrib/python/ipython/py3/IPython/extensions/autoreload.py 
+++ b/contrib/python/ipython/py3/IPython/extensions/autoreload.py @@ -48,6 +48,11 @@ The following magic commands are provided: Reload all modules (except those excluded by ``%aimport``) every time before executing the Python code typed. +``%autoreload 3`` + + Reload all modules AND autoload newly added objects + every time before executing the Python code typed. + ``%aimport`` List modules which are to be automatically imported or not to be imported. @@ -92,23 +97,23 @@ Some of the known remaining caveats are: - C extension modules cannot be reloaded, and so cannot be autoreloaded. """ -skip_doctest = True +__skip_doctest__ = True -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Copyright (C) 2000 Thomas Heller # Copyright (C) 2008 Pauli Virtanen <pav@iki.fi> # Copyright (C) 2012 The IPython Development Team # # Distributed under the terms of the BSD License. The full license is in # the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # # This IPython module is written by Pauli Virtanen, based on the autoreload # code by Thomas Heller. 
-#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- # Imports -#----------------------------------------------------------------------------- +# ----------------------------------------------------------------------------- import os import sys @@ -116,22 +121,25 @@ import traceback import types import weakref import gc -from importlib import import_module +from importlib import import_module, reload from importlib.util import source_from_cache -from imp import reload -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Autoreload functionality -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ -class ModuleReloader(object): + +class ModuleReloader: enabled = False """Whether this reloader is enabled""" check_all = True """Autoreload all modules, not just those listed in 'modules'""" - def __init__(self): + autoload_obj = False + """Autoreload all modules AND autoload all new objects""" + + def __init__(self, shell=None): # Modules that failed to reload: {module: mtime-on-failed-reload, ...} self.failed = {} # Modules specially marked as autoreloadable. 
@@ -142,6 +150,7 @@ class ModuleReloader(object): self.old_objects = {} # Module modification timestamps self.modules_mtimes = {} + self.shell = shell # Cache module modification times self.check(check_all=True, do_reload=False) @@ -176,22 +185,22 @@ class ModuleReloader(object): self.mark_module_reloadable(module_name) import_module(module_name) - top_name = module_name.split('.')[0] + top_name = module_name.split(".")[0] top_module = sys.modules[top_name] return top_module, top_name def filename_and_mtime(self, module): - if not hasattr(module, '__file__') or module.__file__ is None: + if not hasattr(module, "__file__") or module.__file__ is None: return None, None - if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']: + if getattr(module, "__name__", None) in [None, "__mp_main__", "__main__"]: # we cannot reload(__main__) or reload(__mp_main__) return None, None filename = module.__file__ path, ext = os.path.splitext(filename) - if ext.lower() == '.py': + if ext.lower() == ".py": py_filename = filename else: try: @@ -242,21 +251,35 @@ class ModuleReloader(object): # If we've reached this point, we should try to reload the module if do_reload: try: - superreload(m, reload, self.old_objects) + if self.autoload_obj: + superreload(m, reload, self.old_objects, self.shell) + else: + superreload(m, reload, self.old_objects) if py_filename in self.failed: del self.failed[py_filename] except: - print("[autoreload of %s failed: %s]" % ( - modname, traceback.format_exc(10)), file=sys.stderr) + print( + "[autoreload of {} failed: {}]".format( + modname, traceback.format_exc(10) + ), + file=sys.stderr, + ) self.failed[py_filename] = pymtime -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ # superreload -#------------------------------------------------------------------------------ +# 
------------------------------------------------------------------------------ -func_attrs = ['__code__', '__defaults__', '__doc__', - '__closure__', '__globals__', '__dict__'] +func_attrs = [ + "__code__", + "__defaults__", + "__doc__", + "__closure__", + "__globals__", + "__dict__", +] def update_function(old, new): @@ -272,7 +295,7 @@ def update_instances(old, new): """Use garbage collector to find all instances that refer to the old class definition and update their __class__ to point to the new class definition""" - + refs = gc.get_referrers(old) for ref in refs: @@ -298,20 +321,25 @@ def update_class(old, new): except (AttributeError, TypeError): pass continue + except ValueError: + # can't compare nested structures containing + # numpy arrays using `==` + pass - if update_generic(old_obj, new_obj): continue + if update_generic(old_obj, new_obj): + continue try: setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): - pass # skip non-writable attributes + pass # skip non-writable attributes for key in list(new.__dict__.keys()): if key not in list(old.__dict__.keys()): try: setattr(old, key, getattr(new, key)) except (AttributeError, TypeError): - pass # skip non-writable attributes + pass # skip non-writable attributes # update all instances of class update_instances(old, new) @@ -329,16 +357,18 @@ def isinstance2(a, b, typ): UPDATE_RULES = [ - (lambda a, b: isinstance2(a, b, type), - update_class), - (lambda a, b: isinstance2(a, b, types.FunctionType), - update_function), - (lambda a, b: isinstance2(a, b, property), - update_property), + (lambda a, b: isinstance2(a, b, type), update_class), + (lambda a, b: isinstance2(a, b, types.FunctionType), update_function), + (lambda a, b: isinstance2(a, b, property), update_property), ] -UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType), - lambda a, b: update_function(a.__func__, b.__func__)), -]) +UPDATE_RULES.extend( + [ + ( + lambda a, b: isinstance2(a, b, types.MethodType), + 
lambda a, b: update_function(a.__func__, b.__func__), + ), + ] +) def update_generic(a, b): @@ -349,14 +379,45 @@ def update_generic(a, b): return False -class StrongRef(object): +class StrongRef: def __init__(self, obj): self.obj = obj + def __call__(self): return self.obj -def superreload(module, reload=reload, old_objects=None): +mod_attrs = [ + "__name__", + "__doc__", + "__package__", + "__loader__", + "__spec__", + "__file__", + "__cached__", + "__builtins__", +] + + +def append_obj(module, d, name, obj, autoload=False): + in_module = hasattr(obj, "__module__") and obj.__module__ == module.__name__ + if autoload: + # check needed for module global built-ins + if not in_module and name in mod_attrs: + return False + else: + if not in_module: + return False + + key = (module.__name__, name) + try: + d.setdefault(key, []).append(weakref.ref(obj)) + except TypeError: + pass + return True + + +def superreload(module, reload=reload, old_objects=None, shell=None): """Enhanced version of the builtin reload function. 
superreload remembers objects previously in the module, and @@ -371,7 +432,7 @@ def superreload(module, reload=reload, old_objects=None): # collect old objects in the module for name, obj in list(module.__dict__.items()): - if not hasattr(obj, '__module__') or obj.__module__ != module.__name__: + if not append_obj(module, old_objects, name, obj): continue key = (module.__name__, name) try: @@ -385,8 +446,8 @@ def superreload(module, reload=reload, old_objects=None): old_dict = module.__dict__.copy() old_name = module.__name__ module.__dict__.clear() - module.__dict__['__name__'] = old_name - module.__dict__['__loader__'] = old_dict['__loader__'] + module.__dict__["__name__"] = old_name + module.__dict__["__loader__"] = old_dict["__loader__"] except (TypeError, AttributeError, KeyError): pass @@ -400,12 +461,21 @@ def superreload(module, reload=reload, old_objects=None): # iterate over all objects and update functions & classes for name, new_obj in list(module.__dict__.items()): key = (module.__name__, name) - if key not in old_objects: continue + if key not in old_objects: + # here 'shell' acts both as a flag and as an output var + if ( + shell is None + or name == "Enum" + or not append_obj(module, old_objects, name, new_obj, True) + ): + continue + shell.user_ns[name] = new_obj new_refs = [] for old_ref in old_objects[key]: old_obj = old_ref() - if old_obj is None: continue + if old_obj is None: + continue new_refs.append(old_ref) update_generic(old_obj, new_obj) @@ -416,22 +486,25 @@ def superreload(module, reload=reload, old_objects=None): return module -#------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ # IPython connectivity -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ from IPython.core.magic import Magics, magics_class, line_magic 
+ @magics_class class AutoreloadMagics(Magics): def __init__(self, *a, **kw): - super(AutoreloadMagics, self).__init__(*a, **kw) - self._reloader = ModuleReloader() + super().__init__(*a, **kw) + self._reloader = ModuleReloader(self.shell) self._reloader.check_all = False + self._reloader.autoload_obj = False self.loaded_modules = set(sys.modules) @line_magic - def autoreload(self, parameter_s=''): + def autoreload(self, parameter_s=""): r"""%autoreload => Reload modules automatically %autoreload @@ -475,19 +548,24 @@ class AutoreloadMagics(Magics): autoreloaded. """ - if parameter_s == '': + if parameter_s == "": self._reloader.check(True) - elif parameter_s == '0': + elif parameter_s == "0": self._reloader.enabled = False - elif parameter_s == '1': + elif parameter_s == "1": self._reloader.check_all = False self._reloader.enabled = True - elif parameter_s == '2': + elif parameter_s == "2": + self._reloader.check_all = True + self._reloader.enabled = True + self._reloader.enabled = True + elif parameter_s == "3": self._reloader.check_all = True self._reloader.enabled = True + self._reloader.autoload_obj = True @line_magic - def aimport(self, parameter_s='', stream=None): + def aimport(self, parameter_s="", stream=None): """%aimport => Import modules for automatic reloading. 
%aimport @@ -511,13 +589,13 @@ class AutoreloadMagics(Magics): if self._reloader.check_all: stream.write("Modules to reload:\nall-except-skipped\n") else: - stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload)) - stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip)) - elif modname.startswith('-'): + stream.write("Modules to reload:\n%s\n" % " ".join(to_reload)) + stream.write("\nModules to skip:\n%s\n" % " ".join(to_skip)) + elif modname.startswith("-"): modname = modname[1:] self._reloader.mark_module_skipped(modname) else: - for _module in ([_.strip() for _ in modname.split(',')]): + for _module in [_.strip() for _ in modname.split(",")]: top_module, top_name = self._reloader.aimport_module(_module) # Inject module to user namespace @@ -531,8 +609,7 @@ class AutoreloadMagics(Magics): pass def post_execute_hook(self): - """Cache the modification times of any modules imported in this execution - """ + """Cache the modification times of any modules imported in this execution""" newly_loaded_modules = set(sys.modules) - self.loaded_modules for modname in newly_loaded_modules: _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname]) @@ -546,5 +623,5 @@ def load_ipython_extension(ip): """Load the extension in IPython.""" auto_reload = AutoreloadMagics(ip) ip.register_magics(auto_reload) - ip.events.register('pre_run_cell', auto_reload.pre_run_cell) - ip.events.register('post_execute', auto_reload.post_execute_hook) + ip.events.register("pre_run_cell", auto_reload.pre_run_cell) + ip.events.register("post_execute", auto_reload.post_execute_hook) diff --git a/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py b/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py deleted file mode 100644 index 3c88e7c2a1..0000000000 --- a/contrib/python/ipython/py3/IPython/extensions/cythonmagic.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -**DEPRECATED** - -The cython magic has been integrated into Cython itself, -which is 
now released in version 0.21. - -cf github `Cython` organisation, `Cython` repo, under the -file `Cython/Build/IpythonMagic.py` -""" -#----------------------------------------------------------------------------- -# Copyright (C) 2010-2011, IPython Development Team. -#----------------------------------------------------------------------------- - -import warnings - -## still load the magic in IPython 3.x, remove completely in future versions. -def load_ipython_extension(ip): - """Load the extension in IPython.""" - - warnings.warn("""The Cython magic has been moved to the Cython package""") diff --git a/contrib/python/ipython/py3/IPython/extensions/rmagic.py b/contrib/python/ipython/py3/IPython/extensions/rmagic.py deleted file mode 100644 index ec5763972e..0000000000 --- a/contrib/python/ipython/py3/IPython/extensions/rmagic.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- - -#----------------------------------------------------------------------------- -# Copyright (C) 2012 The IPython Development Team -#----------------------------------------------------------------------------- - -import warnings - -def load_ipython_extension(ip): - """Load the extension in IPython.""" - warnings.warn("The rmagic extension in IPython has moved to " - "`rpy2.ipython`, please see `rpy2` documentation.") diff --git a/contrib/python/ipython/py3/IPython/extensions/storemagic.py b/contrib/python/ipython/py3/IPython/extensions/storemagic.py index 51b79ad314..d9d00f14b9 100644 --- a/contrib/python/ipython/py3/IPython/extensions/storemagic.py +++ b/contrib/python/ipython/py3/IPython/extensions/storemagic.py @@ -17,6 +17,7 @@ import inspect, os, sys, textwrap from IPython.core.error import UsageError from IPython.core.magic import Magics, magics_class, line_magic +from IPython.testing.skipdoctest import skip_doctest from traitlets import Bool @@ -74,6 +75,7 @@ class StoreMagics(Magics): if self.autorestore: restore_data(self.shell) + @skip_doctest @line_magic def store(self, 
parameter_s=''): """Lightweight persistence for python variables. @@ -82,6 +84,7 @@ class StoreMagics(Magics): In [1]: l = ['hello',10,'world'] In [2]: %store l + Stored 'l' (list) In [3]: exit (IPython session is closed and started again...) @@ -126,13 +129,13 @@ class StoreMagics(Magics): if 'd' in opts: try: todel = args[0] - except IndexError: - raise UsageError('You must provide the variable to forget') + except IndexError as e: + raise UsageError('You must provide the variable to forget') from e else: try: del db['autorestore/' + todel] - except: - raise UsageError("Can't delete variable '%s'" % todel) + except BaseException as e: + raise UsageError("Can't delete variable '%s'" % todel) from e # reset elif 'z' in opts: for k in db.keys('autorestore/*'): @@ -173,12 +176,12 @@ class StoreMagics(Magics): # default action - store the variable else: # %store foo >file.txt or >>file.txt - if len(args) > 1 and args[1].startswith('>'): - fnam = os.path.expanduser(args[1].lstrip('>').lstrip()) - if args[1].startswith('>>'): - fil = open(fnam, 'a') + if len(args) > 1 and args[1].startswith(">"): + fnam = os.path.expanduser(args[1].lstrip(">").lstrip()) + if args[1].startswith(">>"): + fil = open(fnam, "a", encoding="utf-8") else: - fil = open(fnam, 'w') + fil = open(fnam, "w", encoding="utf-8") with fil: obj = ip.ev(args[0]) print("Writing '%s' (%s) to file '%s'." 
% (args[0], @@ -203,8 +206,8 @@ class StoreMagics(Magics): name = arg try: cmd = ip.alias_manager.retrieve_alias(name) - except ValueError: - raise UsageError("Unknown variable '%s'" % name) + except ValueError as e: + raise UsageError("Unknown variable '%s'" % name) from e staliases = db.get('stored_aliases',{}) staliases[name] = cmd diff --git a/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py b/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py deleted file mode 100644 index e6a83cd34b..0000000000 --- a/contrib/python/ipython/py3/IPython/extensions/sympyprinting.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -**DEPRECATED** - -A print function that pretty prints sympy Basic objects. - -:moduleauthor: Brian Granger - -Usage -===== - -Once the extension is loaded, Sympy Basic objects are automatically -pretty-printed. - -As of SymPy 0.7.2, maintenance of this extension has moved to SymPy under -sympy.interactive.ipythonprinting, any modifications to account for changes to -SymPy should be submitted to SymPy rather than changed here. This module is -maintained here for backwards compatibility with old SymPy versions. 
- -""" -#----------------------------------------------------------------------------- -# Copyright (C) 2008 The IPython Development Team -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import warnings - -def load_ipython_extension(ip): - warnings.warn("The sympyprinting extension has moved to `sympy`, " - "use `from sympy import init_printing; init_printing()`") diff --git a/contrib/python/ipython/py3/IPython/external/__init__.py b/contrib/python/ipython/py3/IPython/external/__init__.py index 1c8c546f11..eedc338eb8 100644 --- a/contrib/python/ipython/py3/IPython/external/__init__.py +++ b/contrib/python/ipython/py3/IPython/external/__init__.py @@ -2,4 +2,6 @@ This package contains all third-party modules bundled with IPython. """ -__all__ = [] +from typing import List + +__all__: List[str] = [] diff --git a/contrib/python/ipython/py3/IPython/external/decorators/__init__.py b/contrib/python/ipython/py3/IPython/external/decorators/__init__.py deleted file mode 100644 index 1db80edd35..0000000000 --- a/contrib/python/ipython/py3/IPython/external/decorators/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -try: - from numpy.testing import KnownFailure, knownfailureif -except ImportError: - from ._decorators import knownfailureif - try: - from ._numpy_testing_noseclasses import KnownFailure - except ImportError: - pass diff --git a/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py b/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py deleted file mode 100644 index 18f847adad..0000000000 --- a/contrib/python/ipython/py3/IPython/external/decorators/_decorators.py +++ /dev/null @@ -1,143 +0,0 @@ -""" -Decorators for labeling and modifying behavior of test objects. 
- -Decorators that merely return a modified version of the original -function object are straightforward. Decorators that return a new -function object need to use -:: - - nose.tools.make_decorator(original_function)(decorator) - -in returning the decorator, in order to preserve meta-data such as -function name, setup and teardown functions and so on - see -``nose.tools`` for more information. - -""" - -# IPython changes: make this work if numpy not available -# Original code: -try: - from ._numpy_testing_noseclasses import KnownFailureTest -except: - pass - -# End IPython changes - - -def skipif(skip_condition, msg=None): - """ - Make function raise SkipTest exception if a given condition is true. - - If the condition is a callable, it is used at runtime to dynamically - make the decision. This is useful for tests that may require costly - imports, to delay the cost until the test suite is actually executed. - - Parameters - ---------- - skip_condition : bool or callable - Flag to determine whether to skip the decorated test. - msg : str, optional - Message to give on raising a SkipTest exception. Default is None. - - Returns - ------- - decorator : function - Decorator which, when applied to a function, causes SkipTest - to be raised when `skip_condition` is True, and the function - to be called normally otherwise. - - Notes - ----- - The decorator itself is decorated with the ``nose.tools.make_decorator`` - function in order to transmit function name, and various other metadata. - - """ - - def skip_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - # Allow for both boolean or callable skip conditions. 
- if callable(skip_condition): - skip_val = lambda : skip_condition() - else: - skip_val = lambda : skip_condition - - def get_msg(func,msg=None): - """Skip message with information about function being skipped.""" - if msg is None: - out = 'Test skipped due to test condition' - else: - out = '\n'+msg - - return "Skipping test: %s%s" % (func.__name__,out) - - # We need to define *two* skippers because Python doesn't allow both - # return with value and yield inside the same function. - def skipper_func(*args, **kwargs): - """Skipper for normal test functions.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - return f(*args, **kwargs) - - def skipper_gen(*args, **kwargs): - """Skipper for test generators.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - for x in f(*args, **kwargs): - yield x - - # Choose the right skipper to use when building the actual decorator. - if nose.util.isgenerator(f): - skipper = skipper_gen - else: - skipper = skipper_func - - return nose.tools.make_decorator(f)(skipper) - - return skip_decorator - -def knownfailureif(fail_condition, msg=None): - """ - Make function raise KnownFailureTest exception if given condition is true. - - Parameters - ---------- - fail_condition : bool - Flag to determine whether to mark the decorated test as a known - failure (if True) or not (if False). - msg : str, optional - Message to give on raising a KnownFailureTest exception. - Default is None. - - Returns - ------- - decorator : function - Decorator, which, when applied to a function, causes KnownFailureTest to - be raised when `fail_condition` is True and the test fails. - - Notes - ----- - The decorator itself is decorated with the ``nose.tools.make_decorator`` - function in order to transmit function name, and various other metadata. 
- - """ - if msg is None: - msg = 'Test skipped due to known failure' - - def knownfail_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - def knownfailer(*args, **kwargs): - if fail_condition: - raise KnownFailureTest(msg) - else: - return f(*args, **kwargs) - return nose.tools.make_decorator(f)(knownfailer) - - return knownfail_decorator diff --git a/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py b/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py deleted file mode 100644 index ca6ccd87bb..0000000000 --- a/contrib/python/ipython/py3/IPython/external/decorators/_numpy_testing_noseclasses.py +++ /dev/null @@ -1,41 +0,0 @@ -# IPython: modified copy of numpy.testing.noseclasses, so -# IPython.external._decorators works without numpy being installed. - -# These classes implement a "known failure" error class. - -import os - -from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin - -class KnownFailureTest(Exception): - '''Raise this exception to mark a test as a known failing test.''' - pass - - -class KnownFailure(ErrorClassPlugin): - '''Plugin that installs a KNOWNFAIL error class for the - KnownFailureClass exception. 
When KnownFailureTest is raised, - the exception will be logged in the knownfail attribute of the - result, 'K' or 'KNOWNFAIL' (verbose) will be output, and the - exception will not be counted as an error or failure.''' - enabled = True - knownfail = ErrorClass(KnownFailureTest, - label='KNOWNFAIL', - isfailure=False) - - def options(self, parser, env=os.environ): - env_opt = 'NOSE_WITHOUT_KNOWNFAIL' - parser.add_option('--no-knownfail', action='store_true', - dest='noKnownFail', default=env.get(env_opt, False), - help='Disable special handling of KnownFailureTest ' - 'exceptions') - - def configure(self, options, conf): - if not self.can_configure: - return - self.conf = conf - disable = getattr(options, 'noKnownFail', False) - if disable: - self.enabled = False - - diff --git a/contrib/python/ipython/py3/IPython/external/mathjax.py b/contrib/python/ipython/py3/IPython/external/mathjax.py deleted file mode 100644 index 1b9b80905b..0000000000 --- a/contrib/python/ipython/py3/IPython/external/mathjax.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/python -""" -`IPython.external.mathjax` is deprecated with IPython 4.0+ - -mathjax is now install by default with the notebook package - -""" - -import sys - -if __name__ == '__main__' : - sys.exit("IPython.external.mathjax is deprecated, Mathjax is now installed by default with the notebook package") - diff --git a/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py b/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py index d2e7bd99f0..b3168f6e2e 100644 --- a/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py +++ b/contrib/python/ipython/py3/IPython/external/qt_for_kernel.py @@ -31,7 +31,6 @@ else: import os import sys -from IPython.utils.version import check_version from IPython.external.qt_loaders import ( load_qt, loaded_api, @@ -101,8 +100,8 @@ def get_options(): mpl = sys.modules.get('matplotlib', None) - if mpl is not None and not check_version(mpl.__version__, '1.0.2'): - #1.0.1 only 
supports PyQt4 v1 + if mpl is not None and tuple(mpl.__version__.split(".")) < ("1", "0", "2"): + # 1.0.1 only supports PyQt4 v1 return [QT_API_PYQT_DEFAULT] qt_api = os.environ.get('QT_API', None) diff --git a/contrib/python/ipython/py3/IPython/external/qt_loaders.py b/contrib/python/ipython/py3/IPython/external/qt_loaders.py index 79805358e7..39ea298460 100644 --- a/contrib/python/ipython/py3/IPython/external/qt_loaders.py +++ b/contrib/python/ipython/py3/IPython/external/qt_loaders.py @@ -8,13 +8,12 @@ bindings, which is unstable and likely to crash This is used primarily by qt and qt_for_kernel, and shouldn't be accessed directly from the outside """ +import importlib.abc import sys import types from functools import partial, lru_cache import operator -from IPython.utils.version import check_version - # ### Available APIs. # Qt6 QT_API_PYQT6 = "pyqt6" @@ -47,7 +46,7 @@ api_to_module = { } -class ImportDenier(object): +class ImportDenier(importlib.abc.MetaPathFinder): """Import Hook that will guard against bad Qt imports once IPython commits to a specific binding """ @@ -59,17 +58,17 @@ class ImportDenier(object): sys.modules.pop(module_name, None) self.__forbidden.add(module_name) - def find_module(self, fullname, path=None): + def find_spec(self, fullname, path, target=None): if path: return if fullname in self.__forbidden: - return self - - def load_module(self, fullname): - raise ImportError(""" + raise ImportError( + """ Importing %s disabled by IPython, which has already imported an Incompatible QT Binding: %s - """ % (fullname, loaded_api())) + """ + % (fullname, loaded_api()) + ) ID = ImportDenier() @@ -78,7 +77,7 @@ sys.meta_path.insert(0, ID) def commit_api(api): """Commit to a particular API, and trigger ImportErrors on subsequent - dangerous imports""" + dangerous imports""" modules = set(api_to_module.values()) modules.remove(api_to_module[api]) @@ -118,15 +117,15 @@ def loaded_api(): def has_binding(api): """Safely check for PyQt4/5, PySide or 
PySide2, without importing submodules - Parameters - ---------- - api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault'] - Which module to check for + Parameters + ---------- + api : str [ 'pyqtv1' | 'pyqt' | 'pyqt5' | 'pyside' | 'pyside2' | 'pyqtdefault'] + Which module to check for - Returns - ------- - True if the relevant module appears to be importable - """ + Returns + ------- + True if the relevant module appears to be importable + """ module_name = api_to_module[api] from importlib.util import find_spec @@ -149,7 +148,8 @@ def has_binding(api): if api == QT_API_PYSIDE: # We can also safely check PySide version import PySide - return check_version(PySide.__version__, '1.0.3') + + return PySide.__version_info__ >= (1, 0, 3) return True @@ -195,10 +195,9 @@ def import_pyqt4(version=2): Parameters ---------- version : 1, 2, or None - Which QString/QVariant API to use. Set to None to use the system - default - - ImportErrors rasied within this function are non-recoverable + Which QString/QVariant API to use. Set to None to use the system + default + ImportErrors raised within this function are non-recoverable """ # The new-style string API (version=2) automatically # converts QStrings to Unicode Python strings. 
Also, automatically unpacks @@ -211,7 +210,7 @@ def import_pyqt4(version=2): from PyQt4 import QtGui, QtCore, QtSvg - if not check_version(QtCore.PYQT_VERSION_STR, '4.7'): + if QtCore.PYQT_VERSION < 0x040700: raise ImportError("IPython requires PyQt4 >= 4.7, found %s" % QtCore.PYQT_VERSION_STR) @@ -229,7 +228,7 @@ def import_pyqt5(): """ Import PyQt5 - ImportErrors rasied within this function are non-recoverable + ImportErrors raised within this function are non-recoverable """ from PyQt5 import QtCore, QtSvg, QtWidgets, QtGui @@ -251,7 +250,7 @@ def import_pyqt6(): """ Import PyQt6 - ImportErrors rasied within this function are non-recoverable + ImportErrors raised within this function are non-recoverable """ from PyQt6 import QtCore, QtSvg, QtWidgets, QtGui @@ -321,13 +320,12 @@ def load_qt(api_options): Parameters ---------- - api_options: List of strings + api_options : List of strings The order of APIs to try. Valid items are 'pyside', 'pyside2', 'pyqt', 'pyqt5', 'pyqtv1' and 'pyqtdefault' Returns ------- - A tuple of QtCore, QtGui, QtSvg, QT_API The first three are the Qt modules. The last is the string indicating which module was loaded. diff --git a/contrib/python/ipython/py3/IPython/frontend.py b/contrib/python/ipython/py3/IPython/frontend.py deleted file mode 100644 index 9cc3eaff2f..0000000000 --- a/contrib/python/ipython/py3/IPython/frontend.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Shim to maintain backwards compatibility with old frontend imports. - -We have moved all contents of the old `frontend` subpackage into top-level -subpackages (`html`, `qt` and `terminal`), and flattened the notebook into -just `IPython.html`, formerly `IPython.frontend.html.notebook`. - -This will let code that was making `from IPython.frontend...` calls continue -working, though a warning will be printed. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The top-level `frontend` package has been deprecated since IPython 1.0. " - "All its subpackages have been moved to the top `IPython` level.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.frontend.html.notebook'] = ShimModule( - src='IPython.frontend.html.notebook', mirror='IPython.html') -sys.modules['IPython.frontend'] = ShimModule( - src='IPython.frontend', mirror='IPython') diff --git a/contrib/python/ipython/py3/IPython/html.py b/contrib/python/ipython/py3/IPython/html.py deleted file mode 100644 index 050be5c599..0000000000 --- a/contrib/python/ipython/py3/IPython/html.py +++ /dev/null @@ -1,28 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.html imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.html` package has been deprecated since IPython 4.0. " - "You should import from `notebook` instead. 
" - "`IPython.html.widgets` has moved to `ipywidgets`.", ShimWarning) - -_widgets = sys.modules['IPython.html.widgets'] = ShimModule( - src='IPython.html.widgets', mirror='ipywidgets') - -_html = ShimModule( - src='IPython.html', mirror='notebook') - -# hook up widgets -_html.widgets = _widgets -sys.modules['IPython.html'] = _html - -if __name__ == '__main__': - from notebook import notebookapp as app - app.launch_new_instance() diff --git a/contrib/python/ipython/py3/IPython/kernel/__init__.py b/contrib/python/ipython/py3/IPython/kernel/__init__.py deleted file mode 100644 index 70a05ed4aa..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.kernel imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.kernel` package has been deprecated since IPython 4.0." 
- "You should import from ipykernel or jupyter_client instead.", ShimWarning) - - -# zmq subdir is gone -sys.modules['IPython.kernel.zmq.session'] = ShimModule( - src='IPython.kernel.zmq.session', mirror='jupyter_client.session') -sys.modules['IPython.kernel.zmq'] = ShimModule( - src='IPython.kernel.zmq', mirror='ipykernel') - -for pkg in ('comm', 'inprocess'): - src = 'IPython.kernel.%s' % pkg - sys.modules[src] = ShimModule(src=src, mirror='ipykernel.%s' % pkg) - -for pkg in ('ioloop', 'blocking'): - src = 'IPython.kernel.%s' % pkg - sys.modules[src] = ShimModule(src=src, mirror='jupyter_client.%s' % pkg) - -# required for `from IPython.kernel import PKG` -from ipykernel import comm, inprocess -from jupyter_client import ioloop, blocking -# public API -from ipykernel.connect import * -from jupyter_client import * diff --git a/contrib/python/ipython/py3/IPython/kernel/__main__.py b/contrib/python/ipython/py3/IPython/kernel/__main__.py deleted file mode 100644 index d1f0cf5334..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -if __name__ == '__main__': - from ipykernel import kernelapp as app - app.launch_new_instance() diff --git a/contrib/python/ipython/py3/IPython/kernel/adapter.py b/contrib/python/ipython/py3/IPython/kernel/adapter.py deleted file mode 100644 index 3b8c046b2d..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/adapter.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.adapter import * diff --git a/contrib/python/ipython/py3/IPython/kernel/channels.py b/contrib/python/ipython/py3/IPython/kernel/channels.py deleted file mode 100644 index 8c7fe2a063..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/channels.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.channels import * diff --git a/contrib/python/ipython/py3/IPython/kernel/channelsabc.py b/contrib/python/ipython/py3/IPython/kernel/channelsabc.py deleted file mode 100644 index 88944012d4..0000000000 --- 
a/contrib/python/ipython/py3/IPython/kernel/channelsabc.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.channelsabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/client.py b/contrib/python/ipython/py3/IPython/kernel/client.py deleted file mode 100644 index a98690b74c..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/client.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.client import * diff --git a/contrib/python/ipython/py3/IPython/kernel/clientabc.py b/contrib/python/ipython/py3/IPython/kernel/clientabc.py deleted file mode 100644 index e0cf06c942..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/clientabc.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.clientabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/connect.py b/contrib/python/ipython/py3/IPython/kernel/connect.py deleted file mode 100644 index 5b6d40a5d3..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/connect.py +++ /dev/null @@ -1,2 +0,0 @@ -from ipykernel.connect import * -from jupyter_client.connect import * diff --git a/contrib/python/ipython/py3/IPython/kernel/kernelspec.py b/contrib/python/ipython/py3/IPython/kernel/kernelspec.py deleted file mode 100644 index 123419b2f5..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/kernelspec.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.kernelspec import * diff --git a/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py b/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py deleted file mode 100644 index 28cd33abd3..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/kernelspecapp.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.kernelspecapp import * diff --git a/contrib/python/ipython/py3/IPython/kernel/launcher.py b/contrib/python/ipython/py3/IPython/kernel/launcher.py deleted file mode 100644 index 1953bc4809..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/launcher.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.launcher 
import * diff --git a/contrib/python/ipython/py3/IPython/kernel/manager.py b/contrib/python/ipython/py3/IPython/kernel/manager.py deleted file mode 100644 index c88097cff6..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/manager.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.manager import * diff --git a/contrib/python/ipython/py3/IPython/kernel/managerabc.py b/contrib/python/ipython/py3/IPython/kernel/managerabc.py deleted file mode 100644 index 6b40827ff8..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/managerabc.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.managerabc import * diff --git a/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py b/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py deleted file mode 100644 index ce576e27ea..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/multikernelmanager.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.multikernelmanager import * diff --git a/contrib/python/ipython/py3/IPython/kernel/restarter.py b/contrib/python/ipython/py3/IPython/kernel/restarter.py deleted file mode 100644 index dc24117c3a..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/restarter.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.restarter import * diff --git a/contrib/python/ipython/py3/IPython/kernel/threaded.py b/contrib/python/ipython/py3/IPython/kernel/threaded.py deleted file mode 100644 index 4a1072f7fe..0000000000 --- a/contrib/python/ipython/py3/IPython/kernel/threaded.py +++ /dev/null @@ -1 +0,0 @@ -from jupyter_client.threaded import * diff --git a/contrib/python/ipython/py3/IPython/lib/__init__.py b/contrib/python/ipython/py3/IPython/lib/__init__.py index 8eb89012df..94b8ade4ec 100644 --- a/contrib/python/ipython/py3/IPython/lib/__init__.py +++ b/contrib/python/ipython/py3/IPython/lib/__init__.py @@ -9,13 +9,3 @@ Extra capabilities for IPython # Distributed under the terms of the BSD License. 
The full license is in # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -from IPython.lib.security import passwd - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- diff --git a/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py b/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py index 31997e13f2..e7ad51eb67 100644 --- a/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py +++ b/contrib/python/ipython/py3/IPython/lib/backgroundjobs.py @@ -116,7 +116,7 @@ class BackgroundJobManager(object): The given expression is passed to eval(), along with the optional global/local dicts provided. If no dicts are given, they are extracted automatically from the caller's frame. - + A Python statement is NOT a valid eval() expression. Basically, you can only use as an eval() argument something which can go on the right of an '=' sign and be assigned to a variable. @@ -135,7 +135,7 @@ class BackgroundJobManager(object): job_manager.new(myfunc, x, y, kw=dict(z=1)) - The reason for this assymmetry is that the new() method needs to + The reason for this asymmetry is that the new() method needs to maintain access to its own keywords, and this prevents name collisions between arguments to new() and arguments to your own functions. 
diff --git a/contrib/python/ipython/py3/IPython/lib/clipboard.py b/contrib/python/ipython/py3/IPython/lib/clipboard.py index 316a8ab1f8..95a6b0a0a3 100644 --- a/contrib/python/ipython/py3/IPython/lib/clipboard.py +++ b/contrib/python/ipython/py3/IPython/lib/clipboard.py @@ -16,9 +16,9 @@ def win32_clipboard_get(): """ try: import win32clipboard - except ImportError: + except ImportError as e: raise TryNext("Getting text from the clipboard requires the pywin32 " - "extensions: http://sourceforge.net/projects/pywin32/") + "extensions: http://sourceforge.net/projects/pywin32/") from e win32clipboard.OpenClipboard() try: text = win32clipboard.GetClipboardData(win32clipboard.CF_UNICODETEXT) @@ -26,8 +26,8 @@ def win32_clipboard_get(): try: text = win32clipboard.GetClipboardData(win32clipboard.CF_TEXT) text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) - except (TypeError, win32clipboard.error): - raise ClipboardEmpty + except (TypeError, win32clipboard.error) as e: + raise ClipboardEmpty from e finally: win32clipboard.CloseClipboard() return text @@ -52,15 +52,15 @@ def tkinter_clipboard_get(): """ try: from tkinter import Tk, TclError - except ImportError: - raise TryNext("Getting text from the clipboard on this platform requires tkinter.") + except ImportError as e: + raise TryNext("Getting text from the clipboard on this platform requires tkinter.") from e root = Tk() root.withdraw() try: text = root.clipboard_get() - except TclError: - raise ClipboardEmpty + except TclError as e: + raise ClipboardEmpty from e finally: root.destroy() text = py3compat.cast_unicode(text, py3compat.DEFAULT_ENCODING) diff --git a/contrib/python/ipython/py3/IPython/lib/deepreload.py b/contrib/python/ipython/py3/IPython/lib/deepreload.py index bd8c01b2a7..aaedab2425 100644 --- a/contrib/python/ipython/py3/IPython/lib/deepreload.py +++ b/contrib/python/ipython/py3/IPython/lib/deepreload.py @@ -28,7 +28,7 @@ re-implementation of hierarchical module import. 
import builtins as builtin_mod from contextlib import contextmanager -import imp +import importlib import sys from types import ModuleType @@ -97,21 +97,21 @@ def get_parent(globals, level): for x in range(level, 1, -1): try: dot = name.rindex('.', 0, dot) - except ValueError: + except ValueError as e: raise ValueError("attempted relative import beyond top-level " - "package") + "package") from e name = name[:dot] try: parent = sys.modules[name] - except: + except BaseException as e: if orig_level < 1: warn("Parent module '%.200s' not found while handling absolute " "import" % name) parent = None else: raise SystemError("Parent module '%.200s' not loaded, cannot " - "perform relative import" % name) + "perform relative import" % name) from e # We expect, but can't guarantee, if parent != None, that: # - parent.__name__ == name @@ -174,33 +174,17 @@ def import_submodule(mod, subname, fullname): print('Reloading', fullname) found_now[fullname] = 1 oldm = sys.modules.get(fullname, None) - - if mod is None: - path = None - elif hasattr(mod, '__path__'): - path = mod.__path__ - else: - return None - try: - # This appears to be necessary on Python 3, because imp.find_module() - # tries to import standard libraries (like io) itself, and we don't - # want them to be processed by our deep_import_hook. - with replace_import_hook(original_import): - fp, filename, stuff = imp.find_module(subname, path) - except ImportError: - return None - - try: - m = imp.load_module(fullname, fp, filename, stuff) + if oldm is not None: + m = importlib.reload(oldm) + else: + m = importlib.import_module(subname, mod) except: # load_module probably removed name from modules because of # the error. Put back the original module object. 
if oldm: sys.modules[fullname] = oldm raise - finally: - if fp: fp.close() add_submodule(mod, m, fullname, subname) @@ -285,50 +269,35 @@ def deep_reload_hook(m): except: modules_reloading[name] = m - dot = name.rfind('.') - if dot < 0: - subname = name - path = None - else: - try: - parent = sys.modules[name[:dot]] - except KeyError: - modules_reloading.clear() - raise ImportError("reload(): parent %.200s not in sys.modules" % name[:dot]) - subname = name[dot+1:] - path = getattr(parent, "__path__", None) - try: - # This appears to be necessary on Python 3, because imp.find_module() - # tries to import standard libraries (like io) itself, and we don't - # want them to be processed by our deep_import_hook. - with replace_import_hook(original_import): - fp, filename, stuff = imp.find_module(subname, path) - finally: - modules_reloading.clear() - - try: - newm = imp.load_module(name, fp, filename, stuff) + newm = importlib.reload(m) except: - # load_module probably removed name from modules because of - # the error. Put back the original module object. sys.modules[name] = m raise finally: - if fp: fp.close() - - modules_reloading.clear() + modules_reloading.clear() return newm # Save the original hooks -original_reload = imp.reload +original_reload = importlib.reload # Replacement for reload() -def reload(module, exclude=('sys', 'os.path', 'builtins', '__main__', - 'numpy', 'numpy._globals')): +def reload( + module, + exclude=( + *sys.builtin_module_names, + "sys", + "os.path", + "builtins", + "__main__", + "numpy", + "numpy._globals", + ), +): """Recursively reload all modules used in the given module. Optionally takes a list of modules to exclude from reloading. The default exclude - list contains sys, __main__, and __builtin__, to prevent, e.g., resetting + list contains modules listed in sys.builtin_module_names with additional + sys, os.path, builtins and __main__, to prevent, e.g., resetting display, exception, and io hooks. 
""" global found_now diff --git a/contrib/python/ipython/py3/IPython/lib/demo.py b/contrib/python/ipython/py3/IPython/lib/demo.py index 0b19c413c3..8c9ae905d4 100644 --- a/contrib/python/ipython/py3/IPython/lib/demo.py +++ b/contrib/python/ipython/py3/IPython/lib/demo.py @@ -184,6 +184,7 @@ import re import shlex import sys import pygments +from pathlib import Path from IPython.utils.text import marquee from IPython.utils import openpy @@ -238,7 +239,7 @@ class Demo(object): terminal16m - style('default'): a string of pygments style name to be used. - """ + """ if hasattr(src, "read"): # It seems to be a file or a file-like object self.fname = "from a file-like object" @@ -403,8 +404,8 @@ class Demo(object): index -= 1 filename = self.shell.mktempfile(self.src_blocks[index]) - self.shell.hooks.editor(filename,1) - with open(filename, 'r') as f: + self.shell.hooks.editor(filename, 1) + with open(Path(filename), "r", encoding="utf-8") as f: new_block = f.read() # update the source and colored block self.src_blocks[index] = new_block @@ -531,7 +532,7 @@ class Demo(object): elif token[0] == Token.Comment.Single: toks.append((Token.Comment.Single, token[1][0])) # parse comment content by rst lexer - # remove the extrat newline added by rst lexer + # remove the extra newline added by rst lexer toks += list(pygments.lex(token[1][1:], self.rst_lexer))[:-1] else: toks.append(token) diff --git a/contrib/python/ipython/py3/IPython/lib/display.py b/contrib/python/ipython/py3/IPython/lib/display.py index 7b94acf639..5ff2983dbf 100644 --- a/contrib/python/ipython/py3/IPython/lib/display.py +++ b/contrib/python/ipython/py3/IPython/lib/display.py @@ -65,34 +65,47 @@ class Audio(DisplayObject): Examples -------- - :: - # Generate a sound - import numpy as np - framerate = 44100 - t = np.linspace(0,5,framerate*5) - data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t) - Audio(data,rate=framerate) + >>> import pytest + >>> np = pytest.importorskip("numpy") + + Generate a sound + + >>> 
import numpy as np + >>> framerate = 44100 + >>> t = np.linspace(0,5,framerate*5) + >>> data = np.sin(2*np.pi*220*t) + np.sin(2*np.pi*224*t) + >>> Audio(data, rate=framerate) + <IPython.lib.display.Audio object> + + Can also do stereo or more channels + + >>> dataleft = np.sin(2*np.pi*220*t) + >>> dataright = np.sin(2*np.pi*224*t) + >>> Audio([dataleft, dataright], rate=framerate) + <IPython.lib.display.Audio object> + + From URL: - # Can also do stereo or more channels - dataleft = np.sin(2*np.pi*220*t) - dataright = np.sin(2*np.pi*224*t) - Audio([dataleft, dataright],rate=framerate) + >>> Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # doctest: +SKIP + >>> Audio(url="http://www.w3schools.com/html/horse.ogg") # doctest: +SKIP - Audio("http://www.nch.com.au/acm/8k16bitpcm.wav") # From URL - Audio(url="http://www.w3schools.com/html/horse.ogg") + From a File: - Audio('/path/to/sound.wav') # From file - Audio(filename='/path/to/sound.ogg') + >>> Audio('IPython/lib/tests/test.wav') # doctest: +SKIP + >>> Audio(filename='IPython/lib/tests/test.wav') # doctest: +SKIP - Audio(b'RAW_WAV_DATA..) # From bytes - Audio(data=b'RAW_WAV_DATA..) + From Bytes: + + >>> Audio(b'RAW_WAV_DATA..') # doctest: +SKIP + >>> Audio(data=b'RAW_WAV_DATA..') # doctest: +SKIP See Also -------- - - See also the ``Audio`` widgets form the ``ipywidget`` package for more flexibility and options. - + ipywidgets.Audio + + Audio widget with more more flexibility and options. 
+ """ _read_flags = 'rb' @@ -183,9 +196,9 @@ class Audio(DisplayObject): try: max_abs_value = float(max([abs(x) for x in data])) - except TypeError: + except TypeError as e: raise TypeError('Only lists of mono audio are ' - 'supported if numpy is not installed') + 'supported if numpy is not installed') from e normalization_factor = Audio._get_normalization_factor(max_abs_value, normalize) scaled = array.array('h', [int(x / normalization_factor * 32767) for x in data]) @@ -272,10 +285,7 @@ class IFrame(object): def _repr_html_(self): """return the embed iframe""" if self.params: - try: - from urllib.parse import urlencode # Py 3 - except ImportError: - from urllib import urlencode + from urllib.parse import urlencode params = "?" + urlencode(self.params) else: params = "" @@ -500,27 +510,25 @@ class FileLinks(FileLink): self.recursive = recursive - def _get_display_formatter(self, - dirname_output_format, - fname_output_format, - fp_format, - fp_cleaner=None): - """ generate built-in formatter function - - this is used to define both the notebook and terminal built-in - formatters as they only differ by some wrapper text for each entry - - dirname_output_format: string to use for formatting directory - names, dirname will be substituted for a single "%s" which - must appear in this string - fname_output_format: string to use for formatting file names, - if a single "%s" appears in the string, fname will be substituted - if two "%s" appear in the string, the path to fname will be - substituted for the first and fname will be substituted for the - second - fp_format: string to use for formatting filepaths, must contain - exactly two "%s" and the dirname will be substituted for the first - and fname will be substituted for the second + def _get_display_formatter( + self, dirname_output_format, fname_output_format, fp_format, fp_cleaner=None + ): + """generate built-in formatter function + + this is used to define both the notebook and terminal built-in + formatters as 
they only differ by some wrapper text for each entry + + dirname_output_format: string to use for formatting directory + names, dirname will be substituted for a single "%s" which + must appear in this string + fname_output_format: string to use for formatting file names, + if a single "%s" appears in the string, fname will be substituted + if two "%s" appear in the string, the path to fname will be + substituted for the first and fname will be substituted for the + second + fp_format: string to use for formatting filepaths, must contain + exactly two "%s" and the dirname will be substituted for the first + and fname will be substituted for the second """ def f(dirname, fnames, included_suffixes=None): result = [] diff --git a/contrib/python/ipython/py3/IPython/lib/inputhook.py b/contrib/python/ipython/py3/IPython/lib/inputhook.py deleted file mode 100644 index e6e8f2dbbc..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhook.py +++ /dev/null @@ -1,666 +0,0 @@ -# coding: utf-8 -""" -Deprecated since IPython 5.0 - -Inputhook management for GUI event loop integration. -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -try: - import ctypes -except ImportError: - ctypes = None -except SystemError: # IronPython issue, 2/8/2014 - ctypes = None -import os -import platform -import sys -from distutils.version import LooseVersion as V - -from warnings import warn - - -warn("`IPython.lib.inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# Constants for identifying the GUI toolkits. 
-GUI_WX = 'wx' -GUI_QT = 'qt' -GUI_QT4 = 'qt4' -GUI_GTK = 'gtk' -GUI_TK = 'tk' -GUI_OSX = 'osx' -GUI_GLUT = 'glut' -GUI_PYGLET = 'pyglet' -GUI_GTK3 = 'gtk3' -GUI_NONE = 'none' # i.e. disable - -#----------------------------------------------------------------------------- -# Utilities -#----------------------------------------------------------------------------- - -def _stdin_ready_posix(): - """Return True if there's something to read on stdin (posix version).""" - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - return bool(infds) - -def _stdin_ready_nt(): - """Return True if there's something to read on stdin (nt version).""" - return msvcrt.kbhit() - -def _stdin_ready_other(): - """Return True, assuming there's something to read on stdin.""" - return True - -def _use_appnope(): - """Should we use appnope for dealing with OS X app nap? - - Checks if we are on OS X 10.9 or greater. - """ - return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9') - -def _ignore_CTRL_C_posix(): - """Ignore CTRL+C (SIGINT).""" - signal.signal(signal.SIGINT, signal.SIG_IGN) - -def _allow_CTRL_C_posix(): - """Take CTRL+C into account (SIGINT).""" - signal.signal(signal.SIGINT, signal.default_int_handler) - -def _ignore_CTRL_C_other(): - """Ignore CTRL+C (not implemented).""" - pass - -def _allow_CTRL_C_other(): - """Take CTRL+C into account (not implemented).""" - pass - -if os.name == 'posix': - import select - import signal - stdin_ready = _stdin_ready_posix - ignore_CTRL_C = _ignore_CTRL_C_posix - allow_CTRL_C = _allow_CTRL_C_posix -elif os.name == 'nt': - import msvcrt - stdin_ready = _stdin_ready_nt - ignore_CTRL_C = _ignore_CTRL_C_other - allow_CTRL_C = _allow_CTRL_C_other -else: - stdin_ready = _stdin_ready_other - ignore_CTRL_C = _ignore_CTRL_C_other - allow_CTRL_C = _allow_CTRL_C_other - - -#----------------------------------------------------------------------------- -# Main InputHookManager class 
-#----------------------------------------------------------------------------- - - -class InputHookManager(object): - """DEPRECATED since IPython 5.0 - - Manage PyOS_InputHook for different GUI toolkits. - - This class installs various hooks under ``PyOSInputHook`` to handle - GUI event loop integration. - """ - - def __init__(self): - if ctypes is None: - warn("IPython GUI event loop requires ctypes, %gui will not be available") - else: - self.PYFUNC = ctypes.PYFUNCTYPE(ctypes.c_int) - self.guihooks = {} - self.aliases = {} - self.apps = {} - self._reset() - - def _reset(self): - self._callback_pyfunctype = None - self._callback = None - self._installed = False - self._current_gui = None - - def get_pyos_inputhook(self): - """DEPRECATED since IPython 5.0 - - Return the current PyOS_InputHook as a ctypes.c_void_p.""" - warn("`get_pyos_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return ctypes.c_void_p.in_dll(ctypes.pythonapi,"PyOS_InputHook") - - def get_pyos_inputhook_as_func(self): - """DEPRECATED since IPython 5.0 - - Return the current PyOS_InputHook as a ctypes.PYFUNCYPE.""" - warn("`get_pyos_inputhook_as_func` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return self.PYFUNC.in_dll(ctypes.pythonapi,"PyOS_InputHook") - - def set_inputhook(self, callback): - """DEPRECATED since IPython 5.0 - - Set PyOS_InputHook to callback and return the previous one.""" - # On platforms with 'readline' support, it's all too likely to - # have a KeyboardInterrupt signal delivered *even before* an - # initial ``try:`` clause in the callback can be executed, so - # we need to disable CTRL+C in this situation. 
- ignore_CTRL_C() - self._callback = callback - self._callback_pyfunctype = self.PYFUNC(callback) - pyos_inputhook_ptr = self.get_pyos_inputhook() - original = self.get_pyos_inputhook_as_func() - pyos_inputhook_ptr.value = \ - ctypes.cast(self._callback_pyfunctype, ctypes.c_void_p).value - self._installed = True - return original - - def clear_inputhook(self, app=None): - """DEPRECATED since IPython 5.0 - - Set PyOS_InputHook to NULL and return the previous one. - - Parameters - ---------- - app : optional, ignored - This parameter is allowed only so that clear_inputhook() can be - called with a similar interface as all the ``enable_*`` methods. But - the actual value of the parameter is ignored. This uniform interface - makes it easier to have user-level entry points in the main IPython - app like :meth:`enable_gui`.""" - warn("`clear_inputhook` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - pyos_inputhook_ptr = self.get_pyos_inputhook() - original = self.get_pyos_inputhook_as_func() - pyos_inputhook_ptr.value = ctypes.c_void_p(None).value - allow_CTRL_C() - self._reset() - return original - - def clear_app_refs(self, gui=None): - """DEPRECATED since IPython 5.0 - - Clear IPython's internal reference to an application instance. - - Whenever we create an app for a user on qt4 or wx, we hold a - reference to the app. This is needed because in some cases bad things - can happen if a user doesn't hold a reference themselves. This - method is provided to clear the references we are holding. - - Parameters - ---------- - gui : None or str - If None, clear all app references. If ('wx', 'qt4') clear - the app for that toolkit. References are not held for gtk or tk - as those toolkits don't have the notion of an app. 
- """ - warn("`clear_app_refs` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if gui is None: - self.apps = {} - elif gui in self.apps: - del self.apps[gui] - - def register(self, toolkitname, *aliases): - """DEPRECATED since IPython 5.0 - - Register a class to provide the event loop for a given GUI. - - This is intended to be used as a class decorator. It should be passed - the names with which to register this GUI integration. The classes - themselves should subclass :class:`InputHookBase`. - - :: - - @inputhook_manager.register('qt') - class QtInputHook(InputHookBase): - def enable(self, app=None): - ... - """ - warn("`register` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - def decorator(cls): - if ctypes is not None: - inst = cls(self) - self.guihooks[toolkitname] = inst - for a in aliases: - self.aliases[a] = toolkitname - return cls - return decorator - - def current_gui(self): - """DEPRECATED since IPython 5.0 - - Return a string indicating the currently active GUI or None.""" - warn("`current_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - return self._current_gui - - def enable_gui(self, gui=None, app=None): - """DEPRECATED since IPython 5.0 - - Switch amongst GUI input hooks by name. - - This is a higher level method than :meth:`set_inputhook` - it uses the - GUI name to look up a registered object which enables the input hook - for that GUI. - - Parameters - ---------- - gui : optional, string or None - If None (or 'none'), clears input hook, otherwise it must be one - of the recognized GUI names (see ``GUI_*`` constants in module). - - app : optional, existing application object. - For toolkits that have the concept of a global app, you can supply an - existing one. 
If not given, the toolkit will be probed for one, and if - none is found, a new one will be created. Note that GTK does not have - this concept, and passing an app if ``gui=="GTK"`` will raise an error. - - Returns - ------- - The output of the underlying gui switch routine, typically the actual - PyOS_InputHook wrapper object or the GUI toolkit app created, if there was - one. - """ - warn("`enable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if gui in (None, GUI_NONE): - return self.disable_gui() - - if gui in self.aliases: - return self.enable_gui(self.aliases[gui], app) - - try: - gui_hook = self.guihooks[gui] - except KeyError: - e = "Invalid GUI request {!r}, valid ones are: {}" - raise ValueError(e.format(gui, ', '.join(self.guihooks))) - self._current_gui = gui - - app = gui_hook.enable(app) - if app is not None: - app._in_event_loop = True - self.apps[gui] = app - return app - - def disable_gui(self): - """DEPRECATED since IPython 5.0 - - Disable GUI event loop integration. - - If an application was registered, this sets its ``_in_event_loop`` - attribute to False. It then calls :meth:`clear_inputhook`. - """ - warn("`disable_gui` is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - gui = self._current_gui - if gui in self.apps: - self.apps[gui]._in_event_loop = False - return self.clear_inputhook() - -class InputHookBase(object): - """DEPRECATED since IPython 5.0 - - Base class for input hooks for specific toolkits. - - Subclasses should define an :meth:`enable` method with one argument, ``app``, - which will either be an instance of the toolkit's application class, or None. - They may also define a :meth:`disable` method with no arguments. 
- """ - def __init__(self, manager): - self.manager = manager - - def disable(self): - pass - -inputhook_manager = InputHookManager() - -@inputhook_manager.register('osx') -class NullInputHook(InputHookBase): - """DEPRECATED since IPython 5.0 - - A null inputhook that doesn't need to do anything""" - def enable(self, app=None): - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - -@inputhook_manager.register('wx') -class WxInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with wxPython. - - Parameters - ---------- - app : WX Application, optional. - Running application to use. If not given, we probe WX for an - existing application object, and create a new one if none is found. - - Notes - ----- - This methods sets the ``PyOS_InputHook`` for wxPython, which allows - the wxPython to integrate with terminal based applications like - IPython. - - If ``app`` is not given we probe for an existing one, and return it if - found. If no existing app is found, we create an :class:`wx.App` as - follows:: - - import wx - app = wx.App(redirect=False, clearSigInt=False) - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import wx - - wx_version = V(wx.__version__).version - - if wx_version < [2, 8]: - raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__) - - from IPython.lib.inputhookwx import inputhook_wx - self.manager.set_inputhook(inputhook_wx) - if _use_appnope(): - from appnope import nope - nope() - - import wx - if app is None: - app = wx.GetApp() - if app is None: - app = wx.App(redirect=False, clearSigInt=False) - - return app - - def disable(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with wxPython. 
- - This restores appnapp on OS X - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if _use_appnope(): - from appnope import nap - nap() - -@inputhook_manager.register('qt', 'qt4') -class Qt4InputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with PyQt4. - - Parameters - ---------- - app : Qt Application, optional. - Running application to use. If not given, we probe Qt for an - existing application object, and create a new one if none is found. - - Notes - ----- - This methods sets the PyOS_InputHook for PyQt4, which allows - the PyQt4 to integrate with terminal based applications like - IPython. - - If ``app`` is not given we probe for an existing one, and return it if - found. If no existing app is found, we create an :class:`QApplication` - as follows:: - - from PyQt4 import QtCore - app = QtGui.QApplication(sys.argv) - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookqt4 import create_inputhook_qt4 - app, inputhook_qt4 = create_inputhook_qt4(self.manager, app) - self.manager.set_inputhook(inputhook_qt4) - if _use_appnope(): - from appnope import nope - nope() - - return app - - def disable_qt4(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with PyQt4. 
- - This restores appnapp on OS X - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if _use_appnope(): - from appnope import nap - nap() - - -@inputhook_manager.register('qt5') -class Qt5InputHook(Qt4InputHook): - def enable(self, app=None): - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - os.environ['QT_API'] = 'pyqt5' - return Qt4InputHook.enable(self, app) - - -@inputhook_manager.register('gtk') -class GtkInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with PyGTK. - - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the PyOS_InputHook for PyGTK, which allows - the PyGTK to integrate with terminal based applications like - IPython. - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import gtk - try: - gtk.set_interactive(True) - except AttributeError: - # For older versions of gtk, use our own ctypes version - from IPython.lib.inputhookgtk import inputhook_gtk - self.manager.set_inputhook(inputhook_gtk) - - -@inputhook_manager.register('tk') -class TkInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with Tk. - - Parameters - ---------- - app : toplevel :class:`Tkinter.Tk` widget, optional. - Running toplevel widget to use. If not given, we probe Tk for an - existing one, and create a new one if none is found. 
- - Notes - ----- - If you have already created a :class:`Tkinter.Tk` object, the only - thing done by this method is to register with the - :class:`InputHookManager`, since creating that object automatically - sets ``PyOS_InputHook``. - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - if app is None: - try: - from tkinter import Tk # Py 3 - except ImportError: - from Tkinter import Tk # Py 2 - app = Tk() - app.withdraw() - self.manager.apps[GUI_TK] = app - return app - - -@inputhook_manager.register('glut') -class GlutInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with GLUT. - - Parameters - ---------- - - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - - This methods sets the PyOS_InputHook for GLUT, which allows the GLUT to - integrate with terminal based applications like IPython. Due to GLUT - limitations, it is currently not possible to start the event loop - without first creating a window. You should thus not create another - window but use instead the created one. See 'gui-glut.py' in the - docs/examples/lib directory. 
- - The default screen mode is set to: - glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - - import OpenGL.GLUT as glut - from IPython.lib.inputhookglut import glut_display_mode, \ - glut_close, glut_display, \ - glut_idle, inputhook_glut - - if GUI_GLUT not in self.manager.apps: - glut.glutInit( sys.argv ) - glut.glutInitDisplayMode( glut_display_mode ) - # This is specific to freeglut - if bool(glut.glutSetOption): - glut.glutSetOption( glut.GLUT_ACTION_ON_WINDOW_CLOSE, - glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS ) - glut.glutCreateWindow( sys.argv[0] ) - glut.glutReshapeWindow( 1, 1 ) - glut.glutHideWindow( ) - glut.glutWMCloseFunc( glut_close ) - glut.glutDisplayFunc( glut_display ) - glut.glutIdleFunc( glut_idle ) - else: - glut.glutWMCloseFunc( glut_close ) - glut.glutDisplayFunc( glut_display ) - glut.glutIdleFunc( glut_idle) - self.manager.set_inputhook( inputhook_glut ) - - - def disable(self): - """DEPRECATED since IPython 5.0 - - Disable event loop integration with glut. - - This sets PyOS_InputHook to NULL and set the display function to a - dummy one and set the timer to a dummy timer that will be triggered - very far in the future. - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - import OpenGL.GLUT as glut - from glut_support import glutMainLoopEvent - - glut.glutHideWindow() # This is an event to be processed below - glutMainLoopEvent() - super(GlutInputHook, self).disable() - -@inputhook_manager.register('pyglet') -class PygletInputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with pyglet. 
- - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the ``PyOS_InputHook`` for pyglet, which allows - pyglet to integrate with terminal based applications like - IPython. - - """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookpyglet import inputhook_pyglet - self.manager.set_inputhook(inputhook_pyglet) - return app - - -@inputhook_manager.register('gtk3') -class Gtk3InputHook(InputHookBase): - def enable(self, app=None): - """DEPRECATED since IPython 5.0 - - Enable event loop integration with Gtk3 (gir bindings). - - Parameters - ---------- - app : ignored - Ignored, it's only a placeholder to keep the call signature of all - gui activation methods consistent, which simplifies the logic of - supporting magics. - - Notes - ----- - This methods sets the PyOS_InputHook for Gtk3, which allows - the Gtk3 to integrate with terminal based applications like - IPython. 
- """ - warn("This function is deprecated since IPython 5.0 and will be removed in future versions.", - DeprecationWarning, stacklevel=2) - from IPython.lib.inputhookgtk3 import inputhook_gtk3 - self.manager.set_inputhook(inputhook_gtk3) - - -clear_inputhook = inputhook_manager.clear_inputhook -set_inputhook = inputhook_manager.set_inputhook -current_gui = inputhook_manager.current_gui -clear_app_refs = inputhook_manager.clear_app_refs -enable_gui = inputhook_manager.enable_gui -disable_gui = inputhook_manager.disable_gui -register = inputhook_manager.register -guis = inputhook_manager.guihooks - - -def _deprecated_disable(): - warn("This function is deprecated since IPython 4.0 use disable_gui() instead", - DeprecationWarning, stacklevel=2) - inputhook_manager.disable_gui() - -disable_wx = disable_qt4 = disable_gtk = disable_gtk3 = disable_glut = \ - disable_pyglet = disable_osx = _deprecated_disable diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookglut.py b/contrib/python/ipython/py3/IPython/lib/inputhookglut.py deleted file mode 100644 index e6f7f12575..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookglut.py +++ /dev/null @@ -1,172 +0,0 @@ -# coding: utf-8 -""" -GLUT Inputhook support functions -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -# GLUT is quite an old library and it is difficult to ensure proper -# integration within IPython since original GLUT does not allow to handle -# events one by one. Instead, it requires for the mainloop to be entered -# and never returned (there is not even a function to exit he -# mainloop). 
Fortunately, there are alternatives such as freeglut -# (available for linux and windows) and the OSX implementation gives -# access to a glutCheckLoop() function that blocks itself until a new -# event is received. This means we have to setup the idle callback to -# ensure we got at least one event that will unblock the function. -# -# Furthermore, it is not possible to install these handlers without a window -# being first created. We choose to make this window invisible. This means that -# display mode options are set at this level and user won't be able to change -# them later without modifying the code. This should probably be made available -# via IPython options system. - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -import os -import sys -import time -import signal -import OpenGL.GLUT as glut -import OpenGL.platform as platform -from timeit import default_timer as clock - -#----------------------------------------------------------------------------- -# Constants -#----------------------------------------------------------------------------- - -# Frame per second : 60 -# Should probably be an IPython option -glut_fps = 60 - - -# Display mode : double buffeed + rgba + depth -# Should probably be an IPython option -glut_display_mode = (glut.GLUT_DOUBLE | - glut.GLUT_RGBA | - glut.GLUT_DEPTH) - -glutMainLoopEvent = None -if sys.platform == 'darwin': - try: - glutCheckLoop = platform.createBaseFunction( - 'glutCheckLoop', dll=platform.GLUT, resultType=None, - argTypes=[], - doc='glutCheckLoop( ) -> None', - argNames=(), - ) - except AttributeError: - raise RuntimeError( - '''Your glut implementation does not allow interactive sessions''' - '''Consider installing freeglut.''') - glutMainLoopEvent = glutCheckLoop -elif glut.HAVE_FREEGLUT: - glutMainLoopEvent = glut.glutMainLoopEvent -else: - raise RuntimeError( - '''Your glut 
implementation does not allow interactive sessions. ''' - '''Consider installing freeglut.''') - - -#----------------------------------------------------------------------------- -# Platform-dependent imports and functions -#----------------------------------------------------------------------------- - -if os.name == 'posix': - import select - - def stdin_ready(): - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - if infds: - return True - else: - return False - -elif sys.platform == 'win32': - import msvcrt - - def stdin_ready(): - return msvcrt.kbhit() - -#----------------------------------------------------------------------------- -# Callback functions -#----------------------------------------------------------------------------- - -def glut_display(): - # Dummy display function - pass - -def glut_idle(): - # Dummy idle function - pass - -def glut_close(): - # Close function only hides the current window - glut.glutHideWindow() - glutMainLoopEvent() - -def glut_int_handler(signum, frame): - # Catch sigint and print the default message - signal.signal(signal.SIGINT, signal.default_int_handler) - print('\nKeyboardInterrupt') - # Need to reprint the prompt at this stage - - - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- -def inputhook_glut(): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. 
- - signal.signal(signal.SIGINT, glut_int_handler) - - try: - t = clock() - - # Make sure the default window is set after a window has been closed - if glut.glutGetWindow() == 0: - glut.glutSetWindow( 1 ) - glutMainLoopEvent() - return 0 - - while not stdin_ready(): - glutMainLoopEvent() - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass - return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py deleted file mode 100644 index 98569f54d7..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk.py +++ /dev/null @@ -1,35 +0,0 @@ -# encoding: utf-8 -""" -Enable pygtk to be used interactively by setting PyOS_InputHook. - -Authors: Brian Granger -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -import gtk, gobject - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - - -def _main_quit(*args, **kwargs): - gtk.main_quit() - return False - -def inputhook_gtk(): - gobject.io_add_watch(sys.stdin, gobject.IO_IN, _main_quit) - gtk.main() - return 0 - diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py deleted file mode 100644 index b797e86255..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk3.py +++ /dev/null @@ -1,34 +0,0 @@ -# encoding: utf-8 -""" -Enable Gtk3 to be used interactively by IPython. - -Authors: Thomi Richards -""" -#----------------------------------------------------------------------------- -# Copyright (c) 2012, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -from gi.repository import Gtk, GLib - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def _main_quit(*args, **kwargs): - Gtk.main_quit() - return False - - -def inputhook_gtk3(): - GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, _main_quit) - Gtk.main() - return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py b/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py deleted file mode 100644 index a872cee36a..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookgtk4.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Enable Gtk4 to be used interactively by IPython. -""" -# ----------------------------------------------------------------------------- -# Copyright (c) 2021, the IPython Development Team. -# -# Distributed under the terms of the Modified BSD License. -# -# The full license is in the file COPYING.txt, distributed with this software. 
-# ----------------------------------------------------------------------------- - -# ----------------------------------------------------------------------------- -# Imports -# ----------------------------------------------------------------------------- - -import sys - -from gi.repository import GLib - -# ----------------------------------------------------------------------------- -# Code -# ----------------------------------------------------------------------------- - - -class _InputHook: - def __init__(self, context): - self._quit = False - GLib.io_add_watch(sys.stdin, GLib.PRIORITY_DEFAULT, GLib.IO_IN, self.quit) - - def quit(self, *args, **kwargs): - self._quit = True - return False - - def run(self): - context = GLib.MainContext.default() - while not self._quit: - context.iteration(True) - - -def inputhook_gtk4(): - hook = _InputHook() - hook.run() - return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py b/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py deleted file mode 100644 index fb91ffed17..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookpyglet.py +++ /dev/null @@ -1,111 +0,0 @@ -# encoding: utf-8 -""" -Enable pyglet to be used interactively by setting PyOS_InputHook. - -Authors -------- - -* Nicolas P. Rougier -* Fernando Perez -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import sys -import time -from timeit import default_timer as clock -import pyglet - -#----------------------------------------------------------------------------- -# Platform-dependent imports and functions -#----------------------------------------------------------------------------- - -if os.name == 'posix': - import select - - def stdin_ready(): - infds, outfds, erfds = select.select([sys.stdin],[],[],0) - if infds: - return True - else: - return False - -elif sys.platform == 'win32': - import msvcrt - - def stdin_ready(): - return msvcrt.kbhit() - - -# On linux only, window.flip() has a bug that causes an AttributeError on -# window close. For details, see: -# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e - -if sys.platform.startswith('linux'): - def flip(window): - try: - window.flip() - except AttributeError: - pass -else: - def flip(window): - window.flip() - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def inputhook_pyglet(): - """Run the pyglet event loop by processing pending events only. - - This keeps processing pending events until stdin is ready. After - processing all pending events, a call to time.sleep is inserted. This is - needed, otherwise, CPU usage is at 100%. This sleep time should be tuned - though for best performance. - """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. 
- try: - t = clock() - while not stdin_ready(): - pyglet.clock.tick() - for window in pyglet.app.windows: - window.switch_to() - window.dispatch_events() - window.dispatch_event('on_draw') - flip(window) - - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - except KeyboardInterrupt: - pass - return 0 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py b/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py deleted file mode 100644 index 8a83902fc0..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookqt4.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Qt4's inputhook support function - -Author: Christian Boos -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. 
-#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import os -import signal -import threading - -from IPython.core.interactiveshell import InteractiveShell -from IPython.external.qt_for_kernel import QtCore, QtGui -from IPython.lib.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready - -#----------------------------------------------------------------------------- -# Module Globals -#----------------------------------------------------------------------------- - -got_kbdint = False -sigint_timer = None - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def create_inputhook_qt4(mgr, app=None): - """Create an input hook for running the Qt4 application event loop. - - Parameters - ---------- - mgr : an InputHookManager - - app : Qt Application, optional. - Running application to use. If not given, we probe Qt for an - existing application object, and create a new one if none is found. - - Returns - ------- - A pair consisting of a Qt Application (either the one given or the - one found or created) and a inputhook. - - Notes - ----- - We use a custom input hook instead of PyQt4's default one, as it - interacts better with the readline packages (issue #481). - - The inputhook function works in tandem with a 'pre_prompt_hook' - which automatically restores the hook as an inputhook in case the - latter has been temporarily disabled after having intercepted a - KeyboardInterrupt. 
- """ - - if app is None: - app = QtCore.QCoreApplication.instance() - if app is None: - app = QtGui.QApplication([" "]) - - # Re-use previously created inputhook if any - ip = InteractiveShell.instance() - if hasattr(ip, '_inputhook_qt4'): - return app, ip._inputhook_qt4 - - # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of - # hooks (they both share the got_kbdint flag) - - def inputhook_qt4(): - """PyOS_InputHook python hook for Qt4. - - Process pending Qt events and if there's no pending keyboard - input, spend a short slice of time (50ms) running the Qt event - loop. - - As a Python ctypes callback can't raise an exception, we catch - the KeyboardInterrupt and temporarily deactivate the hook, - which will let a *second* CTRL+C be processed normally and go - back to a clean prompt line. - """ - try: - allow_CTRL_C() - app = QtCore.QCoreApplication.instance() - if not app: # shouldn't happen, but safer if it happens anyway... - return 0 - app.processEvents(QtCore.QEventLoop.AllEvents, 300) - if not stdin_ready(): - # Generally a program would run QCoreApplication::exec() - # from main() to enter and process the Qt event loop until - # quit() or exit() is called and the program terminates. - # - # For our input hook integration, we need to repeatedly - # enter and process the Qt event loop for only a short - # amount of time (say 50ms) to ensure that Python stays - # responsive to other user inputs. - # - # A naive approach would be to repeatedly call - # QCoreApplication::exec(), using a timer to quit after a - # short amount of time. Unfortunately, QCoreApplication - # emits an aboutToQuit signal before stopping, which has - # the undesirable effect of closing all modal windows. - # - # To work around this problem, we instead create a - # QEventLoop and call QEventLoop::exec(). 
Other than - # setting some state variables which do not seem to be - # used anywhere, the only thing QCoreApplication adds is - # the aboutToQuit signal which is precisely what we are - # trying to avoid. - timer = QtCore.QTimer() - event_loop = QtCore.QEventLoop() - timer.timeout.connect(event_loop.quit) - while not stdin_ready(): - timer.start(50) - event_loop.exec_() - timer.stop() - except KeyboardInterrupt: - global got_kbdint, sigint_timer - - ignore_CTRL_C() - got_kbdint = True - mgr.clear_inputhook() - - # This generates a second SIGINT so the user doesn't have to - # press CTRL+C twice to get a clean prompt. - # - # Since we can't catch the resulting KeyboardInterrupt here - # (because this is a ctypes callback), we use a timer to - # generate the SIGINT after we leave this callback. - # - # Unfortunately this doesn't work on Windows (SIGINT kills - # Python and CTRL_C_EVENT doesn't work). - if(os.name == 'posix'): - pid = os.getpid() - if(not sigint_timer): - sigint_timer = threading.Timer(.01, os.kill, - args=[pid, signal.SIGINT] ) - sigint_timer.start() - else: - print("\nKeyboardInterrupt - Ctrl-C again for new prompt") - - - except: # NO exceptions are allowed to escape from a ctypes callback - ignore_CTRL_C() - from traceback import print_exc - print_exc() - print("Got exception from inputhook_qt4, unregistering.") - mgr.clear_inputhook() - finally: - allow_CTRL_C() - return 0 - - def preprompthook_qt4(ishell): - """'pre_prompt_hook' used to restore the Qt4 input hook - - (in case the latter was temporarily deactivated after a - CTRL+C) - """ - global got_kbdint, sigint_timer - - if(sigint_timer): - sigint_timer.cancel() - sigint_timer = None - - if got_kbdint: - mgr.set_inputhook(inputhook_qt4) - got_kbdint = False - - ip._inputhook_qt4 = inputhook_qt4 - ip.set_hook('pre_prompt_hook', preprompthook_qt4) - - return app, inputhook_qt4 diff --git a/contrib/python/ipython/py3/IPython/lib/inputhookwx.py 
b/contrib/python/ipython/py3/IPython/lib/inputhookwx.py deleted file mode 100644 index 60520a299c..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/inputhookwx.py +++ /dev/null @@ -1,167 +0,0 @@ -# encoding: utf-8 - -""" -Enable wxPython to be used interactively by setting PyOS_InputHook. - -Authors: Robin Dunn, Brian Granger, Ondrej Certik -""" - -#----------------------------------------------------------------------------- -# Copyright (C) 2008-2011 The IPython Development Team -# -# Distributed under the terms of the BSD License. The full license is in -# the file COPYING, distributed as part of this software. -#----------------------------------------------------------------------------- - -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- - -import sys -import signal -import time -from timeit import default_timer as clock -import wx - -from IPython.lib.inputhook import stdin_ready - - -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- - -def inputhook_wx1(): - """Run the wx event loop by processing pending events only. - - This approach seems to work, but its performance is not great as it - relies on having PyOS_InputHook called regularly. - """ - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # Make a temporary event loop and process system events until - # there are no more waiting, then allow idle events (which - # will also deal with pending or posted wx events.) 
- evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - while evtloop.Pending(): - evtloop.Dispatch() - app.ProcessIdle() - del ea - except KeyboardInterrupt: - pass - return 0 - -class EventLoopTimer(wx.Timer): - - def __init__(self, func): - self.func = func - wx.Timer.__init__(self) - - def Notify(self): - self.func() - -class EventLoopRunner(object): - - def Run(self, time): - self.evtloop = wx.EventLoop() - self.timer = EventLoopTimer(self.check_stdin) - self.timer.Start(time) - self.evtloop.Run() - - def check_stdin(self): - if stdin_ready(): - self.timer.Stop() - self.evtloop.Exit() - -def inputhook_wx2(): - """Run the wx event loop, polling for stdin. - - This version runs the wx eventloop for an undetermined amount of time, - during which it periodically checks to see if anything is ready on - stdin. If anything is ready on stdin, the event loop exits. - - The argument to elr.Run controls how often the event loop looks at stdin. - This determines the responsiveness at the keyboard. A setting of 1000 - enables a user to type at most 1 char per second. I have found that a - setting of 10 gives good keyboard response. We can shorten it further, - but eventually performance would suffer from calling select/kbhit too - often. - """ - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - elr = EventLoopRunner() - # As this time is made shorter, keyboard response improves, but idle - # CPU load goes up. 10 ms seems like a good compromise. - elr.Run(time=10) # CHANGE time here to control polling interval - except KeyboardInterrupt: - pass - return 0 - -def inputhook_wx3(): - """Run the wx event loop by processing pending events only. - - This is like inputhook_wx1, but it keeps processing pending events - until stdin is ready. After processing all pending events, a call to - time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. - This sleep time should be tuned though for best performance. 
- """ - # We need to protect against a user pressing Control-C when IPython is - # idle and this is running. We trap KeyboardInterrupt and pass. - try: - app = wx.GetApp() - if app is not None: - assert wx.Thread_IsMain() - - # The import of wx on Linux sets the handler for signal.SIGINT - # to 0. This is a bug in wx or gtk. We fix by just setting it - # back to the Python default. - if not callable(signal.getsignal(signal.SIGINT)): - signal.signal(signal.SIGINT, signal.default_int_handler) - - evtloop = wx.EventLoop() - ea = wx.EventLoopActivator(evtloop) - t = clock() - while not stdin_ready(): - while evtloop.Pending(): - t = clock() - evtloop.Dispatch() - app.ProcessIdle() - # We need to sleep at this point to keep the idle CPU load - # low. However, if sleep to long, GUI response is poor. As - # a compromise, we watch how often GUI events are being processed - # and switch between a short and long sleep time. Here are some - # stats useful in helping to tune this. - # time CPU load - # 0.001 13% - # 0.005 3% - # 0.01 1.5% - # 0.05 0.5% - used_time = clock() - t - if used_time > 10.0: - # print 'Sleep for 1 s' # dbg - time.sleep(1.0) - elif used_time > 0.1: - # Few GUI events coming in, so we can sleep longer - # print 'Sleep for 0.05 s' # dbg - time.sleep(0.05) - else: - # Many GUI events coming in, so sleep only very little - time.sleep(0.001) - del ea - except KeyboardInterrupt: - pass - return 0 - -if sys.platform == 'darwin': - # On OSX, evtloop.Pending() always returns True, regardless of there being - # any events pending. As such we can't use implementations 1 or 3 of the - # inputhook as those depend on a pending/dispatch loop. 
- inputhook_wx = inputhook_wx2 -else: - # This is our default implementation - inputhook_wx = inputhook_wx3 diff --git a/contrib/python/ipython/py3/IPython/lib/kernel.py b/contrib/python/ipython/py3/IPython/lib/kernel.py deleted file mode 100644 index af9827667f..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/kernel.py +++ /dev/null @@ -1,13 +0,0 @@ -"""[DEPRECATED] Utilities for connecting to kernels - -Moved to IPython.kernel.connect -""" - -import warnings -warnings.warn("IPython.lib.kernel moved to IPython.kernel.connect in IPython 1.0," - " and will be removed in IPython 6.0.", - DeprecationWarning -) - -from ipykernel.connect import * - diff --git a/contrib/python/ipython/py3/IPython/lib/latextools.py b/contrib/python/ipython/py3/IPython/lib/latextools.py index f976f2edb1..27aeef5b0e 100644 --- a/contrib/python/ipython/py3/IPython/lib/latextools.py +++ b/contrib/python/ipython/py3/IPython/lib/latextools.py @@ -12,6 +12,8 @@ import subprocess from base64 import encodebytes import textwrap +from pathlib import Path, PurePath + from IPython.utils.process import find_cmd, FindCmdError from traitlets.config import get_config from traitlets.config.configurable import SingletonConfigurable @@ -75,7 +77,6 @@ def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', format, e.g. '#AA20FA'. scale : float Scale factor for the resulting PNG. - None is returned when the backend cannot be used. 
""" @@ -95,8 +96,8 @@ def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', try: color = "RGB {}".format(" ".join([str(int(x, 16)) for x in textwrap.wrap(color[1:], 2)])) - except ValueError: - raise ValueError('Invalid color specification {}.'.format(color)) + except ValueError as e: + raise ValueError('Invalid color specification {}.'.format(color)) from e else: raise ValueError('Invalid color specification {}.'.format(color)) else: @@ -109,7 +110,8 @@ def latex_to_png(s, encode=False, backend=None, wrap=False, color='Black', def latex_to_png_mpl(s, wrap, color='Black', scale=1.0): try: - from matplotlib import mathtext + from matplotlib import figure, font_manager, mathtext + from matplotlib.backends import backend_agg from pyparsing import ParseFatalException except ImportError: return None @@ -120,11 +122,18 @@ def latex_to_png_mpl(s, wrap, color='Black', scale=1.0): s = u'${0}$'.format(s) try: - mt = mathtext.MathTextParser('bitmap') - f = BytesIO() - dpi = 120*scale - mt.to_png(f, s, fontsize=12, dpi=dpi, color=color) - return f.getvalue() + prop = font_manager.FontProperties(size=12) + dpi = 120 * scale + buffer = BytesIO() + + # Adapted from mathtext.math_to_image + parser = mathtext.MathTextParser("path") + width, height, depth, _, _ = parser.parse(s, dpi=72, prop=prop) + fig = figure.Figure(figsize=(width / 72, height / 72)) + fig.text(0, depth / height, s, fontproperties=prop, color=color) + backend_agg.FigureCanvasAgg(fig) + fig.savefig(buffer, dpi=dpi, format="png", transparent=True) + return buffer.getvalue() except (ValueError, RuntimeError, ParseFatalException): return None @@ -136,12 +145,12 @@ def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0): except FindCmdError: return None try: - workdir = tempfile.mkdtemp() - tmpfile = os.path.join(workdir, "tmp.tex") - dvifile = os.path.join(workdir, "tmp.dvi") - outfile = os.path.join(workdir, "tmp.png") + workdir = Path(tempfile.mkdtemp()) + tmpfile = 
workdir.joinpath("tmp.tex") + dvifile = workdir.joinpath("tmp.dvi") + outfile = workdir.joinpath("tmp.png") - with open(tmpfile, "w", encoding='utf8') as f: + with tmpfile.open("w", encoding="utf8") as f: f.writelines(genelatex(s, wrap)) with open(os.devnull, 'wb') as devnull: @@ -172,7 +181,7 @@ def latex_to_png_dvipng(s, wrap, color='Black', scale=1.0): stderr=devnull, ) - with open(outfile, "rb") as f: + with outfile.open("rb") as f: return f.read() except subprocess.CalledProcessError: return None diff --git a/contrib/python/ipython/py3/IPython/lib/lexers.py b/contrib/python/ipython/py3/IPython/lib/lexers.py index 4494da5657..0c9b6e1bc7 100644 --- a/contrib/python/ipython/py3/IPython/lib/lexers.py +++ b/contrib/python/ipython/py3/IPython/lib/lexers.py @@ -221,11 +221,9 @@ class IPythonConsoleLexer(Lexer): In [2]: a Out[2]: 'foo' - In [3]: print a + In [3]: print(a) foo - In [4]: 1 / 0 - Support is also provided for IPython exceptions: @@ -234,13 +232,9 @@ class IPythonConsoleLexer(Lexer): .. code-block:: ipythonconsole In [1]: raise Exception - - --------------------------------------------------------------------------- - Exception Traceback (most recent call last) - <ipython-input-1-fca2ab0ca76b> in <module> - ----> 1 raise Exception - - Exception: + Traceback (most recent call last): + ... + Exception """ name = 'IPython console session' diff --git a/contrib/python/ipython/py3/IPython/lib/pretty.py b/contrib/python/ipython/py3/IPython/lib/pretty.py index 1cb46b1413..72f143522d 100644 --- a/contrib/python/ipython/py3/IPython/lib/pretty.py +++ b/contrib/python/ipython/py3/IPython/lib/pretty.py @@ -34,6 +34,22 @@ pretty printer passed:: def _repr_pretty_(self, p, cycle): ... 
+Here's an example for a class with a simple constructor:: + + class MySimpleObject: + + def __init__(self, a, b, *, c=None): + self.a = a + self.b = b + self.c = c + + def _repr_pretty_(self, p, cycle): + ctor = CallExpression.factory(self.__class__.__name__) + if self.c is None: + p.pretty(ctor(a, b)) + else: + p.pretty(ctor(a, b, c=c)) + Here is an example implementation of a `_repr_pretty_` method for a list subclass:: @@ -93,7 +109,7 @@ from IPython.utils.decorators import undoc from IPython.utils.py3compat import PYPY __all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', - 'for_type', 'for_type_by_name'] + 'for_type', 'for_type_by_name', 'RawText', 'RawStringLiteral', 'CallExpression'] MAX_SEQ_LENGTH = 1000 @@ -500,6 +516,75 @@ class GroupQueue(object): pass +class RawText: + """ Object such that ``p.pretty(RawText(value))`` is the same as ``p.text(value)``. + + An example usage of this would be to show a list as binary numbers, using + ``p.pretty([RawText(bin(i)) for i in integers])``. + """ + def __init__(self, value): + self.value = value + + def _repr_pretty_(self, p, cycle): + p.text(self.value) + + +class CallExpression: + """ Object which emits a line-wrapped call expression in the form `__name(*args, **kwargs)` """ + def __init__(__self, __name, *args, **kwargs): + # dunders are to avoid clashes with kwargs, as python's name manging + # will kick in. + self = __self + self.name = __name + self.args = args + self.kwargs = kwargs + + @classmethod + def factory(cls, name): + def inner(*args, **kwargs): + return cls(name, *args, **kwargs) + return inner + + def _repr_pretty_(self, p, cycle): + # dunders are to avoid clashes with kwargs, as python's name manging + # will kick in. 
+ + started = False + def new_item(): + nonlocal started + if started: + p.text(",") + p.breakable() + started = True + + prefix = self.name + "(" + with p.group(len(prefix), prefix, ")"): + for arg in self.args: + new_item() + p.pretty(arg) + for arg_name, arg in self.kwargs.items(): + new_item() + arg_prefix = arg_name + "=" + with p.group(len(arg_prefix), arg_prefix): + p.pretty(arg) + + +class RawStringLiteral: + """ Wrapper that shows a string with a `r` prefix """ + def __init__(self, value): + self.value = value + + def _repr_pretty_(self, p, cycle): + base_repr = repr(self.value) + if base_repr[:1] in 'uU': + base_repr = base_repr[1:] + prefix = 'ur' + else: + prefix = 'r' + base_repr = prefix + base_repr.replace('\\\\', '\\') + p.text(base_repr) + + def _default_pprint(obj, p, cycle): """ The default print function. Used if an object does not provide one and @@ -541,7 +626,7 @@ def _default_pprint(obj, p, cycle): def _seq_pprinter_factory(start, end): """ Factory that returns a pprint function useful for sequences. Used by - the default pprint for tuples, dicts, and lists. + the default pprint for tuples and lists. """ def inner(obj, p, cycle): if cycle: @@ -553,7 +638,7 @@ def _seq_pprinter_factory(start, end): p.text(',') p.breakable() p.pretty(x) - if len(obj) == 1 and type(obj) is tuple: + if len(obj) == 1 and isinstance(obj, tuple): # Special case for 1-item tuples. 
p.text(',') p.end_group(step, end) @@ -623,45 +708,38 @@ def _super_pprint(obj, p, cycle): p.end_group(8, '>') -def _re_pattern_pprint(obj, p, cycle): - """The pprint function for regular expression patterns.""" - p.text('re.compile(') - pattern = repr(obj.pattern) - if pattern[:1] in 'uU': - pattern = pattern[1:] - prefix = 'ur' - else: - prefix = 'r' - pattern = prefix + pattern.replace('\\\\', '\\') - p.text(pattern) - if obj.flags: - p.text(',') - p.breakable() + +class _ReFlags: + def __init__(self, value): + self.value = value + + def _repr_pretty_(self, p, cycle): done_one = False for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL', 'UNICODE', 'VERBOSE', 'DEBUG'): - if obj.flags & getattr(re, flag): + if self.value & getattr(re, flag): if done_one: p.text('|') p.text('re.' + flag) done_one = True - p.text(')') + + +def _re_pattern_pprint(obj, p, cycle): + """The pprint function for regular expression patterns.""" + re_compile = CallExpression.factory('re.compile') + if obj.flags: + p.pretty(re_compile(RawStringLiteral(obj.pattern), _ReFlags(obj.flags))) + else: + p.pretty(re_compile(RawStringLiteral(obj.pattern))) def _types_simplenamespace_pprint(obj, p, cycle): """The pprint function for types.SimpleNamespace.""" - name = 'namespace' - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - for idx, (attr, value) in enumerate(obj.__dict__.items()): - if idx: - p.text(',') - p.breakable() - attr_kwarg = '{}='.format(attr) - with p.group(len(attr_kwarg), attr_kwarg): - p.pretty(value) + namespace = CallExpression.factory('namespace') + if cycle: + p.pretty(namespace(RawText("..."))) + else: + p.pretty(namespace(**obj.__dict__)) def _type_pprint(obj, p, cycle): @@ -724,14 +802,8 @@ def _exception_pprint(obj, p, cycle): name = getattr(obj.__class__, '__qualname__', obj.__class__.__name__) if obj.__class__.__module__ not in ('exceptions', 'builtins'): name = '%s.%s' % (obj.__class__.__module__, name) - step = 
len(name) + 1 - p.begin_group(step, name + '(') - for idx, arg in enumerate(getattr(obj, 'args', ())): - if idx: - p.text(',') - p.breakable() - p.pretty(arg) - p.end_group(step, ')') + + p.pretty(CallExpression(name, *getattr(obj, 'args', ()))) #: the exception base @@ -817,45 +889,51 @@ _singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis, def _defaultdict_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - p.pretty(obj.default_factory) - p.text(',') - p.breakable() - p.pretty(dict(obj)) + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + else: + p.pretty(cls_ctor(obj.default_factory, dict(obj))) def _ordereddict_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - elif len(obj): - p.pretty(list(obj.items())) + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + elif len(obj): + p.pretty(cls_ctor(list(obj.items()))) + else: + p.pretty(cls_ctor()) def _deque_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - else: - p.pretty(list(obj)) - + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + else: + p.pretty(cls_ctor(list(obj))) def _counter_pprint(obj, p, cycle): - name = obj.__class__.__name__ - with p.group(len(name) + 1, name + '(', ')'): - if cycle: - p.text('...') - elif len(obj): - p.pretty(dict(obj)) + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + p.pretty(cls_ctor(RawText("..."))) + elif len(obj): + p.pretty(cls_ctor(dict(obj))) + else: + p.pretty(cls_ctor()) + + +def _userlist_pprint(obj, p, cycle): + cls_ctor = CallExpression.factory(obj.__class__.__name__) + if cycle: + 
p.pretty(cls_ctor(RawText("..."))) + else: + p.pretty(cls_ctor(obj.data)) + for_type_by_name('collections', 'defaultdict', _defaultdict_pprint) for_type_by_name('collections', 'OrderedDict', _ordereddict_pprint) for_type_by_name('collections', 'deque', _deque_pprint) for_type_by_name('collections', 'Counter', _counter_pprint) +for_type_by_name("collections", "UserList", _userlist_pprint) if __name__ == '__main__': from random import randrange diff --git a/contrib/python/ipython/py3/IPython/lib/security.py b/contrib/python/ipython/py3/IPython/lib/security.py deleted file mode 100644 index 91a2344eab..0000000000 --- a/contrib/python/ipython/py3/IPython/lib/security.py +++ /dev/null @@ -1,114 +0,0 @@ -""" -Password generation for the IPython notebook. -""" -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- -# Stdlib -import getpass -import hashlib -import random - -# Our own -from IPython.core.error import UsageError -from IPython.utils.py3compat import encode - -#----------------------------------------------------------------------------- -# Globals -#----------------------------------------------------------------------------- - -# Length of the salt in nr of hex chars, which implies salt_len * 4 -# bits of randomness. -salt_len = 12 - -#----------------------------------------------------------------------------- -# Functions -#----------------------------------------------------------------------------- - -def passwd(passphrase=None, algorithm='sha1'): - """Generate hashed password and salt for use in notebook configuration. - - In the notebook configuration, set `c.NotebookApp.password` to - the generated string. - - Parameters - ---------- - passphrase : str - Password to hash. If unspecified, the user is asked to input - and verify a password. 
- algorithm : str - Hashing algorithm to use (e.g, 'sha1' or any argument supported - by :func:`hashlib.new`). - - Returns - ------- - hashed_passphrase : str - Hashed password, in the format 'hash_algorithm:salt:passphrase_hash'. - - Examples - -------- - >>> passwd('mypassword') - 'sha1:7cf3:b7d6da294ea9592a9480c8f52e63cd42cfb9dd12' - - """ - if passphrase is None: - for i in range(3): - p0 = getpass.getpass('Enter password: ') - p1 = getpass.getpass('Verify password: ') - if p0 == p1: - passphrase = p0 - break - else: - print('Passwords do not match.') - else: - raise UsageError('No matching passwords found. Giving up.') - - h = hashlib.new(algorithm) - salt = ('%0' + str(salt_len) + 'x') % random.getrandbits(4 * salt_len) - h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) - - return ':'.join((algorithm, salt, h.hexdigest())) - - -def passwd_check(hashed_passphrase, passphrase): - """Verify that a given passphrase matches its hashed version. - - Parameters - ---------- - hashed_passphrase : str - Hashed password, in the format returned by `passwd`. - passphrase : str - Passphrase to validate. - - Returns - ------- - valid : bool - True if the passphrase matches the hash. - - Examples - -------- - >>> from IPython.lib.security import passwd_check - >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', - ... 'mypassword') - True - - >>> passwd_check('sha1:0e112c3ddfce:a68df677475c2b47b6e86d0467eec97ac5f4b85a', - ... 
'anotherpassword') - False - """ - try: - algorithm, salt, pw_digest = hashed_passphrase.split(':', 2) - except (ValueError, TypeError): - return False - - try: - h = hashlib.new(algorithm) - except ValueError: - return False - - if len(pw_digest) == 0: - return False - - h.update(encode(passphrase, 'utf-8') + encode(salt, 'ascii')) - - return h.hexdigest() == pw_digest diff --git a/contrib/python/ipython/py3/IPython/nbconvert.py b/contrib/python/ipython/py3/IPython/nbconvert.py deleted file mode 100644 index 2de4ee50bc..0000000000 --- a/contrib/python/ipython/py3/IPython/nbconvert.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.nbconvert imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.nbconvert` package has been deprecated since IPython 4.0. " - "You should import from nbconvert instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.nbconvert'] = ShimModule( - src='IPython.nbconvert', mirror='nbconvert') diff --git a/contrib/python/ipython/py3/IPython/nbformat.py b/contrib/python/ipython/py3/IPython/nbformat.py deleted file mode 100644 index 310277de00..0000000000 --- a/contrib/python/ipython/py3/IPython/nbformat.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.nbformat imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.nbformat` package has been deprecated since IPython 4.0. 
" - "You should import from nbformat instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.nbformat'] = ShimModule( - src='IPython.nbformat', mirror='nbformat') diff --git a/contrib/python/ipython/py3/IPython/parallel.py b/contrib/python/ipython/py3/IPython/parallel.py deleted file mode 100644 index 0f10012783..0000000000 --- a/contrib/python/ipython/py3/IPython/parallel.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.parallel imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.parallel` package has been deprecated since IPython 4.0. " - "You should import from ipyparallel instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -sys.modules['IPython.parallel'] = ShimModule( - src='IPython.parallel', mirror='ipyparallel') - diff --git a/contrib/python/ipython/py3/IPython/paths.py b/contrib/python/ipython/py3/IPython/paths.py index e19269058a..4fd253cf1e 100644 --- a/contrib/python/ipython/py3/IPython/paths.py +++ b/contrib/python/ipython/py3/IPython/paths.py @@ -8,9 +8,14 @@ from warnings import warn import IPython from IPython.utils.importstring import import_item from IPython.utils.path import ( - get_home_dir, get_xdg_dir, get_xdg_cache_dir, compress_user, _writable_dir, - ensure_dir_exists, fs_encoding) -from IPython.utils import py3compat + get_home_dir, + get_xdg_dir, + get_xdg_cache_dir, + compress_user, + _writable_dir, + ensure_dir_exists, +) + def get_ipython_dir() -> str: """Get the IPython directory for this platform and user. @@ -49,8 +54,7 @@ def get_ipython_dir() -> str: warn(('{0} is deprecated. 
Move link to {1} to ' 'get rid of this message').format(cu(xdg_ipdir), cu(ipdir))) else: - warn('Moving {0} to {1}'.format(cu(xdg_ipdir), cu(ipdir))) - shutil.move(xdg_ipdir, ipdir) + ipdir = xdg_ipdir ipdir = os.path.normpath(os.path.expanduser(ipdir)) @@ -105,7 +109,8 @@ def get_ipython_module_path(module_str): mod = import_item(module_str) the_path = mod.__file__.replace('.pyc', '.py') the_path = the_path.replace('.pyo', '.py') - return py3compat.cast_unicode(the_path, fs_encoding) + return the_path + def locate_profile(profile='default'): """Find the path to the folder associated with a given profile. @@ -115,7 +120,7 @@ def locate_profile(profile='default'): from IPython.core.profiledir import ProfileDir, ProfileDirError try: pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile) - except ProfileDirError: + except ProfileDirError as e: # IOError makes more sense when people are expecting a path - raise IOError("Couldn't find profile %r" % profile) + raise IOError("Couldn't find profile %r" % profile) from e return pd.location diff --git a/contrib/python/ipython/py3/IPython/qt.py b/contrib/python/ipython/py3/IPython/qt.py deleted file mode 100644 index 7557a3f329..0000000000 --- a/contrib/python/ipython/py3/IPython/qt.py +++ /dev/null @@ -1,24 +0,0 @@ -""" -Shim to maintain backwards compatibility with old IPython.qt imports. -""" -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. - -import sys -from warnings import warn - -from IPython.utils.shimmodule import ShimModule, ShimWarning - -warn("The `IPython.qt` package has been deprecated since IPython 4.0. 
" - "You should import from qtconsole instead.", ShimWarning) - -# Unconditionally insert the shim into sys.modules so that further import calls -# trigger the custom attribute access above - -_console = sys.modules['IPython.qt.console'] = ShimModule( - src='IPython.qt.console', mirror='qtconsole') - -_qt = ShimModule(src='IPython.qt', mirror='qtconsole') - -_qt.console = _console -sys.modules['IPython.qt'] = _qt diff --git a/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py b/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py index 7678fd6801..75c2a25ccb 100644 --- a/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py +++ b/contrib/python/ipython/py3/IPython/sphinxext/custom_doctests.py @@ -107,10 +107,10 @@ def float_doctest(sphinx_shell, args, input_lines, found, submitted): try: rtol = float(args[2]) atol = float(args[3]) - except IndexError: + except IndexError as e: e = ("Both `rtol` and `atol` must be specified " "if either are specified: {0}".format(args)) - raise IndexError(e) + raise IndexError(e) from e try: submitted = str_to_array(submitted) diff --git a/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py b/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py index ac0964032a..18bdfcae99 100644 --- a/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py +++ b/contrib/python/ipython/py3/IPython/sphinxext/ipython_directive.py @@ -220,6 +220,8 @@ except Exception: # for tokenizing blocks COMMENT, INPUT, OUTPUT = range(3) +PSEUDO_DECORATORS = ["suppress", "verbatim", "savefig", "doctest"] + #----------------------------------------------------------------------------- # Functions and class declarations #----------------------------------------------------------------------------- @@ -263,11 +265,17 @@ def block_parser(part, rgxin, rgxout, fmtin, fmtout): block.append((COMMENT, line)) continue - if line_stripped.startswith('@'): - # Here is where we assume there is, at most, one 
decorator. - # Might need to rethink this. - decorator = line_stripped - continue + if any( + line_stripped.startswith("@" + pseudo_decorator) + for pseudo_decorator in PSEUDO_DECORATORS + ): + if decorator: + raise RuntimeError( + "Applying multiple pseudo-decorators on one line is not supported" + ) + else: + decorator = line_stripped + continue # does this look like an input line? matchin = rgxin.match(line) diff --git a/contrib/python/ipython/py3/IPython/terminal/debugger.py b/contrib/python/ipython/py3/IPython/terminal/debugger.py index db8ecac0d2..8448d96370 100644 --- a/contrib/python/ipython/py3/IPython/terminal/debugger.py +++ b/contrib/python/ipython/py3/IPython/terminal/debugger.py @@ -1,21 +1,19 @@ import asyncio -import signal +import os import sys from IPython.core.debugger import Pdb from IPython.core.completer import IPCompleter from .ptutils import IPythonPTCompleter -from .shortcuts import create_ipython_shortcuts, suspend_to_bg, cursor_in_leading_ws +from .shortcuts import create_ipython_shortcuts +from . import embed -from prompt_toolkit.enums import DEFAULT_BUFFER -from prompt_toolkit.filters import (Condition, has_focus, has_selection, - vi_insert_mode, emacs_insert_mode) -from prompt_toolkit.key_binding import KeyBindings -from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline +from pathlib import Path from pygments.token import Token from prompt_toolkit.shortcuts.prompt import PromptSession from prompt_toolkit.enums import EditingMode from prompt_toolkit.formatted_text import PygmentsTokens +from prompt_toolkit.history import InMemoryHistory, FileHistory from concurrent.futures import ThreadPoolExecutor from prompt_toolkit import __version__ as ptk_version @@ -34,22 +32,20 @@ class TerminalPdb(Pdb): def pt_init(self, pt_session_options=None): """Initialize the prompt session and the prompt loop and store them in self.pt_app and self.pt_loop. 
- + Additional keyword arguments for the PromptSession class can be specified in pt_session_options. """ if pt_session_options is None: pt_session_options = {} - + def get_prompt_tokens(): return [(Token.Prompt, self.prompt)] if self._ptcomp is None: - compl = IPCompleter(shell=self.shell, - namespace={}, - global_namespace={}, - parent=self.shell, - ) + compl = IPCompleter( + shell=self.shell, namespace={}, global_namespace={}, parent=self.shell + ) # add a completer for all the do_ methods methods_names = [m[3:] for m in dir(self) if m.startswith("do_")] @@ -62,11 +58,24 @@ class TerminalPdb(Pdb): self._ptcomp = IPythonPTCompleter(compl) + # setup history only when we start pdb + if self.shell.debugger_history is None: + if self.shell.debugger_history_file is not None: + + p = Path(self.shell.debugger_history_file).expanduser() + if not p.exists(): + p.touch() + self.debugger_history = FileHistory(os.path.expanduser(str(p))) + else: + self.debugger_history = InMemoryHistory() + else: + self.debugger_history = self.shell.debugger_history + options = dict( message=(lambda: PygmentsTokens(get_prompt_tokens())), editing_mode=getattr(EditingMode, self.shell.editing_mode.upper()), key_bindings=create_ipython_shortcuts(self.shell), - history=self.shell.debugger_history, + history=self.debugger_history, completer=self._ptcomp, enable_history_search=True, mouse_support=self.shell.mouse_support, @@ -124,6 +133,18 @@ class TerminalPdb(Pdb): except Exception: raise + def do_interact(self, arg): + ipshell = embed.InteractiveShellEmbed( + config=self.shell.config, + banner1="*interactive*", + exit_msg="*exiting interactive console...*", + ) + global_ns = self.curframe.f_globals + ipshell( + module=sys.modules.get(global_ns["__name__"], None), + local_ns=self.curframe_locals, + ) + def set_trace(frame=None): """ @@ -141,6 +162,6 @@ if __name__ == '__main__': # happened after hitting "c", this is needed in order to # be able to quit the debugging session (see #9950). 
old_trace_dispatch = pdb.Pdb.trace_dispatch - pdb.Pdb = TerminalPdb - pdb.Pdb.trace_dispatch = old_trace_dispatch + pdb.Pdb = TerminalPdb # type: ignore + pdb.Pdb.trace_dispatch = old_trace_dispatch # type: ignore pdb.main() diff --git a/contrib/python/ipython/py3/IPython/terminal/embed.py b/contrib/python/ipython/py3/IPython/terminal/embed.py index 188844fadd..85e76d5558 100644 --- a/contrib/python/ipython/py3/IPython/terminal/embed.py +++ b/contrib/python/ipython/py3/IPython/terminal/embed.py @@ -19,6 +19,8 @@ from IPython.terminal.ipapp import load_default_config from traitlets import Bool, CBool, Unicode from IPython.utils.io import ask_yes_no +from typing import Set + class KillEmbedded(Exception):pass # kept for backward compatibility as IPython 6 was released with @@ -47,7 +49,6 @@ class EmbeddedMagics(Magics): you may then kill it and the program will then continue to run without the interactive shell interfering again. - Kill Instance Option: If for some reasons you need to kill the location where the instance @@ -106,6 +107,14 @@ class EmbeddedMagics(Magics): self.shell.ask_exit() +class _Sentinel: + def __init__(self, repr): + assert isinstance(repr, str) + self.repr = repr + + def __repr__(self): + return repr + class InteractiveShellEmbed(TerminalInteractiveShell): @@ -123,17 +132,17 @@ class InteractiveShellEmbed(TerminalInteractiveShell): help="Automatically set the terminal title" ).tag(config=True) - _inactive_locations = set() + _inactive_locations: Set[str] = set() + + def _disable_init_location(self): + """Disable the current Instance creation location""" + InteractiveShellEmbed._inactive_locations.add(self._init_location_id) @property def embedded_active(self): return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\ and (self._init_location_id not in InteractiveShellEmbed._inactive_locations) - def _disable_init_location(self): - """Disable the current Instance creation location""" - 
InteractiveShellEmbed._inactive_locations.add(self._init_location_id) - @embedded_active.setter def embedded_active(self, value): if value: @@ -146,9 +155,9 @@ class InteractiveShellEmbed(TerminalInteractiveShell): self._call_location_id) def __init__(self, **kw): - if kw.get('user_global_ns', None) is not None: - raise DeprecationWarning( - "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.") + assert ( + "user_global_ns" not in kw + ), "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0." clid = kw.pop('_init_location_id', None) if not clid: @@ -174,8 +183,16 @@ class InteractiveShellEmbed(TerminalInteractiveShell): super(InteractiveShellEmbed, self).init_magics() self.register_magics(EmbeddedMagics) - def __call__(self, header='', local_ns=None, module=None, dummy=None, - stack_depth=1, global_ns=None, compile_flags=None, **kw): + def __call__( + self, + header="", + local_ns=None, + module=None, + dummy=None, + stack_depth=1, + compile_flags=None, + **kw + ): """Activate the interactive interpreter. __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start @@ -225,8 +242,9 @@ class InteractiveShellEmbed(TerminalInteractiveShell): # Call the embedding code with a stack depth of 1 so it can skip over # our call and get the original caller's namespaces. 
- self.mainloop(local_ns, module, stack_depth=stack_depth, - global_ns=global_ns, compile_flags=compile_flags) + self.mainloop( + local_ns, module, stack_depth=stack_depth, compile_flags=compile_flags + ) self.banner2 = self.old_banner2 @@ -236,40 +254,35 @@ class InteractiveShellEmbed(TerminalInteractiveShell): if self.should_raise: raise KillEmbedded('Embedded IPython raising error, as user requested.') - - def mainloop(self, local_ns=None, module=None, stack_depth=0, - display_banner=None, global_ns=None, compile_flags=None): + def mainloop( + self, + local_ns=None, + module=None, + stack_depth=0, + compile_flags=None, + ): """Embeds IPython into a running python program. Parameters ---------- - local_ns, module - Working local namespace (a dict) and module (a module or similar - object). If given as None, they are automatically taken from the scope - where the shell was called, so that program variables become visible. - + Working local namespace (a dict) and module (a module or similar + object). If given as None, they are automatically taken from the scope + where the shell was called, so that program variables become visible. stack_depth : int - How many levels in the stack to go to looking for namespaces (when - local_ns or module is None). This allows an intermediate caller to - make sure that this function gets the namespace from the intended - level in the stack. By default (0) it will get its locals and globals - from the immediate caller. - + How many levels in the stack to go to looking for namespaces (when + local_ns or module is None). This allows an intermediate caller to + make sure that this function gets the namespace from the intended + level in the stack. By default (0) it will get its locals and globals + from the immediate caller. compile_flags - A bit field identifying the __future__ features - that are enabled, as passed to the builtin :func:`compile` function. 
- If given as None, they are automatically taken from the scope where - the shell was called. + A bit field identifying the __future__ features + that are enabled, as passed to the builtin :func:`compile` function. + If given as None, they are automatically taken from the scope where + the shell was called. """ - if (global_ns is not None) and (module is None): - raise DeprecationWarning("'global_ns' keyword argument is deprecated, and has been removed in IPython 5.0 use `module` keyword argument instead.") - - if (display_banner is not None): - warnings.warn("The display_banner parameter is deprecated since IPython 4.0", DeprecationWarning) - # Get locals and globals from caller if ((local_ns is None or module is None or compile_flags is None) and self.default_user_namespaces): @@ -334,7 +347,7 @@ class InteractiveShellEmbed(TerminalInteractiveShell): self.compile.flags = orig_compile_flags -def embed(**kwargs): +def embed(*, header="", compile_flags=None, **kwargs): """Call this to embed IPython at the current point in your program. The first invocation of this will create an :class:`InteractiveShellEmbed` @@ -360,8 +373,6 @@ def embed(**kwargs): config argument. 
""" config = kwargs.get('config') - header = kwargs.pop('header', u'') - compile_flags = kwargs.pop('compile_flags', None) if config is None: config = load_default_config() config.InteractiveShellEmbed = config.TerminalInteractiveShell diff --git a/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py b/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py index 4e35aadd61..06724bea87 100644 --- a/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py +++ b/contrib/python/ipython/py3/IPython/terminal/interactiveshell.py @@ -3,23 +3,34 @@ import asyncio import os import sys -import warnings from warnings import warn +from IPython.core.async_helpers import get_asyncio_loop from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC -from IPython.utils import io from IPython.utils.py3compat import input from IPython.utils.terminal import toggle_set_term_title, set_term_title, restore_term_title from IPython.utils.process import abbrev_cwd from traitlets import ( - Bool, Unicode, Dict, Integer, observe, Instance, Type, default, Enum, Union, - Any, validate + Bool, + Unicode, + Dict, + Integer, + observe, + Instance, + Type, + default, + Enum, + Union, + Any, + validate, + Float, ) +from prompt_toolkit.auto_suggest import AutoSuggestFromHistory from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode from prompt_toolkit.filters import (HasFocus, Condition, IsDone) from prompt_toolkit.formatted_text import PygmentsTokens -from prompt_toolkit.history import InMemoryHistory +from prompt_toolkit.history import History from prompt_toolkit.layout.processors import ConditionalProcessor, HighlightMatchingBracketProcessor from prompt_toolkit.output import ColorDepth from prompt_toolkit.patch_stdout import patch_stdout @@ -39,7 +50,6 @@ from .prompts import Prompts, ClassicPrompts, RichPromptDisplayHook from .ptutils import IPythonPTCompleter, IPythonPTLexer from .shortcuts import create_ipython_shortcuts 
-DISPLAY_BANNER_DEPRECATED = object() PTK3 = ptk_version.startswith('3.') @@ -48,17 +58,17 @@ class _NoStyle(Style): pass _style_overrides_light_bg = { - Token.Prompt: '#0000ff', - Token.PromptNum: '#0000ee bold', - Token.OutPrompt: '#cc0000', - Token.OutPromptNum: '#bb0000 bold', + Token.Prompt: '#ansibrightblue', + Token.PromptNum: '#ansiblue bold', + Token.OutPrompt: '#ansibrightred', + Token.OutPromptNum: '#ansired bold', } _style_overrides_linux = { - Token.Prompt: '#00cc00', - Token.PromptNum: '#00bb00 bold', - Token.OutPrompt: '#cc0000', - Token.OutPromptNum: '#bb0000 bold', + Token.Prompt: '#ansibrightgreen', + Token.PromptNum: '#ansigreen bold', + Token.OutPrompt: '#ansibrightred', + Token.OutPromptNum: '#ansired bold', } def get_default_editor(): @@ -91,13 +101,72 @@ else: _use_simple_prompt = ('IPY_TEST_SIMPLE_PROMPT' in os.environ) or (not _is_tty) def black_reformat_handler(text_before_cursor): + """ + We do not need to protect against error, + this is taken care at a higher level where any reformat error is ignored. + Indeed we may call reformatting on incomplete code. 
+ """ import black + formatted_text = black.format_str(text_before_cursor, mode=black.FileMode()) - if not text_before_cursor.endswith('\n') and formatted_text.endswith('\n'): - formatted_text = formatted_text[:-1] + if not text_before_cursor.endswith("\n") and formatted_text.endswith("\n"): + formatted_text = formatted_text[:-1] return formatted_text +def yapf_reformat_handler(text_before_cursor): + from yapf.yapflib import file_resources + from yapf.yapflib import yapf_api + + style_config = file_resources.GetDefaultStyleForDir(os.getcwd()) + formatted_text, was_formatted = yapf_api.FormatCode( + text_before_cursor, style_config=style_config + ) + if was_formatted: + if not text_before_cursor.endswith("\n") and formatted_text.endswith("\n"): + formatted_text = formatted_text[:-1] + return formatted_text + else: + return text_before_cursor + + +class PtkHistoryAdapter(History): + """ + Prompt toolkit has it's own way of handling history, Where it assumes it can + Push/pull from history. + + """ + + def __init__(self, shell): + super().__init__() + self.shell = shell + self._refresh() + + def append_string(self, string): + # we rely on sql for that. 
+ self._loaded = False + self._refresh() + + def _refresh(self): + if not self._loaded: + self._loaded_strings = list(self.load_history_strings()) + + def load_history_strings(self): + last_cell = "" + res = [] + for __, ___, cell in self.shell.history_manager.get_tail( + self.shell.history_load_length, include_latest=True + ): + # Ignore blank lines and consecutive duplicates + cell = cell.rstrip() + if cell and (cell != last_cell): + res.append(cell) + last_cell = cell + yield from res[::-1] + + def store_string(self, string: str) -> None: + pass + class TerminalInteractiveShell(InteractiveShell): mime_renderers = Dict().tag(config=True) @@ -112,6 +181,10 @@ class TerminalInteractiveShell(InteractiveShell): pt_app = None debugger_history = None + debugger_history_file = Unicode( + "~/.pdbhistory", help="File in which to store and read history" + ).tag(config=True) + simple_prompt = Bool(_use_simple_prompt, help="""Use `raw_input` for the REPL, without completion and prompt colors. @@ -137,11 +210,45 @@ class TerminalInteractiveShell(InteractiveShell): help="Shortcut style to use at the prompt. 'vi' or 'emacs'.", ).tag(config=True) - autoformatter = Unicode(None, - help="Autoformatter to reformat Terminal code. Can be `'black'` or `None`", + emacs_bindings_in_vi_insert_mode = Bool( + True, + help="Add shortcuts from 'emacs' insert mode to 'vi' insert mode.", + ).tag(config=True) + + modal_cursor = Bool( + True, + help=""" + Cursor shape changes depending on vi mode: beam in vi insert mode, + block in nav mode, underscore in replace mode.""", + ).tag(config=True) + + ttimeoutlen = Float( + 0.01, + help="""The time in milliseconds that is waited for a key code + to complete.""", + ).tag(config=True) + + timeoutlen = Float( + 0.5, + help="""The time in milliseconds that is waited for a mapped key + sequence to complete.""", + ).tag(config=True) + + autoformatter = Unicode( + None, + help="Autoformatter to reformat Terminal code. 
Can be `'black'`, `'yapf'` or `None`", allow_none=True ).tag(config=True) + auto_match = Bool( + False, + help=""" + Automatically add/delete closing bracket or quote when opening bracket or quote is entered/deleted. + Brackets: (), [], {} + Quotes: '', \"\" + """, + ).tag(config=True) + mouse_support = Bool(False, help="Enable mouse support in the prompt\n(Note: prevents selecting text with the mouse)" ).tag(config=True) @@ -171,16 +278,21 @@ class TerminalInteractiveShell(InteractiveShell): if self.pt_app: self.pt_app.editing_mode = getattr(EditingMode, change.new.upper()) - @observe('autoformatter') - def _autoformatter_changed(self, change): - formatter = change.new + def _set_formatter(self, formatter): if formatter is None: self.reformat_handler = lambda x:x elif formatter == 'black': self.reformat_handler = black_reformat_handler + elif formatter == "yapf": + self.reformat_handler = yapf_reformat_handler else: raise ValueError + @observe("autoformatter") + def _autoformatter_changed(self, change): + formatter = change.new + self._set_formatter(formatter) + @observe('highlighting_style') @observe('colors') def _highlighting_style_changed(self, change): @@ -195,10 +307,12 @@ class TerminalInteractiveShell(InteractiveShell): ).tag(config=True) true_color = Bool(False, - help=("Use 24bit colors instead of 256 colors in prompt highlighting. " - "If your terminal supports true color, the following command " - "should print 'TRUECOLOR' in orange: " - "printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\"") + help="""Use 24bit colors instead of 256 colors in prompt highlighting. 
+ If your terminal supports true color, the following command should + print ``TRUECOLOR`` in orange:: + + printf \"\\x1b[38;2;255;100;0mTRUECOLOR\\x1b[0m\\n\" + """, ).tag(config=True) editor = Unicode(get_default_editor(), @@ -256,6 +370,29 @@ class TerminalInteractiveShell(InteractiveShell): help="Allows to enable/disable the prompt toolkit history search" ).tag(config=True) + autosuggestions_provider = Unicode( + "AutoSuggestFromHistory", + help="Specifies from which source automatic suggestions are provided. " + "Can be set to `'AutoSuggestFromHistory`' or `None` to disable" + "automatic suggestions. Default is `'AutoSuggestFromHistory`'.", + allow_none=True, + ).tag(config=True) + + def _set_autosuggestions(self, provider): + if provider is None: + self.auto_suggest = None + elif provider == "AutoSuggestFromHistory": + self.auto_suggest = AutoSuggestFromHistory() + else: + raise ValueError("No valid provider.") + if self.pt_app: + self.pt_app.auto_suggest = self.auto_suggest + + @observe("autosuggestions_provider") + def _autosuggestions_provider_changed(self, change): + provider = change.new + self._set_autosuggestions(provider) + prompt_includes_vi_mode = Bool(True, help="Display the current vi mode (when using vi editing mode)." 
).tag(config=True) @@ -276,9 +413,7 @@ class TerminalInteractiveShell(InteractiveShell): def init_display_formatter(self): super(TerminalInteractiveShell, self).init_display_formatter() # terminal only supports plain text - self.display_formatter.active_types = ['text/plain'] - # disable `_ipython_display_` - self.display_formatter.ipython_display_formatter.enabled = False + self.display_formatter.active_types = ["text/plain"] def init_prompt_toolkit_cli(self): if self.simple_prompt: @@ -297,16 +432,9 @@ class TerminalInteractiveShell(InteractiveShell): # Set up keyboard shortcuts key_bindings = create_ipython_shortcuts(self) + # Pre-populate history from IPython's history database - history = InMemoryHistory() - last_cell = u"" - for __, ___, cell in self.history_manager.get_tail(self.history_load_length, - include_latest=True): - # Ignore blank lines and consecutive duplicates - cell = cell.rstrip() - if cell and (cell != last_cell): - history.append_string(cell) - last_cell = cell + history = PtkHistoryAdapter(self) self._style = self._make_style_from_name_or_cls(self.highlighting_style) self.style = DynamicStyle(lambda: self._style) @@ -315,18 +443,20 @@ class TerminalInteractiveShell(InteractiveShell): self.pt_loop = asyncio.new_event_loop() self.pt_app = PromptSession( - editing_mode=editing_mode, - key_bindings=key_bindings, - history=history, - completer=IPythonPTCompleter(shell=self), - enable_history_search = self.enable_history_search, - style=self.style, - include_default_pygments_style=False, - mouse_support=self.mouse_support, - enable_open_in_editor=self.extra_open_editor_shortcuts, - color_depth=self.color_depth, - tempfile_suffix=".py", - **self._extra_prompt_options()) + auto_suggest=self.auto_suggest, + editing_mode=editing_mode, + key_bindings=key_bindings, + history=history, + completer=IPythonPTCompleter(shell=self), + enable_history_search=self.enable_history_search, + style=self.style, + include_default_pygments_style=False, + 
mouse_support=self.mouse_support, + enable_open_in_editor=self.extra_open_editor_shortcuts, + color_depth=self.color_depth, + tempfile_suffix=".py", + **self._extra_prompt_options() + ) def _make_style_from_name_or_cls(self, name_or_cls): """ @@ -349,16 +479,16 @@ class TerminalInteractiveShell(InteractiveShell): # looks like. These tweaks to the default theme help with that. style_cls = get_style_by_name('default') style_overrides.update({ - Token.Number: '#007700', + Token.Number: '#ansigreen', Token.Operator: 'noinherit', - Token.String: '#BB6622', - Token.Name.Function: '#2080D0', - Token.Name.Class: 'bold #2080D0', - Token.Name.Namespace: 'bold #2080D0', + Token.String: '#ansiyellow', + Token.Name.Function: '#ansiblue', + Token.Name.Class: 'bold #ansiblue', + Token.Name.Namespace: 'bold #ansiblue', Token.Name.Variable.Magic: '#ansiblue', - Token.Prompt: '#009900', + Token.Prompt: '#ansigreen', Token.PromptNum: '#ansibrightgreen bold', - Token.OutPrompt: '#990000', + Token.OutPrompt: '#ansired', Token.OutPromptNum: '#ansibrightred bold', }) @@ -382,9 +512,9 @@ class TerminalInteractiveShell(InteractiveShell): else: style_cls = name_or_cls style_overrides = { - Token.Prompt: '#009900', + Token.Prompt: '#ansigreen', Token.PromptNum: '#ansibrightgreen bold', - Token.OutPrompt: '#990000', + Token.OutPrompt: '#ansired', Token.OutPromptNum: '#ansibrightred bold', } style_overrides.update(self.highlighting_style_overrides) @@ -461,14 +591,14 @@ class TerminalInteractiveShell(InteractiveShell): # while/true inside which will freeze the prompt. policy = asyncio.get_event_loop_policy() - try: - old_loop = policy.get_event_loop() - except RuntimeError: - # This happens when the the event loop is closed, - # e.g. by calling `asyncio.run()`. - old_loop = None - - policy.set_event_loop(self.pt_loop) + old_loop = get_asyncio_loop() + + # FIXME: prompt_toolkit is using the deprecated `asyncio.get_event_loop` + # to get the current event loop. 
+ # This will probably be replaced by an attribute or input argument, + # at which point we can stop calling the soon-to-be-deprecated `set_event_loop` here. + if old_loop is not self.pt_loop: + policy.set_event_loop(self.pt_loop) try: with patch_stdout(raw=True): text = self.pt_app.prompt( @@ -476,7 +606,7 @@ class TerminalInteractiveShell(InteractiveShell): **self._extra_prompt_options()) finally: # Restore the original event loop. - if old_loop is not None: + if old_loop is not None and old_loop is not self.pt_loop: policy.set_event_loop(old_loop) return text @@ -484,7 +614,6 @@ class TerminalInteractiveShell(InteractiveShell): def enable_win_unicode_console(self): # Since IPython 7.10 doesn't support python < 3.6 and PEP 528, Python uses the unicode APIs for the Windows # console by default, so WUC shouldn't be needed. - from warnings import warn warn("`enable_win_unicode_console` is deprecated since IPython 7.10, does not do anything and will be removed in the future", DeprecationWarning, stacklevel=2) @@ -496,16 +625,6 @@ class TerminalInteractiveShell(InteractiveShell): import colorama colorama.init() - # For some reason we make these wrappers around stdout/stderr. - # For now, we need to reset them so all output gets coloured. - # https://github.com/ipython/ipython/issues/8669 - # io.std* are deprecated, but don't show our own deprecation warnings - # during initialization of the deprecated API. 
- with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - io.stdout = io.IOStream(sys.stdout) - io.stderr = io.IOStream(sys.stderr) - def init_magics(self): super(TerminalInteractiveShell, self).init_magics() self.register_magics(TerminalMagics) @@ -525,22 +644,19 @@ class TerminalInteractiveShell(InteractiveShell): def __init__(self, *args, **kwargs): super(TerminalInteractiveShell, self).__init__(*args, **kwargs) + self._set_autosuggestions(self.autosuggestions_provider) self.init_prompt_toolkit_cli() self.init_term_title() self.keep_running = True + self._set_formatter(self.autoformatter) - self.debugger_history = InMemoryHistory() def ask_exit(self): self.keep_running = False rl_next_input = None - def interact(self, display_banner=DISPLAY_BANNER_DEPRECATED): - - if display_banner is not DISPLAY_BANNER_DEPRECATED: - warn('interact `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) - + def interact(self): self.keep_running = True while self.keep_running: print(self.separate_in, end='') @@ -556,11 +672,9 @@ class TerminalInteractiveShell(InteractiveShell): if code: self.run_cell(code, store_history=True) - def mainloop(self, display_banner=DISPLAY_BANNER_DEPRECATED): + def mainloop(self): # An extra layer of protection in case someone mashing Ctrl-C breaks # out of our internal code. - if display_banner is not DISPLAY_BANNER_DEPRECATED: - warn('mainloop `display_banner` argument is deprecated since IPython 5.0. Call `show_banner()` if needed.', DeprecationWarning, stacklevel=2) while True: try: self.interact() @@ -577,6 +691,13 @@ class TerminalInteractiveShell(InteractiveShell): self.restore_term_title() + # try to call some at-exit operation optimistically as some things can't + # be done during interpreter shutdown. this is technically inaccurate as + # this make mainlool not re-callable, but that should be a rare if not + # in existent use case. 
+ + self._atexit_once() + _inputhook = None def inputhook(self, context): @@ -601,7 +722,7 @@ class TerminalInteractiveShell(InteractiveShell): # When we integrate the asyncio event loop, run the UI in the # same event loop as the rest of the code. don't use an actual # input hook. (Asyncio is not made for nesting event loops.) - self.pt_loop = asyncio.get_event_loop() + self.pt_loop = get_asyncio_loop() elif self._inputhook: # If an inputhook was set, create a new asyncio event loop with diff --git a/contrib/python/ipython/py3/IPython/terminal/ipapp.py b/contrib/python/ipython/py3/IPython/terminal/ipapp.py index 1a3c6c791b..a87eb2f443 100644 --- a/contrib/python/ipython/py3/IPython/terminal/ipapp.py +++ b/contrib/python/ipython/py3/IPython/terminal/ipapp.py @@ -210,26 +210,6 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): StoreMagics, ] - deprecated_subcommands = dict( - qtconsole=('qtconsole.qtconsoleapp.JupyterQtConsoleApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter Qt Console.""" - ), - notebook=('notebook.notebookapp.NotebookApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter HTML Notebook Server.""" - ), - console=('jupyter_console.app.ZMQTerminalIPythonApp', - """DEPRECATED, Will be removed in IPython 6.0 : Launch the Jupyter terminal-based Console.""" - ), - nbconvert=('nbconvert.nbconvertapp.NbConvertApp', - "DEPRECATED, Will be removed in IPython 6.0 : Convert notebooks to/from other formats." - ), - trust=('nbformat.sign.TrustNotebookApp', - "DEPRECATED, Will be removed in IPython 6.0 : Sign notebooks to trust their potentially unsafe contents at load." - ), - kernelspec=('jupyter_client.kernelspecapp.KernelSpecApp', - "DEPRECATED, Will be removed in IPython 6.0 : Manage Jupyter kernel specifications." - ), - ) subcommands = dict( profile = ("IPython.core.profileapp.ProfileApp", "Create and manage IPython profiles." 
@@ -244,11 +224,7 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): "Manage the IPython history database." ), ) - deprecated_subcommands['install-nbextension'] = ( - "notebook.nbextensions.InstallNBExtensionApp", - "DEPRECATED, Will be removed in IPython 6.0 : Install Jupyter notebook extension files" - ) - subcommands.update(deprecated_subcommands) + # *do* autocreate requested profile, but don't create the config file. auto_create=Bool(True) @@ -288,22 +264,6 @@ class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): # internal, not-configurable something_to_run=Bool(False) - def parse_command_line(self, argv=None): - """override to allow old '-pylab' flag with deprecation warning""" - - argv = sys.argv[1:] if argv is None else argv - - if '-pylab' in argv: - # deprecated `-pylab` given, - # warn and transform into current syntax - argv = argv[:] # copy, don't clobber - idx = argv.index('-pylab') - warnings.warn("`-pylab` flag has been deprecated.\n" - " Use `--matplotlib <backend>` and import pylab manually.") - argv[idx] = '--pylab' - - return super(TerminalIPythonApp, self).parse_command_line(argv) - @catch_config_error def initialize(self, argv=None): """Do actions after construct, but before starting the app.""" diff --git a/contrib/python/ipython/py3/IPython/terminal/magics.py b/contrib/python/ipython/py3/IPython/terminal/magics.py index 42231c3f80..206ff20a0f 100644 --- a/contrib/python/ipython/py3/IPython/terminal/magics.py +++ b/contrib/python/ipython/py3/IPython/terminal/magics.py @@ -11,6 +11,7 @@ import sys from IPython.core.error import TryNext, UsageError from IPython.core.magic import Magics, magics_class, line_magic from IPython.lib.clipboard import ClipboardEmpty +from IPython.testing.skipdoctest import skip_doctest from IPython.utils.text import SList, strip_email_quotes from IPython.utils import py3compat @@ -52,7 +53,7 @@ class TerminalMagics(Magics): self.shell.user_ns['pasted_block'] = b 
self.shell.using_paste_magics = True try: - self.shell.run_cell(b) + self.shell.run_cell(b, store_history=True) finally: self.shell.using_paste_magics = False @@ -83,6 +84,7 @@ class TerminalMagics(Magics): self.shell.set_autoindent() print("Automatic indentation is:",['OFF','ON'][self.shell.autoindent]) + @skip_doctest @line_magic def cpaste(self, parameter_s=''): """Paste & execute a pre-formatted code block from clipboard. @@ -111,9 +113,9 @@ class TerminalMagics(Magics): Shell escapes are not supported (yet). - See also + See Also -------- - paste: automatically pull code from clipboard. + paste : automatically pull code from clipboard. Examples -------- @@ -174,9 +176,9 @@ class TerminalMagics(Magics): IPython statements (magics, shell escapes) are not supported (yet). - See also + See Also -------- - cpaste: manually paste code into terminal until you mark its end. + cpaste : manually paste code into terminal until you mark its end. """ opts, name = self.parse_options(parameter_s, 'rq', mode='string') if 'r' in opts: @@ -191,16 +193,15 @@ class TerminalMagics(Magics): else: error('Could not get text from the clipboard.') return - except ClipboardEmpty: - raise UsageError("The clipboard appears to be empty") + except ClipboardEmpty as e: + raise UsageError("The clipboard appears to be empty") from e # By default, echo back to terminal unless quiet mode is requested if 'q' not in opts: - write = self.shell.write - write(self.shell.pycolorize(block)) - if not block.endswith('\n'): - write('\n') - write("## -- End pasted text --\n") + sys.stdout.write(self.shell.pycolorize(block)) + if not block.endswith("\n"): + sys.stdout.write("\n") + sys.stdout.write("## -- End pasted text --\n") self.store_or_execute(block, name) diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py index 95cf194f86..2d8c128208 100644 --- 
a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/asyncio.py @@ -27,15 +27,12 @@ prompt_toolkit`s `patch_stdout`):: In [4]: asyncio.ensure_future(f()) """ -import asyncio from prompt_toolkit import __version__ as ptk_version -PTK3 = ptk_version.startswith('3.') +from IPython.core.async_helpers import get_asyncio_loop +PTK3 = ptk_version.startswith('3.') -# Keep reference to the original asyncio loop, because getting the event loop -# within the input hook would return the other loop. -loop = asyncio.get_event_loop() def inputhook(context): @@ -52,6 +49,9 @@ def inputhook(context): # For prompt_toolkit 2.0, we can run the current asyncio event loop, # because prompt_toolkit 2.0 uses a different event loop internally. + # get the persistent asyncio event loop + loop = get_asyncio_loop() + def stop(): loop.stop() @@ -61,4 +61,3 @@ def inputhook(context): loop.run_forever() finally: loop.remove_reader(fileno) - diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py index f6d54a55b4..835aadfc97 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/glut.py @@ -44,10 +44,10 @@ if sys.platform == 'darwin': doc='glutCheckLoop( ) -> None', argNames=(), ) - except AttributeError: + except AttributeError as e: raise RuntimeError( - '''Your glut implementation does not allow interactive sessions''' - '''Consider installing freeglut.''') + '''Your glut implementation does not allow interactive sessions. 
''' + '''Consider installing freeglut.''') from e glutMainLoopEvent = glutCheckLoop elif glut.HAVE_FREEGLUT: glutMainLoopEvent = glut.glutMainLoopEvent diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py index 80440196fb..2754820efc 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/osx.py @@ -9,7 +9,7 @@ import ctypes import ctypes.util from threading import Event -objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library('objc')) +objc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("objc")) # type: ignore void_p = ctypes.c_void_p @@ -37,7 +37,7 @@ def C(classname): # end obj-c boilerplate from appnope # CoreFoundation C-API calls we will use: -CoreFoundation = ctypes.cdll.LoadLibrary(ctypes.util.find_library('CoreFoundation')) +CoreFoundation = ctypes.cdll.LoadLibrary(ctypes.util.find_library("CoreFoundation")) # type: ignore CFFileDescriptorCreate = CoreFoundation.CFFileDescriptorCreate CFFileDescriptorCreate.restype = void_p diff --git a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py index b999f5aa17..f1e710aff5 100644 --- a/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py +++ b/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/qt.py @@ -74,6 +74,8 @@ def inputhook(context): ) try: # connect the callback we care about before we turn it on + # lambda is necessary as PyQT inspect the function signature to know + # what arguments to pass to. 
See https://github.com/ipython/ipython/pull/12355 notifier.activated.connect(lambda: event_loop.exit()) notifier.setEnabled(True) # only start the event loop we are not already flipped diff --git a/contrib/python/ipython/py3/IPython/terminal/ptshell.py b/contrib/python/ipython/py3/IPython/terminal/ptshell.py deleted file mode 100644 index 666d3c5b51..0000000000 --- a/contrib/python/ipython/py3/IPython/terminal/ptshell.py +++ /dev/null @@ -1,8 +0,0 @@ -raise DeprecationWarning("""DEPRECATED: - -After Popular request and decision from the BDFL: -`IPython.terminal.ptshell` has been moved back to `IPython.terminal.interactiveshell` -during the beta cycle (after IPython 5.0.beta3) Sorry about that. - -This file will be removed in 5.0 rc or final. -""") diff --git a/contrib/python/ipython/py3/IPython/terminal/ptutils.py b/contrib/python/ipython/py3/IPython/terminal/ptutils.py index 3e5d3c5c77..c390d4972a 100644 --- a/contrib/python/ipython/py3/IPython/terminal/ptutils.py +++ b/contrib/python/ipython/py3/IPython/terminal/ptutils.py @@ -9,8 +9,6 @@ not to be used outside IPython. 
import unicodedata from wcwidth import wcwidth -import sys -import traceback from IPython.core.completer import ( provisionalcompleter, cursor_to_position, @@ -22,6 +20,8 @@ from prompt_toolkit.patch_stdout import patch_stdout import pygments.lexers as pygments_lexers import os +import sys +import traceback _completion_sentinel = object() diff --git a/contrib/python/ipython/py3/IPython/terminal/shortcuts.py b/contrib/python/ipython/py3/IPython/terminal/shortcuts.py index a23fa091a0..615397abc5 100644 --- a/contrib/python/ipython/py3/IPython/terminal/shortcuts.py +++ b/contrib/python/ipython/py3/IPython/terminal/shortcuts.py @@ -9,6 +9,8 @@ Module to define and register Terminal IPython shortcuts with import warnings import signal import sys +import re +import os from typing import Callable @@ -18,6 +20,8 @@ from prompt_toolkit.filters import (has_focus, has_selection, Condition, vi_insert_mode, emacs_insert_mode, has_completions, vi_mode) from prompt_toolkit.key_binding.bindings.completion import display_completions_like_readline from prompt_toolkit.key_binding import KeyBindings +from prompt_toolkit.key_binding.bindings import named_commands as nc +from prompt_toolkit.key_binding.vi_state import InputMode, ViState from IPython.utils.decorators import undoc @@ -53,7 +57,7 @@ def create_ipython_shortcuts(shell): & insert_mode ))(reformat_and_execute) - kb.add('c-\\')(force_exit) + kb.add("c-\\")(quit) kb.add('c-p', filter=(vi_insert_mode & has_focus(DEFAULT_BUFFER)) )(previous_history_or_previous_completion) @@ -82,15 +86,274 @@ def create_ipython_shortcuts(shell): kb.add('f2', filter=has_focus(DEFAULT_BUFFER))(open_input_in_editor) - if shell.display_completions == 'readlinelike': - kb.add('c-i', filter=(has_focus(DEFAULT_BUFFER) - & ~has_selection - & insert_mode - & ~cursor_in_leading_ws - ))(display_completions_like_readline) + @Condition + def auto_match(): + return shell.auto_match - if sys.platform == 'win32': - kb.add('c-v', filter=(has_focus(DEFAULT_BUFFER) 
& ~vi_mode))(win_paste) + focused_insert = (vi_insert_mode | emacs_insert_mode) & has_focus(DEFAULT_BUFFER) + _preceding_text_cache = {} + _following_text_cache = {} + + def preceding_text(pattern): + try: + return _preceding_text_cache[pattern] + except KeyError: + pass + m = re.compile(pattern) + + def _preceding_text(): + app = get_app() + return bool(m.match(app.current_buffer.document.current_line_before_cursor)) + + condition = Condition(_preceding_text) + _preceding_text_cache[pattern] = condition + return condition + + def following_text(pattern): + try: + return _following_text_cache[pattern] + except KeyError: + pass + m = re.compile(pattern) + + def _following_text(): + app = get_app() + return bool(m.match(app.current_buffer.document.current_line_after_cursor)) + + condition = Condition(_following_text) + _following_text_cache[pattern] = condition + return condition + + # auto match + @kb.add("(", filter=focused_insert & auto_match & following_text(r"[,)}\]]|$")) + def _(event): + event.current_buffer.insert_text("()") + event.current_buffer.cursor_left() + + @kb.add("[", filter=focused_insert & auto_match & following_text(r"[,)}\]]|$")) + def _(event): + event.current_buffer.insert_text("[]") + event.current_buffer.cursor_left() + + @kb.add("{", filter=focused_insert & auto_match & following_text(r"[,)}\]]|$")) + def _(event): + event.current_buffer.insert_text("{}") + event.current_buffer.cursor_left() + + @kb.add( + '"', + filter=focused_insert + & auto_match + & preceding_text(r'^([^"]+|"[^"]*")*$') + & following_text(r"[,)}\]]|$"), + ) + def _(event): + event.current_buffer.insert_text('""') + event.current_buffer.cursor_left() + + @kb.add( + "'", + filter=focused_insert + & auto_match + & preceding_text(r"^([^']+|'[^']*')*$") + & following_text(r"[,)}\]]|$"), + ) + def _(event): + event.current_buffer.insert_text("''") + event.current_buffer.cursor_left() + + # raw string + @kb.add( + "(", filter=focused_insert & auto_match & 
preceding_text(r".*(r|R)[\"'](-*)$") + ) + def _(event): + matches = re.match( + r".*(r|R)[\"'](-*)", + event.current_buffer.document.current_line_before_cursor, + ) + dashes = matches.group(2) or "" + event.current_buffer.insert_text("()" + dashes) + event.current_buffer.cursor_left(len(dashes) + 1) + + @kb.add( + "[", filter=focused_insert & auto_match & preceding_text(r".*(r|R)[\"'](-*)$") + ) + def _(event): + matches = re.match( + r".*(r|R)[\"'](-*)", + event.current_buffer.document.current_line_before_cursor, + ) + dashes = matches.group(2) or "" + event.current_buffer.insert_text("[]" + dashes) + event.current_buffer.cursor_left(len(dashes) + 1) + + @kb.add( + "{", filter=focused_insert & auto_match & preceding_text(r".*(r|R)[\"'](-*)$") + ) + def _(event): + matches = re.match( + r".*(r|R)[\"'](-*)", + event.current_buffer.document.current_line_before_cursor, + ) + dashes = matches.group(2) or "" + event.current_buffer.insert_text("{}" + dashes) + event.current_buffer.cursor_left(len(dashes) + 1) + + # just move cursor + @kb.add(")", filter=focused_insert & auto_match & following_text(r"^\)")) + @kb.add("]", filter=focused_insert & auto_match & following_text(r"^\]")) + @kb.add("}", filter=focused_insert & auto_match & following_text(r"^\}")) + @kb.add('"', filter=focused_insert & auto_match & following_text('^"')) + @kb.add("'", filter=focused_insert & auto_match & following_text("^'")) + def _(event): + event.current_buffer.cursor_right() + + @kb.add( + "backspace", + filter=focused_insert + & preceding_text(r".*\($") + & auto_match + & following_text(r"^\)"), + ) + @kb.add( + "backspace", + filter=focused_insert + & preceding_text(r".*\[$") + & auto_match + & following_text(r"^\]"), + ) + @kb.add( + "backspace", + filter=focused_insert + & preceding_text(r".*\{$") + & auto_match + & following_text(r"^\}"), + ) + @kb.add( + "backspace", + filter=focused_insert + & preceding_text('.*"$') + & auto_match + & following_text('^"'), + ) + @kb.add( + 
"backspace", + filter=focused_insert + & preceding_text(r".*'$") + & auto_match + & following_text(r"^'"), + ) + def _(event): + event.current_buffer.delete() + event.current_buffer.delete_before_cursor() + + if shell.display_completions == "readlinelike": + kb.add( + "c-i", + filter=( + has_focus(DEFAULT_BUFFER) + & ~has_selection + & insert_mode + & ~cursor_in_leading_ws + ), + )(display_completions_like_readline) + + if sys.platform == "win32": + kb.add("c-v", filter=(has_focus(DEFAULT_BUFFER) & ~vi_mode))(win_paste) + + @Condition + def ebivim(): + return shell.emacs_bindings_in_vi_insert_mode + + focused_insert_vi = has_focus(DEFAULT_BUFFER) & vi_insert_mode + + # Needed for to accept autosuggestions in vi insert mode + def _apply_autosuggest(event): + b = event.current_buffer + suggestion = b.suggestion + if suggestion is not None and suggestion.text: + b.insert_text(suggestion.text) + else: + nc.end_of_line(event) + + @kb.add("end", filter=has_focus(DEFAULT_BUFFER) & (ebivim | ~vi_insert_mode)) + def _(event): + _apply_autosuggest(event) + + @kb.add("c-e", filter=focused_insert_vi & ebivim) + def _(event): + _apply_autosuggest(event) + + @kb.add("c-f", filter=focused_insert_vi) + def _(event): + b = event.current_buffer + suggestion = b.suggestion + if suggestion: + b.insert_text(suggestion.text) + else: + nc.forward_char(event) + + @kb.add("escape", "f", filter=focused_insert_vi & ebivim) + def _(event): + b = event.current_buffer + suggestion = b.suggestion + if suggestion: + t = re.split(r"(\S+\s+)", suggestion.text) + b.insert_text(next((x for x in t if x), "")) + else: + nc.forward_word(event) + + # Simple Control keybindings + key_cmd_dict = { + "c-a": nc.beginning_of_line, + "c-b": nc.backward_char, + "c-k": nc.kill_line, + "c-w": nc.backward_kill_word, + "c-y": nc.yank, + "c-_": nc.undo, + } + + for key, cmd in key_cmd_dict.items(): + kb.add(key, filter=focused_insert_vi & ebivim)(cmd) + + # Alt and Combo Control keybindings + keys_cmd_dict = { + # 
Control Combos + ("c-x", "c-e"): nc.edit_and_execute, + ("c-x", "e"): nc.edit_and_execute, + # Alt + ("escape", "b"): nc.backward_word, + ("escape", "c"): nc.capitalize_word, + ("escape", "d"): nc.kill_word, + ("escape", "h"): nc.backward_kill_word, + ("escape", "l"): nc.downcase_word, + ("escape", "u"): nc.uppercase_word, + ("escape", "y"): nc.yank_pop, + ("escape", "."): nc.yank_last_arg, + } + + for keys, cmd in keys_cmd_dict.items(): + kb.add(*keys, filter=focused_insert_vi & ebivim)(cmd) + + def get_input_mode(self): + app = get_app() + app.ttimeoutlen = shell.ttimeoutlen + app.timeoutlen = shell.timeoutlen + + return self._input_mode + + def set_input_mode(self, mode): + shape = {InputMode.NAVIGATION: 2, InputMode.REPLACE: 4}.get(mode, 6) + cursor = "\x1b[{} q".format(shape) + + sys.stdout.write(cursor) + sys.stdout.flush() + + self._input_mode = mode + + if shell.editing_mode == "vi" and shell.modal_cursor: + ViState._input_mode = InputMode.INSERT + ViState.input_mode = property(get_input_mode, set_input_mode) return kb @@ -196,11 +459,16 @@ def reset_search_buffer(event): def suspend_to_bg(event): event.app.suspend_to_background() -def force_exit(event): +def quit(event): """ - Force exit (with a non-zero return value) + On platforms that support SIGQUIT, send SIGQUIT to the current process. + On other platforms, just exit the process with a message. 
""" - sys.exit("Quit") + sigquit = getattr(signal, "SIGQUIT", None) + if sigquit is not None: + os.kill(0, signal.SIGQUIT) + else: + sys.exit("Quit") def indent_buffer(event): event.current_buffer.insert_text(' ' * 4) @@ -273,4 +541,4 @@ if sys.platform == 'win32': return except ClipboardEmpty: return - event.current_buffer.insert_text(text.replace('\t', ' ' * 4)) + event.current_buffer.insert_text(text.replace("\t", " " * 4)) diff --git a/contrib/python/ipython/py3/IPython/testing/__init__.py b/contrib/python/ipython/py3/IPython/testing/__init__.py index 552608792d..8fcd65ea41 100644 --- a/contrib/python/ipython/py3/IPython/testing/__init__.py +++ b/contrib/python/ipython/py3/IPython/testing/__init__.py @@ -12,38 +12,9 @@ import os #----------------------------------------------------------------------------- -# Functions -#----------------------------------------------------------------------------- - -# User-level entry point for testing -def test(**kwargs): - """Run the entire IPython test suite. - - Any of the options for run_iptestall() may be passed as keyword arguments. - - For example:: - - IPython.test(testgroups=['lib', 'config', 'utils'], fast=2) - - will run those three sections of the test suite, using two processes. 
- """ - - # Do the import internally, so that this function doesn't increase total - # import time - from .iptestcontroller import run_iptestall, default_options - options = default_options() - for name, val in kwargs.items(): - setattr(options, name, val) - run_iptestall(options) - -#----------------------------------------------------------------------------- # Constants #----------------------------------------------------------------------------- # We scale all timeouts via this factor, slow machines can increase it IPYTHON_TESTING_TIMEOUT_SCALE = float(os.getenv( 'IPYTHON_TESTING_TIMEOUT_SCALE', 1)) - -# So nose doesn't try to run this as a test itself and we end up with an -# infinite test loop -test.__test__ = False diff --git a/contrib/python/ipython/py3/IPython/testing/__main__.py b/contrib/python/ipython/py3/IPython/testing/__main__.py deleted file mode 100644 index 4b0bb8ba9c..0000000000 --- a/contrib/python/ipython/py3/IPython/testing/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -if __name__ == '__main__': - from IPython.testing import iptestcontroller - iptestcontroller.main() diff --git a/contrib/python/ipython/py3/IPython/testing/decorators.py b/contrib/python/ipython/py3/IPython/testing/decorators.py index 4539a72a8c..644a513a8c 100644 --- a/contrib/python/ipython/py3/IPython/testing/decorators.py +++ b/contrib/python/ipython/py3/IPython/testing/decorators.py @@ -44,11 +44,6 @@ from decorator import decorator # Expose the unittest-driven decorators from .ipunittest import ipdoctest, ipdocstring -# Grab the numpy-specific decorators which we keep in a file that we -# occasionally update from upstream: decorators.py is a copy of -# numpy.testing.decorators, we expose all of it here. 
-from IPython.external.decorators import knownfailureif - #----------------------------------------------------------------------------- # Classes and functions #----------------------------------------------------------------------------- @@ -66,99 +61,9 @@ def as_unittest(func): # Utility functions -def apply_wrapper(wrapper, func): - """Apply a wrapper to a function for decoration. - - This mixes Michele Simionato's decorator tool with nose's make_decorator, - to apply a wrapper in a decorator so that all nose attributes, as well as - function signature and other properties, survive the decoration cleanly. - This will ensure that wrapped functions can still be well introspected via - IPython, for example. - """ - warnings.warn("The function `apply_wrapper` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - import nose.tools - - return decorator(wrapper,nose.tools.make_decorator(func)(wrapper)) - - -def make_label_dec(label, ds=None): - """Factory function to create a decorator that applies one or more labels. - - Parameters - ---------- - label : string or sequence - One or more labels that will be applied by the decorator to the functions - it decorates. Labels are attributes of the decorated function with their - value set to True. - - ds : string - An optional docstring for the resulting decorator. If not given, a - default docstring is auto-generated. - - Returns - ------- - A decorator. - - Examples - -------- - - A simple labeling decorator: - - >>> slow = make_label_dec('slow') - >>> slow.__doc__ - "Labels a test as 'slow'." - - And one that uses multiple labels and a custom docstring: - - >>> rare = make_label_dec(['slow','hard'], - ... "Mix labels 'slow' and 'hard' for rare tests.") - >>> rare.__doc__ - "Mix labels 'slow' and 'hard' for rare tests." - - Now, let's test using this one: - >>> @rare - ... def f(): pass - ... 
- >>> - >>> f.slow - True - >>> f.hard - True - """ - - warnings.warn("The function `make_label_dec` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - if isinstance(label, str): - labels = [label] - else: - labels = label - - # Validate that the given label(s) are OK for use in setattr() by doing a - # dry run on a dummy function. - tmp = lambda : None - for label in labels: - setattr(tmp,label,True) - - # This is the actual decorator we'll return - def decor(f): - for label in labels: - setattr(f,label,True) - return f - # Apply the user's docstring, or autogenerate a basic one - if ds is None: - ds = "Labels a test as %r." % label - decor.__doc__ = ds - - return decor - - -# Inspired by numpy's skipif, but uses the full apply_wrapper utility to -# preserve function metadata better and allows the skip condition to be a -# callable. def skipif(skip_condition, msg=None): - ''' Make function raise SkipTest exception if skip_condition is true + """Make function raise SkipTest exception if skip_condition is true Parameters ---------- @@ -177,57 +82,15 @@ def skipif(skip_condition, msg=None): Decorator, which, when applied to a function, causes SkipTest to be raised when the skip_condition was True, and the function to be called normally otherwise. + """ + if msg is None: + msg = "Test skipped due to test condition." + + import pytest + + assert isinstance(skip_condition, bool) + return pytest.mark.skipif(skip_condition, reason=msg) - Notes - ----- - You will see from the code that we had to further decorate the - decorator with the nose.tools.make_decorator function in order to - transmit function name, and various other metadata. - ''' - - def skip_decorator(f): - # Local import to avoid a hard nose dependency and only incur the - # import time overhead at actual test-time. - import nose - - # Allow for both boolean or callable skip conditions. 
- if callable(skip_condition): - skip_val = skip_condition - else: - skip_val = lambda : skip_condition - - def get_msg(func,msg=None): - """Skip message with information about function being skipped.""" - if msg is None: out = 'Test skipped due to test condition.' - else: out = msg - return "Skipping test: %s. %s" % (func.__name__,out) - - # We need to define *two* skippers because Python doesn't allow both - # return with value and yield inside the same function. - def skipper_func(*args, **kwargs): - """Skipper for normal test functions.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - return f(*args, **kwargs) - - def skipper_gen(*args, **kwargs): - """Skipper for test generators.""" - if skip_val(): - raise nose.SkipTest(get_msg(f,msg)) - else: - for x in f(*args, **kwargs): - yield x - - # Choose the right skipper to use when building the actual generator. - if nose.util.isgenerator(f): - skipper = skipper_gen - else: - skipper = skipper_func - - return nose.tools.make_decorator(f)(skipper) - - return skip_decorator # A version with the condition set to true, common case just to attach a message # to a skip decorator @@ -254,12 +117,7 @@ def skip(msg=None): def onlyif(condition, msg): """The reverse from skipif, see skipif for details.""" - if callable(condition): - skip_condition = lambda : not condition() - else: - skip_condition = lambda : not condition - - return skipif(skip_condition, msg) + return skipif(not condition, msg) #----------------------------------------------------------------------------- # Utility functions for decorators @@ -278,20 +136,6 @@ def module_not_available(module): return mod_not_avail -def decorated_dummy(dec, name): - """Return a dummy function decorated with dec, with the given name. 
- - Examples - -------- - import IPython.testing.decorators as dec - setup = dec.decorated_dummy(dec.skip_if_no_x11, __name__) - """ - warnings.warn("The function `decorated_dummy` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - dummy = lambda: None - dummy.__name__ = name - return dec(dummy) - #----------------------------------------------------------------------------- # Decorators for public use @@ -308,9 +152,6 @@ skip_if_not_win32 = skipif(sys.platform != 'win32', "This test only runs under Windows") skip_if_not_linux = skipif(not sys.platform.startswith('linux'), "This test only runs under Linux") -skip_if_not_osx = skipif(sys.platform != 'darwin', - "This test only runs under OSX") - _x11_skip_cond = (sys.platform not in ('darwin', 'win32') and os.environ.get('DISPLAY', '') == '') @@ -318,17 +159,6 @@ _x11_skip_msg = "Skipped under *nix when X11/XOrg not available" skip_if_no_x11 = skipif(_x11_skip_cond, _x11_skip_msg) - -# Decorators to skip certain tests on specific platform/python combinations -skip_win32_py38 = skipif(sys.version_info > (3,8) and os.name == 'nt') - - -# not a decorator itself, returns a dummy function to be used as setup -def skip_file_no_x11(name): - warnings.warn("The function `skip_file_no_x11` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - return decorated_dummy(skip_if_no_x11, name) if _x11_skip_cond else None - # Other skip decorators # generic skip without module @@ -338,10 +168,6 @@ skipif_not_numpy = skip_without('numpy') skipif_not_matplotlib = skip_without('matplotlib') -skipif_not_sympy = skip_without('sympy') - -skip_known_failure = knownfailureif(True,'This test is known to fail') - # A null 'decorator', useful to make more readable code that needs to pick # between different decorators based on OS or other conditions null_deco = lambda f: f @@ -364,20 +190,13 @@ def onlyif_cmds_exist(*commands): """ Decorator to skip test when at least one of `commands` is not found. 
""" + assert ( + os.environ.get("IPTEST_WORKING_DIR", None) is None + ), "iptest deprecated since IPython 8.0" for cmd in commands: + reason = f"This test runs only if command '{cmd}' is installed" if not shutil.which(cmd): - return skip("This test runs only if command '{0}' " - "is installed".format(cmd)) - return null_deco + import pytest -def onlyif_any_cmd_exists(*commands): - """ - Decorator to skip test unless at least one of `commands` is found. - """ - warnings.warn("The function `onlyif_any_cmd_exists` is deprecated since IPython 4.0", - DeprecationWarning, stacklevel=2) - for cmd in commands: - if shutil.which(cmd): - return null_deco - return skip("This test runs only if one of the commands {0} " - "is installed".format(commands)) + return pytest.mark.skip(reason=reason) + return null_deco diff --git a/contrib/python/ipython/py3/IPython/testing/globalipapp.py b/contrib/python/ipython/py3/IPython/testing/globalipapp.py index c435f9d087..698e3d845a 100644 --- a/contrib/python/ipython/py3/IPython/testing/globalipapp.py +++ b/contrib/python/ipython/py3/IPython/testing/globalipapp.py @@ -14,6 +14,8 @@ import sys import types import warnings +from pathlib import Path + from . import tools from IPython.core import page @@ -21,30 +23,6 @@ from IPython.utils import io from IPython.terminal.interactiveshell import TerminalInteractiveShell -class StreamProxy(io.IOStream): - """Proxy for sys.stdout/err. This will request the stream *at call time* - allowing for nose's Capture plugin's redirection of sys.stdout/err. - - Parameters - ---------- - name : str - The name of the stream. 
This will be requested anew at every call - """ - - def __init__(self, name): - warnings.warn("StreamProxy is deprecated and unused as of IPython 5", DeprecationWarning, - stacklevel=2, - ) - self.name=name - - @property - def stream(self): - return getattr(sys, self.name) - - def flush(self): - self.stream.flush() - - def get_ipython(): # This will get replaced by the real thing once we start IPython below return start_ipython() @@ -95,7 +73,7 @@ def start_ipython(): # A few more tweaks needed for playing nicely with doctests... # remove history file - shell.tempfiles.append(config.HistoryManager.hist_file) + shell.tempfiles.append(Path(config.HistoryManager.hist_file)) # These traps are normally only active for interactive use, set them # permanently since we'll be mocking interactive sessions. diff --git a/contrib/python/ipython/py3/IPython/testing/iptest.py b/contrib/python/ipython/py3/IPython/testing/iptest.py deleted file mode 100644 index 8efcc97201..0000000000 --- a/contrib/python/ipython/py3/IPython/testing/iptest.py +++ /dev/null @@ -1,460 +0,0 @@ -# -*- coding: utf-8 -*- -"""IPython Test Suite Runner. - -This module provides a main entry point to a user script to test IPython -itself from the command line. There are two ways of running this script: - -1. With the syntax `iptest all`. This runs our entire test suite by - calling this script (with different arguments) recursively. This - causes modules and package to be tested in different processes, using nose - or trial where appropriate. -2. With the regular nose syntax, like `iptest IPython -- -vvs`. In this form - the script simply calls nose, but with special command line flags and - plugins loaded. Options after `--` are passed to nose. - -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- - -import glob -from io import BytesIO -import os -import os.path as path -import sys -from threading import Thread, Lock, Event -import warnings - -import nose.plugins.builtin -from nose.plugins.xunit import Xunit -from nose import SkipTest -from nose.core import TestProgram -from nose.plugins import Plugin -from nose.util import safe_str - -from IPython import version_info -from IPython.utils.py3compat import decode -from IPython.utils.importstring import import_item -from IPython.testing.plugin.ipdoctest import IPythonDoctest -from IPython.external.decorators import KnownFailure, knownfailureif - -pjoin = path.join - - -# Enable printing all warnings raise by IPython's modules -warnings.filterwarnings('ignore', message='.*Matplotlib is building the font cache.*', category=UserWarning, module='.*') -warnings.filterwarnings('error', message='.*', category=ResourceWarning, module='.*') -warnings.filterwarnings('error', message=".*{'config': True}.*", category=DeprecationWarning, module='IPy.*') -warnings.filterwarnings('default', message='.*', category=Warning, module='IPy.*') - -warnings.filterwarnings('error', message='.*apply_wrapper.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*make_label_dec', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*decorated_dummy.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*skip_file_no_x11.*', category=DeprecationWarning, module='.*') -warnings.filterwarnings('error', message='.*onlyif_any_cmd_exists.*', category=DeprecationWarning, module='.*') - -warnings.filterwarnings('error', message='.*disable_gui.*', category=DeprecationWarning, module='.*') - -warnings.filterwarnings('error', message='.*ExceptionColors global is deprecated.*', category=DeprecationWarning, module='.*') - -# Jedi older versions -warnings.filterwarnings( - 'error', message='.*elementwise != comparison failed and.*', 
category=FutureWarning, module='.*') - -if version_info < (6,): - # nose.tools renames all things from `camelCase` to `snake_case` which raise an - # warning with the runner they also import from standard import library. (as of Dec 2015) - # Ignore, let's revisit that in a couple of years for IPython 6. - warnings.filterwarnings( - 'ignore', message='.*Please use assertEqual instead', category=Warning, module='IPython.*') - -if version_info < (8,): - warnings.filterwarnings('ignore', message='.*Completer.complete.*', - category=PendingDeprecationWarning, module='.*') -else: - warnings.warn( - 'Completer.complete was pending deprecation and should be changed to Deprecated', FutureWarning) - - - -# ------------------------------------------------------------------------------ -# Monkeypatch Xunit to count known failures as skipped. -# ------------------------------------------------------------------------------ -def monkeypatch_xunit(): - try: - dec.knownfailureif(True)(lambda: None)() - except Exception as e: - KnownFailureTest = type(e) - - def addError(self, test, err, capt=None): - if issubclass(err[0], KnownFailureTest): - err = (SkipTest,) + err[1:] - return self.orig_addError(test, err, capt) - - Xunit.orig_addError = Xunit.addError - Xunit.addError = addError - -#----------------------------------------------------------------------------- -# Check which dependencies are installed and greater than minimum version. -#----------------------------------------------------------------------------- -def extract_version(mod): - return mod.__version__ - -def test_for(item, min_version=None, callback=extract_version): - """Test to see if item is importable, and optionally check against a minimum - version. - - If min_version is given, the default behavior is to check against the - `__version__` attribute of the item, but specifying `callback` allows you to - extract the value you are interested in. 
e.g:: - - In [1]: import sys - - In [2]: from IPython.testing.iptest import test_for - - In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info) - Out[3]: True - - """ - try: - check = import_item(item) - except (ImportError, RuntimeError): - # GTK reports Runtime error if it can't be initialized even if it's - # importable. - return False - else: - if min_version: - if callback: - # extra processing step to get version to compare - check = callback(check) - - return check >= min_version - else: - return True - -# Global dict where we can store information on what we have and what we don't -# have available at test run time -have = {'matplotlib': test_for('matplotlib'), - 'pygments': test_for('pygments'), - 'sqlite3': test_for('sqlite3')} - -#----------------------------------------------------------------------------- -# Test suite definitions -#----------------------------------------------------------------------------- - -test_group_names = ['core', - 'extensions', 'lib', 'terminal', 'testing', 'utils', - ] - -class TestSection(object): - def __init__(self, name, includes): - self.name = name - self.includes = includes - self.excludes = [] - self.dependencies = [] - self.enabled = True - - def exclude(self, module): - if not module.startswith('IPython'): - module = self.includes[0] + "." 
+ module - self.excludes.append(module.replace('.', os.sep)) - - def requires(self, *packages): - self.dependencies.extend(packages) - - @property - def will_run(self): - return self.enabled and all(have[p] for p in self.dependencies) - -# Name -> (include, exclude, dependencies_met) -test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names} - - -# Exclusions and dependencies -# --------------------------- - -# core: -sec = test_sections['core'] -if not have['sqlite3']: - sec.exclude('tests.test_history') - sec.exclude('history') -if not have['matplotlib']: - sec.exclude('pylabtools'), - sec.exclude('tests.test_pylabtools') - -# lib: -sec = test_sections['lib'] -sec.exclude('kernel') -if not have['pygments']: - sec.exclude('tests.test_lexers') -# We do this unconditionally, so that the test suite doesn't import -# gtk, changing the default encoding and masking some unicode bugs. -sec.exclude('inputhookgtk') -# We also do this unconditionally, because wx can interfere with Unix signals. -# There are currently no tests for it anyway. -sec.exclude('inputhookwx') -# Testing inputhook will need a lot of thought, to figure out -# how to have tests that don't lock up with the gui event -# loops in the picture -sec.exclude('inputhook') - -# testing: -sec = test_sections['testing'] -# These have to be skipped on win32 because they use echo, rm, cd, etc. 
-# See ticket https://github.com/ipython/ipython/issues/87 -if sys.platform == 'win32': - sec.exclude('plugin.test_exampleip') - sec.exclude('plugin.dtexample') - -# don't run jupyter_console tests found via shim -test_sections['terminal'].exclude('console') - -# extensions: -sec = test_sections['extensions'] -# This is deprecated in favour of rpy2 -sec.exclude('rmagic') -# autoreload does some strange stuff, so move it to its own test section -sec.exclude('autoreload') -sec.exclude('tests.test_autoreload') -test_sections['autoreload'] = TestSection('autoreload', - ['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload']) -test_group_names.append('autoreload') - - -#----------------------------------------------------------------------------- -# Functions and classes -#----------------------------------------------------------------------------- - -def check_exclusions_exist(): - from IPython.paths import get_ipython_package_dir - from warnings import warn - parent = os.path.dirname(get_ipython_package_dir()) - for sec in test_sections: - for pattern in sec.exclusions: - fullpath = pjoin(parent, pattern) - if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'): - warn("Excluding nonexistent file: %r" % pattern) - - -class ExclusionPlugin(Plugin): - """A nose plugin to effect our exclusions of files and directories. - """ - name = 'exclusions' - score = 3000 # Should come before any other plugins - - def __init__(self, exclude_patterns=None): - """ - Parameters - ---------- - - exclude_patterns : sequence of strings, optional - Filenames containing these patterns (as raw strings, not as regular - expressions) are excluded from the tests. 
- """ - self.exclude_patterns = exclude_patterns or [] - super(ExclusionPlugin, self).__init__() - - def options(self, parser, env=os.environ): - Plugin.options(self, parser, env) - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Override nose trying to disable plugin. - self.enabled = True - - def wantFile(self, filename): - """Return whether the given filename should be scanned for tests. - """ - if any(pat in filename for pat in self.exclude_patterns): - return False - return None - - def wantDirectory(self, directory): - """Return whether the given directory should be scanned for tests. - """ - if any(pat in directory for pat in self.exclude_patterns): - return False - return None - - -class StreamCapturer(Thread): - daemon = True # Don't hang if main thread crashes - started = False - def __init__(self, echo=False): - super(StreamCapturer, self).__init__() - self.echo = echo - self.streams = [] - self.buffer = BytesIO() - self.readfd, self.writefd = os.pipe() - self.buffer_lock = Lock() - self.stop = Event() - - def run(self): - self.started = True - - while not self.stop.is_set(): - chunk = os.read(self.readfd, 1024) - - with self.buffer_lock: - self.buffer.write(chunk) - if self.echo: - sys.stdout.write(decode(chunk)) - - os.close(self.readfd) - os.close(self.writefd) - - def reset_buffer(self): - with self.buffer_lock: - self.buffer.truncate(0) - self.buffer.seek(0) - - def get_buffer(self): - with self.buffer_lock: - return self.buffer.getvalue() - - def ensure_started(self): - if not self.started: - self.start() - - def halt(self): - """Safely stop the thread.""" - if not self.started: - return - - self.stop.set() - os.write(self.writefd, b'\0') # Ensure we're not locked in a read() - self.join() - -class SubprocessStreamCapturePlugin(Plugin): - name='subprocstreams' - def __init__(self): - Plugin.__init__(self) - self.stream_capturer = StreamCapturer() - self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 
'capture') - # This is ugly, but distant parts of the test machinery need to be able - # to redirect streams, so we make the object globally accessible. - nose.iptest_stdstreams_fileno = self.get_write_fileno - - def get_write_fileno(self): - if self.destination == 'capture': - self.stream_capturer.ensure_started() - return self.stream_capturer.writefd - elif self.destination == 'discard': - return os.open(os.devnull, os.O_WRONLY) - else: - return sys.__stdout__.fileno() - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Override nose trying to disable plugin. - if self.destination == 'capture': - self.enabled = True - - def startTest(self, test): - # Reset log capture - self.stream_capturer.reset_buffer() - - def formatFailure(self, test, err): - # Show output - ec, ev, tb = err - captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace') - if captured.strip(): - ev = safe_str(ev) - out = [ev, '>> begin captured subprocess output <<', - captured, - '>> end captured subprocess output <<'] - return ec, '\n'.join(out), tb - - return err - - formatError = formatFailure - - def finalize(self, result): - self.stream_capturer.halt() - - -def run_iptest(): - """Run the IPython test suite using nose. - - This function is called when this script is **not** called with the form - `iptest all`. It simply calls nose with appropriate command line flags - and accepts all of the standard nose arguments. 
- """ - # Apply our monkeypatch to Xunit - if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'): - monkeypatch_xunit() - - arg1 = sys.argv[1] - if arg1.startswith('IPython/'): - if arg1.endswith('.py'): - arg1 = arg1[:-3] - sys.argv[1] = arg1.replace('/', '.') - - arg1 = sys.argv[1] - if arg1 in test_sections: - section = test_sections[arg1] - sys.argv[1:2] = section.includes - elif arg1.startswith('IPython.') and arg1[8:] in test_sections: - section = test_sections[arg1[8:]] - sys.argv[1:2] = section.includes - else: - section = TestSection(arg1, includes=[arg1]) - - - argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks - # We add --exe because of setuptools' imbecility (it - # blindly does chmod +x on ALL files). Nose does the - # right thing and it tries to avoid executables, - # setuptools unfortunately forces our hand here. This - # has been discussed on the distutils list and the - # setuptools devs refuse to fix this problem! - '--exe', - ] - if '-a' not in argv and '-A' not in argv: - argv = argv + ['-a', '!crash'] - - if nose.__version__ >= '0.11': - # I don't fully understand why we need this one, but depending on what - # directory the test suite is run from, if we don't give it, 0 tests - # get run. Specifically, if the test suite is run from the source dir - # with an argument (like 'iptest.py IPython.core', 0 tests are run, - # even if the same call done in this directory works fine). It appears - # that if the requested package is in the current dir, nose bails early - # by default. Since it's otherwise harmless, leave it in by default - # for nose >= 0.11, though unfortunately nose 0.10 doesn't support it. 
- argv.append('--traverse-namespace') - - plugins = [ ExclusionPlugin(section.excludes), KnownFailure(), - SubprocessStreamCapturePlugin() ] - - # we still have some vestigial doctests in core - if (section.name.startswith(('core', 'IPython.core', 'IPython.utils'))): - plugins.append(IPythonDoctest()) - argv.extend([ - '--with-ipdoctest', - '--ipdoctest-tests', - '--ipdoctest-extension=txt', - ]) - - - # Use working directory set by parent process (see iptestcontroller) - if 'IPTEST_WORKING_DIR' in os.environ: - os.chdir(os.environ['IPTEST_WORKING_DIR']) - - # We need a global ipython running in this process, but the special - # in-process group spawns its own IPython kernels, so for *that* group we - # must avoid also opening the global one (otherwise there's a conflict of - # singletons). Ultimately the solution to this problem is to refactor our - # assumptions about what needs to be a singleton and what doesn't (app - # objects should, individual shells shouldn't). But for now, this - # workaround allows the test suite for the inprocess module to complete. - if 'kernel.inprocess' not in section.name: - from IPython.testing import globalipapp - globalipapp.start_ipython() - - # Now nose can run - TestProgram(argv=argv, addplugins=plugins) - -if __name__ == '__main__': - run_iptest() diff --git a/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py b/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py deleted file mode 100644 index b522f60f37..0000000000 --- a/contrib/python/ipython/py3/IPython/testing/iptestcontroller.py +++ /dev/null @@ -1,491 +0,0 @@ -# -*- coding: utf-8 -*- -"""IPython Test Process Controller - -This module runs one or more subprocesses which will actually run the IPython -test suite. - -""" - -# Copyright (c) IPython Development Team. -# Distributed under the terms of the Modified BSD License. 
- - -import argparse -import multiprocessing.pool -import os -import stat -import shutil -import signal -import sys -import subprocess -import time - -from .iptest import ( - have, test_group_names as py_test_group_names, test_sections, StreamCapturer, -) -from IPython.utils.path import compress_user -from IPython.utils.py3compat import decode -from IPython.utils.sysinfo import get_sys_info -from IPython.utils.tempdir import TemporaryDirectory - -class TestController: - """Run tests in a subprocess - """ - #: str, IPython test suite to be executed. - section = None - #: list, command line arguments to be executed - cmd = None - #: dict, extra environment variables to set for the subprocess - env = None - #: list, TemporaryDirectory instances to clear up when the process finishes - dirs = None - #: subprocess.Popen instance - process = None - #: str, process stdout+stderr - stdout = None - - def __init__(self): - self.cmd = [] - self.env = {} - self.dirs = [] - - def setUp(self): - """Create temporary directories etc. - - This is only called when we know the test group will be run. Things - created here may be cleaned up by self.cleanup(). 
- """ - pass - - def launch(self, buffer_output=False, capture_output=False): - # print('*** ENV:', self.env) # dbg - # print('*** CMD:', self.cmd) # dbg - env = os.environ.copy() - env.update(self.env) - if buffer_output: - capture_output = True - self.stdout_capturer = c = StreamCapturer(echo=not buffer_output) - c.start() - stdout = c.writefd if capture_output else None - stderr = subprocess.STDOUT if capture_output else None - self.process = subprocess.Popen(self.cmd, stdout=stdout, - stderr=stderr, env=env) - - def wait(self): - self.process.wait() - self.stdout_capturer.halt() - self.stdout = self.stdout_capturer.get_buffer() - return self.process.returncode - - def cleanup_process(self): - """Cleanup on exit by killing any leftover processes.""" - subp = self.process - if subp is None or (subp.poll() is not None): - return # Process doesn't exist, or is already dead. - - try: - print('Cleaning up stale PID: %d' % subp.pid) - subp.kill() - except: # (OSError, WindowsError) ? - # This is just a best effort, if we fail or the process was - # really gone, ignore it. - pass - else: - for i in range(10): - if subp.poll() is None: - time.sleep(0.1) - else: - break - - if subp.poll() is None: - # The process did not die... - print('... failed. 
Manual cleanup may be required.') - - def cleanup(self): - "Kill process if it's still alive, and clean up temporary directories" - self.cleanup_process() - for td in self.dirs: - td.cleanup() - - __del__ = cleanup - - -class PyTestController(TestController): - """Run Python tests using IPython.testing.iptest""" - #: str, Python command to execute in subprocess - pycmd = None - - def __init__(self, section, options): - """Create new test runner.""" - TestController.__init__(self) - self.section = section - # pycmd is put into cmd[2] in PyTestController.launch() - self.cmd = [sys.executable, '-c', None, section] - self.pycmd = "from IPython.testing.iptest import run_iptest; run_iptest()" - self.options = options - - def setup(self): - ipydir = TemporaryDirectory() - self.dirs.append(ipydir) - self.env['IPYTHONDIR'] = ipydir.name - self.workingdir = workingdir = TemporaryDirectory() - self.dirs.append(workingdir) - self.env['IPTEST_WORKING_DIR'] = workingdir.name - # This means we won't get odd effects from our own matplotlib config - self.env['MPLCONFIGDIR'] = workingdir.name - # For security reasons (http://bugs.python.org/issue16202), use - # a temporary directory to which other users have no access. - self.env['TMPDIR'] = workingdir.name - - # Add a non-accessible directory to PATH (see gh-7053) - noaccess = os.path.join(self.workingdir.name, "_no_access_") - self.noaccess = noaccess - os.mkdir(noaccess, 0) - - PATH = os.environ.get('PATH', '') - if PATH: - PATH = noaccess + os.pathsep + PATH - else: - PATH = noaccess - self.env['PATH'] = PATH - - # From options: - if self.options.xunit: - self.add_xunit() - if self.options.coverage: - self.add_coverage() - self.env['IPTEST_SUBPROC_STREAMS'] = self.options.subproc_streams - self.cmd.extend(self.options.extra_args) - - def cleanup(self): - """ - Make the non-accessible directory created in setup() accessible - again, otherwise deleting the workingdir will fail. 
- """ - os.chmod(self.noaccess, stat.S_IRWXU) - TestController.cleanup(self) - - @property - def will_run(self): - try: - return test_sections[self.section].will_run - except KeyError: - return True - - def add_xunit(self): - xunit_file = os.path.abspath(self.section + '.xunit.xml') - self.cmd.extend(['--with-xunit', '--xunit-file', xunit_file]) - - def add_coverage(self): - try: - sources = test_sections[self.section].includes - except KeyError: - sources = ['IPython'] - - coverage_rc = ("[run]\n" - "data_file = {data_file}\n" - "source =\n" - " {source}\n" - ).format(data_file=os.path.abspath('.coverage.'+self.section), - source="\n ".join(sources)) - config_file = os.path.join(self.workingdir.name, '.coveragerc') - with open(config_file, 'w') as f: - f.write(coverage_rc) - - self.env['COVERAGE_PROCESS_START'] = config_file - self.pycmd = "import coverage; coverage.process_startup(); " + self.pycmd - - def launch(self, buffer_output=False): - self.cmd[2] = self.pycmd - super(PyTestController, self).launch(buffer_output=buffer_output) - - -def prepare_controllers(options): - """Returns two lists of TestController instances, those to run, and those - not to run.""" - testgroups = options.testgroups - if not testgroups: - testgroups = py_test_group_names - - controllers = [PyTestController(name, options) for name in testgroups] - - to_run = [c for c in controllers if c.will_run] - not_run = [c for c in controllers if not c.will_run] - return to_run, not_run - -def do_run(controller, buffer_output=True): - """Setup and run a test controller. - - If buffer_output is True, no output is displayed, to avoid it appearing - interleaved. In this case, the caller is responsible for displaying test - output on failure. - - Returns - ------- - controller : TestController - The same controller as passed in, as a convenience for using map() type - APIs. - exitcode : int - The exit code of the test subprocess. Non-zero indicates failure. 
- """ - try: - try: - controller.setup() - controller.launch(buffer_output=buffer_output) - except Exception: - import traceback - traceback.print_exc() - return controller, 1 # signal failure - - exitcode = controller.wait() - return controller, exitcode - - except KeyboardInterrupt: - return controller, -signal.SIGINT - finally: - controller.cleanup() - -def report(): - """Return a string with a summary report of test-related variables.""" - inf = get_sys_info() - out = [] - def _add(name, value): - out.append((name, value)) - - _add('IPython version', inf['ipython_version']) - _add('IPython commit', "{} ({})".format(inf['commit_hash'], inf['commit_source'])) - _add('IPython package', compress_user(inf['ipython_path'])) - _add('Python version', inf['sys_version'].replace('\n','')) - _add('sys.executable', compress_user(inf['sys_executable'])) - _add('Platform', inf['platform']) - - width = max(len(n) for (n,v) in out) - out = ["{:<{width}}: {}\n".format(n, v, width=width) for (n,v) in out] - - avail = [] - not_avail = [] - - for k, is_avail in have.items(): - if is_avail: - avail.append(k) - else: - not_avail.append(k) - - if avail: - out.append('\nTools and libraries available at test time:\n') - avail.sort() - out.append(' ' + ' '.join(avail)+'\n') - - if not_avail: - out.append('\nTools and libraries NOT available at test time:\n') - not_avail.sort() - out.append(' ' + ' '.join(not_avail)+'\n') - - return ''.join(out) - -def run_iptestall(options): - """Run the entire IPython test suite by calling nose and trial. - - This function constructs :class:`IPTester` instances for all IPython - modules and package and then runs each of them. This causes the modules - and packages of IPython to be tested each in their own subprocess using - nose. - - Parameters - ---------- - - All parameters are passed as attributes of the options object. - - testgroups : list of str - Run only these sections of the test suite. If empty, run all the available - sections. 
- - fast : int or None - Run the test suite in parallel, using n simultaneous processes. If None - is passed, one process is used per CPU core. Default 1 (i.e. sequential) - - inc_slow : bool - Include slow tests. By default, these tests aren't run. - - url : unicode - Address:port to use when running the JS tests. - - xunit : bool - Produce Xunit XML output. This is written to multiple foo.xunit.xml files. - - coverage : bool or str - Measure code coverage from tests. True will store the raw coverage data, - or pass 'html' or 'xml' to get reports. - - extra_args : list - Extra arguments to pass to the test subprocesses, e.g. '-v' - """ - to_run, not_run = prepare_controllers(options) - - def justify(ltext, rtext, width=70, fill='-'): - ltext += ' ' - rtext = (' ' + rtext).rjust(width - len(ltext), fill) - return ltext + rtext - - # Run all test runners, tracking execution time - failed = [] - t_start = time.time() - - print() - if options.fast == 1: - # This actually means sequential, i.e. 
with 1 job - for controller in to_run: - print('Test group:', controller.section) - sys.stdout.flush() # Show in correct order when output is piped - controller, res = do_run(controller, buffer_output=False) - if res: - failed.append(controller) - if res == -signal.SIGINT: - print("Interrupted") - break - print() - - else: - # Run tests concurrently - try: - pool = multiprocessing.pool.ThreadPool(options.fast) - for (controller, res) in pool.imap_unordered(do_run, to_run): - res_string = 'OK' if res == 0 else 'FAILED' - print(justify('Test group: ' + controller.section, res_string)) - if res: - print(decode(controller.stdout)) - failed.append(controller) - if res == -signal.SIGINT: - print("Interrupted") - break - except KeyboardInterrupt: - return - - for controller in not_run: - print(justify('Test group: ' + controller.section, 'NOT RUN')) - - t_end = time.time() - t_tests = t_end - t_start - nrunners = len(to_run) - nfail = len(failed) - # summarize results - print('_'*70) - print('Test suite completed for system with the following information:') - print(report()) - took = "Took %.3fs." % t_tests - print('Status: ', end='') - if not failed: - print('OK (%d test groups).' % nrunners, took) - else: - # If anything went wrong, point out what command to rerun manually to - # see the actual errors and individual summary - failed_sections = [c.section for c in failed] - print('ERROR - {} out of {} test groups failed ({}).'.format(nfail, - nrunners, ', '.join(failed_sections)), took) - print() - print('You may wish to rerun these, with:') - print(' iptest', *failed_sections) - print() - - if options.coverage: - from coverage import coverage, CoverageException - cov = coverage(data_file='.coverage') - cov.combine() - cov.save() - - # Coverage HTML report - if options.coverage == 'html': - html_dir = 'ipy_htmlcov' - shutil.rmtree(html_dir, ignore_errors=True) - print("Writing HTML coverage report to %s/ ... 
" % html_dir, end="") - sys.stdout.flush() - - # Custom HTML reporter to clean up module names. - from coverage.html import HtmlReporter - class CustomHtmlReporter(HtmlReporter): - def find_code_units(self, morfs): - super(CustomHtmlReporter, self).find_code_units(morfs) - for cu in self.code_units: - nameparts = cu.name.split(os.sep) - if 'IPython' not in nameparts: - continue - ix = nameparts.index('IPython') - cu.name = '.'.join(nameparts[ix:]) - - # Reimplement the html_report method with our custom reporter - cov.get_data() - cov.config.from_args(omit='*{0}tests{0}*'.format(os.sep), html_dir=html_dir, - html_title='IPython test coverage', - ) - reporter = CustomHtmlReporter(cov, cov.config) - reporter.report(None) - print('done.') - - # Coverage XML report - elif options.coverage == 'xml': - try: - cov.xml_report(outfile='ipy_coverage.xml') - except CoverageException as e: - print('Generating coverage report failed. Are you running javascript tests only?') - import traceback - traceback.print_exc() - - if failed: - # Ensure that our exit code indicates failure - sys.exit(1) - -argparser = argparse.ArgumentParser(description='Run IPython test suite') -argparser.add_argument('testgroups', nargs='*', - help='Run specified groups of tests. If omitted, run ' - 'all tests.') -argparser.add_argument('--all', action='store_true', - help='Include slow tests not run by default.') -argparser.add_argument('-j', '--fast', nargs='?', const=None, default=1, type=int, - help='Run test sections in parallel. This starts as many ' - 'processes as you have cores, or you can specify a number.') -argparser.add_argument('--xunit', action='store_true', - help='Produce Xunit XML results') -argparser.add_argument('--coverage', nargs='?', const=True, default=False, - help="Measure test coverage. Specify 'html' or " - "'xml' to get reports.") -argparser.add_argument('--subproc-streams', default='capture', - help="What to do with stdout/stderr from subprocesses. 
" - "'capture' (default), 'show' and 'discard' are the options.") - -def default_options(): - """Get an argparse Namespace object with the default arguments, to pass to - :func:`run_iptestall`. - """ - options = argparser.parse_args([]) - options.extra_args = [] - return options - -def main(): - # iptest doesn't work correctly if the working directory is the - # root of the IPython source tree. Tell the user to avoid - # frustration. - if os.path.exists(os.path.join(os.getcwd(), - 'IPython', 'testing', '__main__.py')): - print("Don't run iptest from the IPython source directory", - file=sys.stderr) - sys.exit(1) - # Arguments after -- should be passed through to nose. Argparse treats - # everything after -- as regular positional arguments, so we separate them - # first. - try: - ix = sys.argv.index('--') - except ValueError: - to_parse = sys.argv[1:] - extra_args = [] - else: - to_parse = sys.argv[1:ix] - extra_args = sys.argv[ix+1:] - - options = argparser.parse_args(to_parse) - options.extra_args = extra_args - - run_iptestall(options) - - -if __name__ == '__main__': - main() diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py b/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py index d73cd246fd..68f7016e34 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/dtexample.py @@ -4,6 +4,9 @@ This file just contains doctests both using plain python and IPython prompts. All tests should be loaded by nose. """ +import os + + def pyfunc(): """Some pure python tests... 
@@ -35,20 +38,8 @@ def ipfunc(): ....: print(i, end=' ') ....: print(i+1, end=' ') ....: - 0 1 1 2 2 3 - - - Examples that access the operating system work: - - In [1]: !echo hello - hello + 0 1 1 2 2 3 - In [2]: !echo hello > /tmp/foo_iptest - - In [3]: !cat /tmp/foo_iptest - hello - - In [4]: rm -f /tmp/foo_iptest It's OK to use '_' for the last result, but do NOT try to use IPython's numbered history of _NN outputs, since those won't exist under the @@ -59,7 +50,7 @@ def ipfunc(): In [8]: print(repr(_)) 'hi' - + In [7]: 3+4 Out[7]: 7 @@ -69,7 +60,26 @@ def ipfunc(): In [9]: ipfunc() Out[9]: 'ipfunc' """ - return 'ipfunc' + return "ipfunc" + + +def ipos(): + """Examples that access the operating system work: + + In [1]: !echo hello + hello + + In [2]: !echo hello > /tmp/foo_iptest + + In [3]: !cat /tmp/foo_iptest + hello + + In [4]: rm -f /tmp/foo_iptest + """ + pass + + +ipos.__skip_doctest__ = os.name == "nt" def ranfunc(): diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py b/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py index 3b8667e72f..52cd8fd3b8 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/ipdoctest.py @@ -19,33 +19,13 @@ Limitations: # Module imports # From the standard library -import builtins as builtin_mod import doctest -import inspect import logging import os import re -import sys -from importlib import import_module -from io import StringIO from testpath import modified_env -from inspect import getmodule - -# We are overriding the default doctest runner, so we need to import a few -# things from doctest directly -from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE, - _unittest_reportflags, DocTestRunner, - _extract_future_flags, pdb, _OutputRedirectingPdb, - _exception_traceback, - linecache) - -# Third-party modules - -from nose.plugins import doctests, Plugin -from nose.util import anyp, tolist - 
#----------------------------------------------------------------------------- # Module globals and other constants #----------------------------------------------------------------------------- @@ -57,114 +37,16 @@ log = logging.getLogger(__name__) # Classes and functions #----------------------------------------------------------------------------- -def is_extension_module(filename): - """Return whether the given filename is an extension module. - - This simply checks that the extension is either .so or .pyd. - """ - return os.path.splitext(filename)[1].lower() in ('.so','.pyd') - - -class DocTestSkip(object): - """Object wrapper for doctests to be skipped.""" - - ds_skip = """Doctest to skip. - >>> 1 #doctest: +SKIP - """ - - def __init__(self,obj): - self.obj = obj - def __getattribute__(self,key): - if key == '__doc__': - return DocTestSkip.ds_skip - else: - return getattr(object.__getattribute__(self,'obj'),key) - -# Modified version of the one in the stdlib, that fixes a python bug (doctests -# not found in extension modules, http://bugs.python.org/issue3158) class DocTestFinder(doctest.DocTestFinder): + def _get_test(self, obj, name, module, globs, source_lines): + test = super()._get_test(obj, name, module, globs, source_lines) - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. 
- """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is object.__globals__ - elif inspect.isbuiltin(object): - return module.__name__ == object.__module__ - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.ismethod(object): - # This one may be a bug in cython that fails to correctly set the - # __module__ attribute of methods, but since the same error is easy - # to make by extension code writers, having this safety in place - # isn't such a bad idea - return module.__name__ == object.__self__.__class__.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. - elif inspect.ismethoddescriptor(object): - # Unbound PyQt signals reach this point in Python 3.4b3, and we want - # to avoid throwing an error. See also http://bugs.python.org/issue3158 - return False - else: - raise ValueError("object must be a class or function, got %r" % object) + if bool(getattr(obj, "__skip_doctest__", False)) and test is not None: + for example in test.examples: + example.options[doctest.SKIP] = True - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. - """ - print('_find for:', obj, name, module) # dbg - if hasattr(obj,"skip_doctest"): - #print 'SKIPPING DOCTEST FOR:',obj # dbg - obj = DocTestSkip(obj) - - doctest.DocTestFinder._find(self,tests, obj, name, module, - source_lines, globs, seen) - - # Below we re-run pieces of the above method with manual modifications, - # because the original code is buggy and fails to correctly identify - # doctests in extension modules. 
- - # Local shorthands - from inspect import isroutine, isclass - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - valname1 = '%s.%s' % (name, valname) - if ( (isroutine(val) or isclass(val)) - and self._from_module(module, val) ): - - self._find(tests, val, valname1, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - #print 'RECURSE into class:',obj # dbg - for valname, val in obj.__dict__.items(): - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = getattr(obj, valname).__func__ - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) + return test class IPDoctestOutputChecker(doctest.OutputChecker): @@ -193,146 +75,11 @@ class IPDoctestOutputChecker(doctest.OutputChecker): return ret -class DocTestCase(doctests.DocTestCase): - """Proxy for DocTestCase: provides an address() method that - returns the correct address for the doctest case. Otherwise - acts as a proxy to the test case. To provide hints for address(), - an obj may also be passed -- this will be used as the test object - for purposes of determining the test address, if it is provided. - """ - - # Note: this method was taken from numpy's nosetester module. 
- - # Subclass nose.plugins.doctests.DocTestCase to work around a bug in - # its constructor that blocks non-default arguments from being passed - # down into doctest.DocTestCase - - def __init__(self, test, optionflags=0, setUp=None, tearDown=None, - checker=None, obj=None, result_var='_'): - self._result_var = result_var - doctests.DocTestCase.__init__(self, test, - optionflags=optionflags, - setUp=setUp, tearDown=tearDown, - checker=checker) - # Now we must actually copy the original constructor from the stdlib - # doctest class, because we can't call it directly and a bug in nose - # means it never gets passed the right arguments. - - self._dt_optionflags = optionflags - self._dt_checker = checker - self._dt_test = test - self._dt_test_globs_ori = test.globs - self._dt_setUp = setUp - self._dt_tearDown = tearDown - - # XXX - store this runner once in the object! - runner = IPDocTestRunner(optionflags=optionflags, - checker=checker, verbose=False) - self._dt_runner = runner - - - # Each doctest should remember the directory it was loaded from, so - # things like %run work without too many contortions - self._ori_dir = os.path.dirname(test.filename) - - # Modified runTest from the default stdlib - def runTest(self): - test = self._dt_test - runner = self._dt_runner - - old = sys.stdout - new = StringIO() - optionflags = self._dt_optionflags - - if not (optionflags & REPORTING_FLAGS): - # The option flags don't include any reporting flags, - # so add the default reporting flags - optionflags |= _unittest_reportflags - - try: - # Save our current directory and switch out to the one where the - # test was originally created, in case another doctest did a - # directory change. We'll restore this in the finally clause. 
- curdir = os.getcwd() - #print 'runTest in dir:', self._ori_dir # dbg - os.chdir(self._ori_dir) - - runner.DIVIDER = "-"*70 - failures, tries = runner.run(test,out=new.write, - clear_globs=False) - finally: - sys.stdout = old - os.chdir(curdir) - - if failures: - raise self.failureException(self.format_failure(new.getvalue())) - - def setUp(self): - """Modified test setup that syncs with ipython namespace""" - #print "setUp test", self._dt_test.examples # dbg - if isinstance(self._dt_test.examples[0], IPExample): - # for IPython examples *only*, we swap the globals with the ipython - # namespace, after updating it with the globals (which doctest - # fills with the necessary info from the module being tested). - self.user_ns_orig = {} - self.user_ns_orig.update(_ip.user_ns) - _ip.user_ns.update(self._dt_test.globs) - # We must remove the _ key in the namespace, so that Python's - # doctest code sets it naturally - _ip.user_ns.pop('_', None) - _ip.user_ns['__builtins__'] = builtin_mod - self._dt_test.globs = _ip.user_ns - - super(DocTestCase, self).setUp() - - def tearDown(self): - - # Undo the test.globs reassignment we made, so that the parent class - # teardown doesn't destroy the ipython namespace - if isinstance(self._dt_test.examples[0], IPExample): - self._dt_test.globs = self._dt_test_globs_ori - _ip.user_ns.clear() - _ip.user_ns.update(self.user_ns_orig) - - # XXX - fperez: I am not sure if this is truly a bug in nose 0.11, but - # it does look like one to me: its tearDown method tries to run - # - # delattr(builtin_mod, self._result_var) - # - # without checking that the attribute really is there; it implicitly - # assumes it should have been set via displayhook. But if the - # displayhook was never called, this doesn't necessarily happen. I - # haven't been able to find a little self-contained example outside of - # ipython that would show the problem so I can report it to the nose - # team, but it does happen a lot in our code. 
- # - # So here, we just protect as narrowly as possible by trapping an - # attribute error whose message would be the name of self._result_var, - # and letting any other error propagate. - try: - super(DocTestCase, self).tearDown() - except AttributeError as exc: - if exc.args[0] != self._result_var: - raise - - # A simple subclassing of the original with a different class name, so we can # distinguish and treat differently IPython examples from pure python ones. class IPExample(doctest.Example): pass -class IPExternalExample(doctest.Example): - """Doctest examples to be run in an external process.""" - - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Parent constructor - doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options) - - # An EXTRA newline is needed to prevent pexpect hangs - self.source += '\n' - - class IPDocTestParser(doctest.DocTestParser): """ A class used to parse strings containing doctest examples. @@ -378,9 +125,6 @@ class IPDocTestParser(doctest.DocTestParser): # we don't need to modify any other code. _RANDOM_TEST = re.compile(r'#\s*all-random\s+') - # Mark tests to be executed in an external process - currently unsupported. - _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL') - def ip2py(self,source): """Convert input IPython source into valid Python.""" block = _ip.input_transformer_manager.transform_cell(source) @@ -423,27 +167,12 @@ class IPDocTestParser(doctest.DocTestParser): terms = list(self._EXAMPLE_RE_PY.finditer(string)) if terms: # Normal Python example - #print '-'*70 # dbg - #print 'PyExample, Source:\n',string # dbg - #print '-'*70 # dbg Example = doctest.Example else: - # It's an ipython example. Note that IPExamples are run - # in-process, so their syntax must be turned into valid python. - # IPExternalExamples are run out-of-process (via pexpect) so they - # don't need any filtering (a real ipython will be executing them). + # It's an ipython example. 
terms = list(self._EXAMPLE_RE_IP.finditer(string)) - if self._EXTERNAL_IP.search(string): - #print '-'*70 # dbg - #print 'IPExternalExample, Source:\n',string # dbg - #print '-'*70 # dbg - Example = IPExternalExample - else: - #print '-'*70 # dbg - #print 'IPExample, Source:\n',string # dbg - #print '-'*70 # dbg - Example = IPExample - ip2py = True + Example = IPExample + ip2py = True for m in terms: # Add the pre-example text to `output`. @@ -458,10 +187,6 @@ class IPDocTestParser(doctest.DocTestParser): # cases, it's only non-empty for 'all-random' tests): want += random_marker - if Example is IPExternalExample: - options[doctest.NORMALIZE_WHITESPACE] = True - want += '\n' - # Create an Example, and add it to the list. if not self._IS_BLANK_OR_COMMENT(source): output.append(Example(source, want, exc_msg, @@ -569,193 +294,7 @@ class IPDocTestRunner(doctest.DocTestRunner,object): """ def run(self, test, compileflags=None, out=None, clear_globs=True): - - # Hack: ipython needs access to the execution context of the example, - # so that it can propagate user variables loaded by %run into - # test.globs. We put them here into our modified %run as a function - # attribute. Our new %run will then only make the namespace update - # when called (rather than unconditionally updating test.globs here - # for all examples, most of which won't be calling %run anyway). - #_ip._ipdoctest_test_globs = test.globs - #_ip._ipdoctest_test_filename = test.filename - - test.globs.update(_ip.user_ns) - # Override terminal size to standardise traceback format with modified_env({'COLUMNS': '80', 'LINES': '24'}): return super(IPDocTestRunner,self).run(test, compileflags,out,clear_globs) - - -class DocFileCase(doctest.DocFileCase): - """Overrides to provide filename - """ - def address(self): - return (self._dt_test.filename, None, None) - - -class ExtensionDoctest(doctests.Doctest): - """Nose Plugin that supports doctests in extension modules. 
- """ - name = 'extdoctest' # call nosetests with --with-extdoctest - enabled = True - - def options(self, parser, env=os.environ): - Plugin.options(self, parser, env) - parser.add_option('--doctest-tests', action='store_true', - dest='doctest_tests', - default=env.get('NOSE_DOCTEST_TESTS',True), - help="Also look for doctests in test modules. " - "Note that classes, methods and functions should " - "have either doctests or non-doctest tests, " - "not both. [NOSE_DOCTEST_TESTS]") - parser.add_option('--doctest-extension', action="append", - dest="doctestExtension", - help="Also look for doctests in files with " - "this extension [NOSE_DOCTEST_EXTENSION]") - # Set the default as a list, if given in env; otherwise - # an additional value set on the command line will cause - # an error. - env_setting = env.get('NOSE_DOCTEST_EXTENSION') - if env_setting is not None: - parser.set_defaults(doctestExtension=tolist(env_setting)) - - - def configure(self, options, config): - Plugin.configure(self, options, config) - # Pull standard doctest plugin out of config; we will do doctesting - config.plugins.plugins = [p for p in config.plugins.plugins - if p.name != 'doctest'] - self.doctest_tests = options.doctest_tests - self.extension = tolist(options.doctestExtension) - - self.parser = doctest.DocTestParser() - self.finder = DocTestFinder() - self.checker = IPDoctestOutputChecker() - self.globs = None - self.extraglobs = None - - - def loadTestsFromExtensionModule(self,filename): - bpath,mod = os.path.split(filename) - modname = os.path.splitext(mod)[0] - try: - sys.path.append(bpath) - module = import_module(modname) - tests = list(self.loadTestsFromModule(module)) - finally: - sys.path.pop() - return tests - - # NOTE: the method below is almost a copy of the original one in nose, with - # a few modifications to control output checking. 
- - def loadTestsFromModule(self, module): - #print '*** ipdoctest - lTM',module # dbg - - if not self.matches(module.__name__): - log.debug("Doctest doesn't want module %s", module) - return - - tests = self.finder.find(module,globs=self.globs, - extraglobs=self.extraglobs) - if not tests: - return - - # always use whitespace and ellipsis options - optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS - - tests.sort() - module_file = module.__file__ - if module_file[-4:] in ('.pyc', '.pyo'): - module_file = module_file[:-1] - for test in tests: - if not test.examples: - continue - if not test.filename: - test.filename = module_file - - yield DocTestCase(test, - optionflags=optionflags, - checker=self.checker) - - - def loadTestsFromFile(self, filename): - #print "ipdoctest - from file", filename # dbg - if is_extension_module(filename): - for t in self.loadTestsFromExtensionModule(filename): - yield t - else: - if self.extension and anyp(filename.endswith, self.extension): - name = os.path.basename(filename) - with open(filename) as dh: - doc = dh.read() - test = self.parser.get_doctest( - doc, globs={'__file__': filename}, name=name, - filename=filename, lineno=0) - if test.examples: - #print 'FileCase:',test.examples # dbg - yield DocFileCase(test) - else: - yield False # no tests to load - - -class IPythonDoctest(ExtensionDoctest): - """Nose Plugin that supports doctests in extension modules. - """ - name = 'ipdoctest' # call nosetests with --with-ipdoctest - enabled = True - - def makeTest(self, obj, parent): - """Look for doctests in the given object, which will be a - function, method or class. 
- """ - #print 'Plugin analyzing:', obj, parent # dbg - # always use whitespace and ellipsis options - optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS - - doctests = self.finder.find(obj, module=getmodule(parent)) - if doctests: - for test in doctests: - if len(test.examples) == 0: - continue - - yield DocTestCase(test, obj=obj, - optionflags=optionflags, - checker=self.checker) - - def options(self, parser, env=os.environ): - #print "Options for nose plugin:", self.name # dbg - Plugin.options(self, parser, env) - parser.add_option('--ipdoctest-tests', action='store_true', - dest='ipdoctest_tests', - default=env.get('NOSE_IPDOCTEST_TESTS',True), - help="Also look for doctests in test modules. " - "Note that classes, methods and functions should " - "have either doctests or non-doctest tests, " - "not both. [NOSE_IPDOCTEST_TESTS]") - parser.add_option('--ipdoctest-extension', action="append", - dest="ipdoctest_extension", - help="Also look for doctests in files with " - "this extension [NOSE_IPDOCTEST_EXTENSION]") - # Set the default as a list, if given in env; otherwise - # an additional value set on the command line will cause - # an error. 
- env_setting = env.get('NOSE_IPDOCTEST_EXTENSION') - if env_setting is not None: - parser.set_defaults(ipdoctest_extension=tolist(env_setting)) - - def configure(self, options, config): - #print "Configuring nose plugin:", self.name # dbg - Plugin.configure(self, options, config) - # Pull standard doctest plugin out of config; we will do doctesting - config.plugins.plugins = [p for p in config.plugins.plugins - if p.name != 'doctest'] - self.doctest_tests = options.ipdoctest_tests - self.extension = tolist(options.ipdoctest_extension) - - self.parser = IPDocTestParser() - self.finder = DocTestFinder(parser=self.parser) - self.checker = IPDoctestOutputChecker() - self.globs = None - self.extraglobs = None diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py b/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py deleted file mode 100644 index e24e22a830..0000000000 --- a/contrib/python/ipython/py3/IPython/testing/plugin/iptest.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -"""Nose-based test runner. -""" - -from nose.core import main -from nose.plugins.builtin import plugins -from nose.plugins.doctests import Doctest - -from . 
import ipdoctest -from .ipdoctest import IPDocTestRunner - -if __name__ == '__main__': - print('WARNING: this code is incomplete!') - print() - - pp = [x() for x in plugins] # activate all builtin plugins first - main(testRunner=IPDocTestRunner(), - plugins=pp+[ipdoctest.IPythonDoctest(),Doctest()]) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/pytest_ipdoctest.py b/contrib/python/ipython/py3/IPython/testing/plugin/pytest_ipdoctest.py new file mode 100644 index 0000000000..809713d7c8 --- /dev/null +++ b/contrib/python/ipython/py3/IPython/testing/plugin/pytest_ipdoctest.py @@ -0,0 +1,860 @@ +# Based on Pytest doctest.py +# Original license: +# The MIT License (MIT) +# +# Copyright (c) 2004-2021 Holger Krekel and others +"""Discover and run ipdoctests in modules and test files.""" +import builtins +import bdb +import inspect +import os +import platform +import sys +import traceback +import types +import warnings +from contextlib import contextmanager +from pathlib import Path +from typing import Any +from typing import Callable +from typing import Dict +from typing import Generator +from typing import Iterable +from typing import List +from typing import Optional +from typing import Pattern +from typing import Sequence +from typing import Tuple +from typing import Type +from typing import TYPE_CHECKING +from typing import Union + +import pytest +from _pytest import outcomes +from _pytest._code.code import ExceptionInfo +from _pytest._code.code import ReprFileLocation +from _pytest._code.code import TerminalRepr +from _pytest._io import TerminalWriter +from _pytest.compat import safe_getattr +from _pytest.config import Config +from _pytest.config.argparsing import Parser +from _pytest.fixtures import FixtureRequest +from _pytest.nodes import Collector +from _pytest.outcomes import OutcomeException +from _pytest.pathlib import fnmatch_ex +from _pytest.pathlib import import_path +from _pytest.python_api import approx +from _pytest.warning_types import 
PytestWarning + +if TYPE_CHECKING: + import doctest + +DOCTEST_REPORT_CHOICE_NONE = "none" +DOCTEST_REPORT_CHOICE_CDIFF = "cdiff" +DOCTEST_REPORT_CHOICE_NDIFF = "ndiff" +DOCTEST_REPORT_CHOICE_UDIFF = "udiff" +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = "only_first_failure" + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +# Lazy definition of runner class +RUNNER_CLASS = None +# Lazy definition of output checker class +CHECKER_CLASS: Optional[Type["IPDoctestOutputChecker"]] = None + + +def pytest_addoption(parser: Parser) -> None: + parser.addini( + "ipdoctest_optionflags", + "option flags for ipdoctests", + type="args", + default=["ELLIPSIS"], + ) + parser.addini( + "ipdoctest_encoding", "encoding used for ipdoctest files", default="utf-8" + ) + group = parser.getgroup("collect") + group.addoption( + "--ipdoctest-modules", + action="store_true", + default=False, + help="run ipdoctests in all .py modules", + dest="ipdoctestmodules", + ) + group.addoption( + "--ipdoctest-report", + type=str.lower, + default="udiff", + help="choose another output format for diffs on ipdoctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="ipdoctestreport", + ) + group.addoption( + "--ipdoctest-glob", + action="append", + default=[], + metavar="pat", + help="ipdoctests file matching pattern, default: test*.txt", + dest="ipdoctestglob", + ) + group.addoption( + "--ipdoctest-ignore-import-errors", + action="store_true", + default=False, + help="ignore ipdoctest ImportErrors", + dest="ipdoctest_ignore_import_errors", + ) + group.addoption( + "--ipdoctest-continue-on-failure", + action="store_true", + default=False, + help="for a given ipdoctest, continue to run after the first failure", + dest="ipdoctest_continue_on_failure", + ) + + +def pytest_unconfigure() -> None: + global RUNNER_CLASS + + RUNNER_CLASS = None + + +def 
pytest_collect_file( + file_path: Path, + parent: Collector, +) -> Optional[Union["IPDoctestModule", "IPDoctestTextfile"]]: + config = parent.config + if file_path.suffix == ".py": + if config.option.ipdoctestmodules and not any( + (_is_setup_py(file_path), _is_main_py(file_path)) + ): + mod: IPDoctestModule = IPDoctestModule.from_parent(parent, path=file_path) + return mod + elif _is_ipdoctest(config, file_path, parent): + txt: IPDoctestTextfile = IPDoctestTextfile.from_parent(parent, path=file_path) + return txt + return None + + +if int(pytest.__version__.split(".")[0]) < 7: + _collect_file = pytest_collect_file + + def pytest_collect_file( + path, + parent: Collector, + ) -> Optional[Union["IPDoctestModule", "IPDoctestTextfile"]]: + return _collect_file(Path(path), parent) + + _import_path = import_path + + def import_path(path, root): + import py.path + + return _import_path(py.path.local(path)) + + +def _is_setup_py(path: Path) -> bool: + if path.name != "setup.py": + return False + contents = path.read_bytes() + return b"setuptools" in contents or b"distutils" in contents + + +def _is_ipdoctest(config: Config, path: Path, parent: Collector) -> bool: + if path.suffix in (".txt", ".rst") and parent.session.isinitpath(path): + return True + globs = config.getoption("ipdoctestglob") or ["test*.txt"] + return any(fnmatch_ex(glob, path) for glob in globs) + + +def _is_main_py(path: Path) -> bool: + return path.name == "__main__.py" + + +class ReprFailDoctest(TerminalRepr): + def __init__( + self, reprlocation_lines: Sequence[Tuple[ReprFileLocation, Sequence[str]]] + ) -> None: + self.reprlocation_lines = reprlocation_lines + + def toterminal(self, tw: TerminalWriter) -> None: + for reprlocation, lines in self.reprlocation_lines: + for line in lines: + tw.line(line) + reprlocation.toterminal(tw) + + +class MultipleDoctestFailures(Exception): + def __init__(self, failures: Sequence["doctest.DocTestFailure"]) -> None: + super().__init__() + self.failures = failures + 
+ +def _init_runner_class() -> Type["IPDocTestRunner"]: + import doctest + from .ipdoctest import IPDocTestRunner + + class PytestDoctestRunner(IPDocTestRunner): + """Runner to collect failures. + + Note that the out variable in this case is a list instead of a + stdout-like object. + """ + + def __init__( + self, + checker: Optional["IPDoctestOutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, + ) -> None: + super().__init__(checker=checker, verbose=verbose, optionflags=optionflags) + self.continue_on_failure = continue_on_failure + + def report_failure( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + got: str, + ) -> None: + failure = doctest.DocTestFailure(test, example, got) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + def report_unexpected_exception( + self, + out, + test: "doctest.DocTest", + example: "doctest.Example", + exc_info: Tuple[Type[BaseException], BaseException, types.TracebackType], + ) -> None: + if isinstance(exc_info[1], OutcomeException): + raise exc_info[1] + if isinstance(exc_info[1], bdb.BdbQuit): + outcomes.exit("Quitting debugger") + failure = doctest.UnexpectedException(test, example, exc_info) + if self.continue_on_failure: + out.append(failure) + else: + raise failure + + return PytestDoctestRunner + + +def _get_runner( + checker: Optional["IPDoctestOutputChecker"] = None, + verbose: Optional[bool] = None, + optionflags: int = 0, + continue_on_failure: bool = True, +) -> "IPDocTestRunner": + # We need this in order to do a lazy import on doctest + global RUNNER_CLASS + if RUNNER_CLASS is None: + RUNNER_CLASS = _init_runner_class() + # Type ignored because the continue_on_failure argument is only defined on + # PytestDoctestRunner, which is lazily defined so can't be used as a type. 
+ return RUNNER_CLASS( # type: ignore + checker=checker, + verbose=verbose, + optionflags=optionflags, + continue_on_failure=continue_on_failure, + ) + + +class IPDoctestItem(pytest.Item): + def __init__( + self, + name: str, + parent: "Union[IPDoctestTextfile, IPDoctestModule]", + runner: Optional["IPDocTestRunner"] = None, + dtest: Optional["doctest.DocTest"] = None, + ) -> None: + super().__init__(name, parent) + self.runner = runner + self.dtest = dtest + self.obj = None + self.fixture_request: Optional[FixtureRequest] = None + + @classmethod + def from_parent( # type: ignore + cls, + parent: "Union[IPDoctestTextfile, IPDoctestModule]", + *, + name: str, + runner: "IPDocTestRunner", + dtest: "doctest.DocTest", + ): + # incompatible signature due to imposed limits on subclass + """The public named constructor.""" + return super().from_parent(name=name, parent=parent, runner=runner, dtest=dtest) + + def setup(self) -> None: + if self.dtest is not None: + self.fixture_request = _setup_fixtures(self) + globs = dict(getfixture=self.fixture_request.getfixturevalue) + for name, value in self.fixture_request.getfixturevalue( + "ipdoctest_namespace" + ).items(): + globs[name] = value + self.dtest.globs.update(globs) + + from .ipdoctest import IPExample + + if isinstance(self.dtest.examples[0], IPExample): + # for IPython examples *only*, we swap the globals with the ipython + # namespace, after updating it with the globals (which doctest + # fills with the necessary info from the module being tested). 
+ self._user_ns_orig = {} + self._user_ns_orig.update(_ip.user_ns) + _ip.user_ns.update(self.dtest.globs) + # We must remove the _ key in the namespace, so that Python's + # doctest code sets it naturally + _ip.user_ns.pop("_", None) + _ip.user_ns["__builtins__"] = builtins + self.dtest.globs = _ip.user_ns + + def teardown(self) -> None: + from .ipdoctest import IPExample + + # Undo the test.globs reassignment we made + if isinstance(self.dtest.examples[0], IPExample): + self.dtest.globs = {} + _ip.user_ns.clear() + _ip.user_ns.update(self._user_ns_orig) + del self._user_ns_orig + + self.dtest.globs.clear() + + def runtest(self) -> None: + assert self.dtest is not None + assert self.runner is not None + _check_all_skipped(self.dtest) + self._disable_output_capturing_for_darwin() + failures: List["doctest.DocTestFailure"] = [] + + # exec(compile(..., "single", ...), ...) puts result in builtins._ + had_underscore_value = hasattr(builtins, "_") + underscore_original_value = getattr(builtins, "_", None) + + # Save our current directory and switch out to the one where the + # test was originally created, in case another doctest did a + # directory change. We'll restore this in the finally clause. + curdir = os.getcwd() + os.chdir(self.fspath.dirname) + try: + # Type ignored because we change the type of `out` from what + # ipdoctest expects. + self.runner.run(self.dtest, out=failures, clear_globs=False) # type: ignore[arg-type] + finally: + os.chdir(curdir) + if had_underscore_value: + setattr(builtins, "_", underscore_original_value) + elif hasattr(builtins, "_"): + delattr(builtins, "_") + + if failures: + raise MultipleDoctestFailures(failures) + + def _disable_output_capturing_for_darwin(self) -> None: + """Disable output capturing. 
Otherwise, stdout is lost to ipdoctest (pytest#985).""" + if platform.system() != "Darwin": + return + capman = self.config.pluginmanager.getplugin("capturemanager") + if capman: + capman.suspend_global_capture(in_=True) + out, err = capman.read_global_capture() + sys.stdout.write(out) + sys.stderr.write(err) + + # TODO: Type ignored -- breaks Liskov Substitution. + def repr_failure( # type: ignore[override] + self, + excinfo: ExceptionInfo[BaseException], + ) -> Union[str, TerminalRepr]: + import doctest + + failures: Optional[ + Sequence[Union[doctest.DocTestFailure, doctest.UnexpectedException]] + ] = None + if isinstance( + excinfo.value, (doctest.DocTestFailure, doctest.UnexpectedException) + ): + failures = [excinfo.value] + elif isinstance(excinfo.value, MultipleDoctestFailures): + failures = excinfo.value.failures + + if failures is None: + return super().repr_failure(excinfo) + + reprlocation_lines = [] + for failure in failures: + example = failure.example + test = failure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = type(failure).__name__ + # TODO: ReprFileLocation doesn't expect a None lineno. + reprlocation = ReprFileLocation(filename, lineno, message) # type: ignore[arg-type] + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("ipdoctestreport")) + if lineno is not None: + assert failure.test.docstring is not None + lines = failure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + assert test.lineno is not None + lines = [ + "%03d %s" % (i + test.lineno + 1, x) for (i, x) in enumerate(lines) + ] + # trim docstring error lines to 10 + lines = lines[max(example.lineno - 9, 0) : example.lineno + 1] + else: + lines = [ + "EXAMPLE LOCATION UNKNOWN, not showing all tests of that example" + ] + indent = ">>>" + for line in example.source.splitlines(): + lines.append(f"??? 
{indent} {line}") + indent = "..." + if isinstance(failure, doctest.DocTestFailure): + lines += checker.output_difference( + example, failure.got, report_choice + ).split("\n") + else: + inner_excinfo = ExceptionInfo.from_exc_info(failure.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % repr(inner_excinfo.value)] + lines += [ + x.strip("\n") for x in traceback.format_exception(*failure.exc_info) + ] + reprlocation_lines.append((reprlocation, lines)) + return ReprFailDoctest(reprlocation_lines) + + def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: + assert self.dtest is not None + return self.path, self.dtest.lineno, "[ipdoctest] %s" % self.name + + if int(pytest.__version__.split(".")[0]) < 7: + + @property + def path(self) -> Path: + return Path(self.fspath) + + +def _get_flag_lookup() -> Dict[str, int]: + import doctest + + return dict( + DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + NUMBER=_get_number_flag(), + ) + + +def get_optionflags(parent): + optionflags_str = parent.config.getini("ipdoctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + + +def _get_continue_on_failure(config): + continue_on_failure = config.getvalue("ipdoctest_continue_on_failure") + if continue_on_failure: + # We need to turn off this if we use pdb since we should stop at + # the first failure. 
+ if config.getvalue("usepdb"): + continue_on_failure = False + return continue_on_failure + + +class IPDoctestTextfile(pytest.Module): + obj = None + + def collect(self) -> Iterable[IPDoctestItem]: + import doctest + from .ipdoctest import IPDocTestParser + + # Inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker. + encoding = self.config.getini("ipdoctest_encoding") + text = self.path.read_text(encoding) + filename = str(self.path) + name = self.path.name + globs = {"__name__": "__main__"} + + optionflags = get_optionflags(self) + + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + parser = IPDocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield IPDoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + if int(pytest.__version__.split(".")[0]) < 7: + + @property + def path(self) -> Path: + return Path(self.fspath) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath=None, + path: Optional[Path] = None, + **kw, + ): + if path is not None: + import py.path + + fspath = py.path.local(path) + return super().from_parent(parent=parent, fspath=fspath, **kw) + + +def _check_all_skipped(test: "doctest.DocTest") -> None: + """Raise pytest.skip() if all examples in the given DocTest have the SKIP + option set.""" + import doctest + + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + pytest.skip("all docstests skipped by +SKIP option") + + +def _is_mocked(obj: object) -> bool: + """Return if an object is possibly a mock object by checking the + existence of a highly improbable attribute.""" + return ( + safe_getattr(obj, "pytest_mock_example_attribute_that_shouldnt_exist", None) + is not None + ) + + +@contextmanager +def _patch_unwrap_mock_aware() -> Generator[None, 
None, None]: + """Context manager which replaces ``inspect.unwrap`` with a version + that's aware of mock objects and doesn't recurse into them.""" + real_unwrap = inspect.unwrap + + def _mock_aware_unwrap( + func: Callable[..., Any], *, stop: Optional[Callable[[Any], Any]] = None + ) -> Any: + try: + if stop is None or stop is _is_mocked: + return real_unwrap(func, stop=_is_mocked) + _stop = stop + return real_unwrap(func, stop=lambda obj: _is_mocked(obj) or _stop(func)) + except Exception as e: + warnings.warn( + "Got %r when unwrapping %r. This is usually caused " + "by a violation of Python's object protocol; see e.g. " + "https://github.com/pytest-dev/pytest/issues/5080" % (e, func), + PytestWarning, + ) + raise + + inspect.unwrap = _mock_aware_unwrap + try: + yield + finally: + inspect.unwrap = real_unwrap + + +class IPDoctestModule(pytest.Module): + def collect(self) -> Iterable[IPDoctestItem]: + import doctest + from .ipdoctest import DocTestFinder, IPDocTestParser + + class MockAwareDocTestFinder(DocTestFinder): + """A hackish ipdoctest finder that overrides stdlib internals to fix a stdlib bug. + + https://github.com/pytest-dev/pytest/issues/3456 + https://bugs.python.org/issue25532 + """ + + def _find_lineno(self, obj, source_lines): + """Doctest code does not take into account `@property`, this + is a hackish way to fix it. https://bugs.python.org/issue17446 + + Wrapped Doctests will need to be unwrapped so the correct + line number is returned. This will be reported upstream. #8796 + """ + if isinstance(obj, property): + obj = getattr(obj, "fget", obj) + + if hasattr(obj, "__wrapped__"): + # Get the main obj in case of it being wrapped + obj = inspect.unwrap(obj) + + # Type ignored because this is a private function. 
+ return super()._find_lineno( # type:ignore[misc] + obj, + source_lines, + ) + + def _find( + self, tests, obj, name, module, source_lines, globs, seen + ) -> None: + if _is_mocked(obj): + return + with _patch_unwrap_mock_aware(): + + # Type ignored because this is a private function. + super()._find( # type:ignore[misc] + tests, obj, name, module, source_lines, globs, seen + ) + + if self.path.name == "conftest.py": + if int(pytest.__version__.split(".")[0]) < 7: + module = self.config.pluginmanager._importconftest( + self.path, + self.config.getoption("importmode"), + ) + else: + module = self.config.pluginmanager._importconftest( + self.path, + self.config.getoption("importmode"), + rootpath=self.config.rootpath, + ) + else: + try: + module = import_path(self.path, root=self.config.rootpath) + except ImportError: + if self.config.getvalue("ipdoctest_ignore_import_errors"): + pytest.skip("unable to import module %r" % self.path) + else: + raise + # Uses internal doctest module parsing mechanism. 
+ finder = MockAwareDocTestFinder(parser=IPDocTestParser()) + optionflags = get_optionflags(self) + runner = _get_runner( + verbose=False, + optionflags=optionflags, + checker=_get_checker(), + continue_on_failure=_get_continue_on_failure(self.config), + ) + + for test in finder.find(module, module.__name__): + if test.examples: # skip empty ipdoctests + yield IPDoctestItem.from_parent( + self, name=test.name, runner=runner, dtest=test + ) + + if int(pytest.__version__.split(".")[0]) < 7: + + @property + def path(self) -> Path: + return Path(self.fspath) + + @classmethod + def from_parent( + cls, + parent, + *, + fspath=None, + path: Optional[Path] = None, + **kw, + ): + if path is not None: + import py.path + + fspath = py.path.local(path) + return super().from_parent(parent=parent, fspath=fspath, **kw) + + +def _setup_fixtures(doctest_item: IPDoctestItem) -> FixtureRequest: + """Used by IPDoctestTextfile and IPDoctestItem to setup fixture information.""" + + def func() -> None: + pass + + doctest_item.funcargs = {} # type: ignore[attr-defined] + fm = doctest_item.session._fixturemanager + doctest_item._fixtureinfo = fm.getfixtureinfo( # type: ignore[attr-defined] + node=doctest_item, func=func, cls=None, funcargs=False + ) + fixture_request = FixtureRequest(doctest_item, _ispytest=True) + fixture_request._fillfixtures() + return fixture_request + + +def _init_checker_class() -> Type["IPDoctestOutputChecker"]: + import doctest + import re + from .ipdoctest import IPDoctestOutputChecker + + class LiteralsOutputChecker(IPDoctestOutputChecker): + # Based on doctest_nose_plugin.py from the nltk project + # (https://github.com/nltk/nltk) and on the "numtest" doctest extension + # by Sebastien Boisgerault (https://github.com/boisgera/numtest). 
+ + _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) + _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE) + _number_re = re.compile( + r""" + (?P<number> + (?P<mantissa> + (?P<integer1> [+-]?\d*)\.(?P<fraction>\d+) + | + (?P<integer2> [+-]?\d+)\. + ) + (?: + [Ee] + (?P<exponent1> [+-]?\d+) + )? + | + (?P<integer3> [+-]?\d+) + (?: + [Ee] + (?P<exponent2> [+-]?\d+) + ) + ) + """, + re.VERBOSE, + ) + + def check_output(self, want: str, got: str, optionflags: int) -> bool: + if super().check_output(want, got, optionflags): + return True + + allow_unicode = optionflags & _get_allow_unicode_flag() + allow_bytes = optionflags & _get_allow_bytes_flag() + allow_number = optionflags & _get_number_flag() + + if not allow_unicode and not allow_bytes and not allow_number: + return False + + def remove_prefixes(regex: Pattern[str], txt: str) -> str: + return re.sub(regex, r"\1\2", txt) + + if allow_unicode: + want = remove_prefixes(self._unicode_literal_re, want) + got = remove_prefixes(self._unicode_literal_re, got) + + if allow_bytes: + want = remove_prefixes(self._bytes_literal_re, want) + got = remove_prefixes(self._bytes_literal_re, got) + + if allow_number: + got = self._remove_unwanted_precision(want, got) + + return super().check_output(want, got, optionflags) + + def _remove_unwanted_precision(self, want: str, got: str) -> str: + wants = list(self._number_re.finditer(want)) + gots = list(self._number_re.finditer(got)) + if len(wants) != len(gots): + return got + offset = 0 + for w, g in zip(wants, gots): + fraction: Optional[str] = w.group("fraction") + exponent: Optional[str] = w.group("exponent1") + if exponent is None: + exponent = w.group("exponent2") + precision = 0 if fraction is None else len(fraction) + if exponent is not None: + precision -= int(exponent) + if float(w.group()) == approx(float(g.group()), abs=10 ** -precision): + # They're close enough. 
Replace the text we actually + # got with the text we want, so that it will match when we + # check the string literally. + got = ( + got[: g.start() + offset] + w.group() + got[g.end() + offset :] + ) + offset += w.end() - w.start() - (g.end() - g.start()) + return got + + return LiteralsOutputChecker + + +def _get_checker() -> "IPDoctestOutputChecker": + """Return a IPDoctestOutputChecker subclass that supports some + additional options: + + * ALLOW_UNICODE and ALLOW_BYTES options to ignore u'' and b'' + prefixes (respectively) in string literals. Useful when the same + ipdoctest should run in Python 2 and Python 3. + + * NUMBER to ignore floating-point differences smaller than the + precision of the literal number in the ipdoctest. + + An inner class is used to avoid importing "ipdoctest" at the module + level. + """ + global CHECKER_CLASS + if CHECKER_CLASS is None: + CHECKER_CLASS = _init_checker_class() + return CHECKER_CLASS() + + +def _get_allow_unicode_flag() -> int: + """Register and return the ALLOW_UNICODE flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_UNICODE") + + +def _get_allow_bytes_flag() -> int: + """Register and return the ALLOW_BYTES flag.""" + import doctest + + return doctest.register_optionflag("ALLOW_BYTES") + + +def _get_number_flag() -> int: + """Register and return the NUMBER flag.""" + import doctest + + return doctest.register_optionflag("NUMBER") + + +def _get_report_choice(key: str) -> int: + """Return the actual `ipdoctest` module flag value. + + We want to do it as late as possible to avoid importing `ipdoctest` and all + its dependencies when parsing options, as it adds overhead and breaks tests. 
+ """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +@pytest.fixture(scope="session") +def ipdoctest_namespace() -> Dict[str, Any]: + """Fixture that returns a :py:class:`dict` that will be injected into the + namespace of ipdoctests.""" + return dict() diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py b/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py deleted file mode 100644 index b2c70adfc1..0000000000 --- a/contrib/python/ipython/py3/IPython/testing/plugin/show_refs.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Simple script to show reference holding behavior. - -This is used by a companion test case. -""" - -import gc - -class C(object): - def __del__(self): - pass - #print 'deleting object...' # dbg - -if __name__ == '__main__': - c = C() - - c_refs = gc.get_referrers(c) - ref_ids = list(map(id,c_refs)) - - print('c referrers:',list(map(type,c_refs))) diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/simple.py b/contrib/python/ipython/py3/IPython/testing/plugin/simple.py index 3861977cab..35fbfd2fbd 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/simple.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/simple.py @@ -1,7 +1,7 @@ """Simple example using doctests. This file just contains doctests both using plain python and IPython prompts. -All tests should be loaded by nose. +All tests should be loaded by Pytest. """ def pyfunc(): @@ -24,10 +24,21 @@ def pyfunc(): return 'pyfunc' -def ipyfunc2(): - """Some pure python tests... +def ipyfunc(): + """Some IPython tests... 
+ + In [1]: ipyfunc() + Out[1]: 'ipyfunc' + + In [2]: import os + + In [3]: 2+3 + Out[3]: 5 - >>> 1+1 - 2 + In [4]: for i in range(3): + ...: print(i, end=' ') + ...: print(i+1, end=' ') + ...: + Out[4]: 0 1 1 2 2 3 """ - return 'pyfunc2' + return "ipyfunc" diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt b/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt index 8afcbfdf7d..96b1eae19f 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_exampleip.txt @@ -21,7 +21,7 @@ Another example:: Just like in IPython docstrings, you can use all IPython syntax and features:: - In [9]: !echo "hello" + In [9]: !echo hello hello In [10]: a='hi' diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py b/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py index d8f5991636..2686172bb2 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_ipdoctest.py @@ -74,3 +74,19 @@ def doctest_multiline3(): In [15]: h(0) Out[15]: -1 """ + + +def doctest_builtin_underscore(): + """Defining builtins._ should not break anything outside the doctest + while also should be working as expected inside the doctest. + + In [1]: import builtins + + In [2]: builtins._ = 42 + + In [3]: builtins._ + Out[3]: 42 + + In [4]: _ + Out[4]: 42 + """ diff --git a/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py b/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py index bd7ad8fb3e..b92448be07 100644 --- a/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py +++ b/contrib/python/ipython/py3/IPython/testing/plugin/test_refs.py @@ -37,10 +37,3 @@ def doctest_ivars(): In [6]: zz Out[6]: 1 """ - -def doctest_refs(): - """DocTest reference holding issues when running scripts. 
- - In [32]: run show_refs.py - c referrers: [<... 'dict'>] - """ diff --git a/contrib/python/ipython/py3/IPython/testing/skipdoctest.py b/contrib/python/ipython/py3/IPython/testing/skipdoctest.py index b0cf83c449..f440ea14b2 100644 --- a/contrib/python/ipython/py3/IPython/testing/skipdoctest.py +++ b/contrib/python/ipython/py3/IPython/testing/skipdoctest.py @@ -15,5 +15,5 @@ def skip_doctest(f): This decorator allows you to mark a function whose docstring you wish to omit from testing, while preserving the docstring for introspection, help, etc.""" - f.skip_doctest = True + f.__skip_doctest__ = True return f diff --git a/contrib/python/ipython/py3/IPython/testing/tools.py b/contrib/python/ipython/py3/IPython/testing/tools.py index e7e7285f49..2ff63a6d4a 100644 --- a/contrib/python/ipython/py3/IPython/testing/tools.py +++ b/contrib/python/ipython/py3/IPython/testing/tools.py @@ -10,6 +10,7 @@ Authors # Distributed under the terms of the Modified BSD License. import os +from pathlib import Path import re import sys import tempfile @@ -20,15 +21,6 @@ from io import StringIO from subprocess import Popen, PIPE from unittest.mock import patch -try: - # These tools are used by parts of the runtime, so we make the nose - # dependency optional at this point. Nose is a hard dependency to run the - # test suite, but NOT to use ipython itself. 
- import nose.tools as nt - has_nose = True -except ImportError: - has_nose = False - from traitlets.config.loader import Config from IPython.utils.process import get_output_error_code from IPython.utils.text import list_strings @@ -142,7 +134,7 @@ def default_config(): config.TerminalTerminalInteractiveShell.term_title = False, config.TerminalInteractiveShell.autocall = 0 f = tempfile.NamedTemporaryFile(suffix=u'test_hist.sqlite', delete=False) - config.HistoryManager.hist_file = f.name + config.HistoryManager.hist_file = Path(f.name) f.close() config.HistoryManager.db_cache_size = 10000 return config @@ -176,7 +168,7 @@ def ipexec(fname, options=None, commands=()): Parameters ---------- - fname : str + fname : str, Path Name of file to be executed (should have .py or .ipy extension). options : optional, list @@ -189,7 +181,10 @@ def ipexec(fname, options=None, commands=()): ------- ``(stdout, stderr)`` of ipython subprocess. """ - if options is None: options = [] + __tracebackhide__ = True + + if options is None: + options = [] cmdargs = default_argv() + options @@ -204,6 +199,8 @@ def ipexec(fname, options=None, commands=()): # should we keep suppressing warnings here, even after removing shims? env['PYTHONWARNINGS'] = 'ignore' # env.pop('PYTHONWARNINGS', None) # Avoid extraneous warnings appearing on stderr + # Prevent coloring under PyCharm ("\x1b[0m" at the end of the stdout) + env.pop("PYCHARM_HOSTED", None) for k, v in env.items(): # Debug a bizarre failure we've seen on Windows: # TypeError: environment can only contain strings @@ -229,7 +226,7 @@ def ipexec_validate(fname, expected_out, expected_err='', Parameters ---------- - fname : str + fname : str, Path Name of the file to be executed (should have .py or .ipy extension). 
expected_out : str @@ -245,8 +242,7 @@ def ipexec_validate(fname, expected_out, expected_err='', ------- None """ - - import nose.tools as nt + __tracebackhide__ = True out, err = ipexec(fname, options, commands) #print 'OUT', out # dbg @@ -255,12 +251,16 @@ def ipexec_validate(fname, expected_out, expected_err='', # more informative than simply having an empty stdout. if err: if expected_err: - nt.assert_equal("\n".join(err.strip().splitlines()), "\n".join(expected_err.strip().splitlines())) + assert "\n".join(err.strip().splitlines()) == "\n".join( + expected_err.strip().splitlines() + ) else: raise ValueError('Running file %r produced error: %r' % (fname, err)) # If no errors or output on stderr was expected, match stdout - nt.assert_equal("\n".join(out.strip().splitlines()), "\n".join(expected_out.strip().splitlines())) + assert "\n".join(out.strip().splitlines()) == "\n".join( + expected_out.strip().splitlines() + ) class TempFileMixin(unittest.TestCase): @@ -320,6 +320,8 @@ def check_pairs(func, pairs): None. Raises an AssertionError if any output does not match the expected value. """ + __tracebackhide__ = True + name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>")) for inp, expected in pairs: out = func(inp) @@ -362,6 +364,8 @@ class AssertPrints(object): setattr(sys, self.channel, self.buffer if self.suppress else self.tee) def __exit__(self, etype, value, traceback): + __tracebackhide__ = True + try: if value is not None: # If an error was raised, don't check anything else @@ -389,6 +393,8 @@ class AssertNotPrints(AssertPrints): Counterpart of AssertPrints""" def __exit__(self, etype, value, traceback): + __tracebackhide__ = True + try: if value is not None: # If an error was raised, don't check anything else @@ -420,9 +426,8 @@ def mute_warn(): @contextmanager def make_tempfile(name): - """ Create an empty, named, temporary file for the duration of the context. 
- """ - open(name, 'w').close() + """Create an empty, named, temporary file for the duration of the context.""" + open(name, "w", encoding="utf-8").close() try: yield finally: @@ -443,8 +448,8 @@ def fake_input(inputs): def mock_input(prompt=''): try: return next(it) - except StopIteration: - raise EOFError('No more inputs given') + except StopIteration as e: + raise EOFError('No more inputs given') from e return patch('builtins.input', mock_input) @@ -452,10 +457,10 @@ def help_output_test(subcommand=''): """test that `ipython [subcommand] -h` works""" cmd = get_ipython_cmd() + [subcommand, '-h'] out, err, rc = get_output_error_code(cmd) - nt.assert_equal(rc, 0, err) - nt.assert_not_in("Traceback", err) - nt.assert_in("Options", out) - nt.assert_in("--help-all", out) + assert rc == 0, err + assert "Traceback" not in err + assert "Options" in out + assert "--help-all" in out return out, err @@ -463,9 +468,9 @@ def help_all_output_test(subcommand=''): """test that `ipython [subcommand] --help-all` works""" cmd = get_ipython_cmd() + [subcommand, '--help-all'] out, err, rc = get_output_error_code(cmd) - nt.assert_equal(rc, 0, err) - nt.assert_not_in("Traceback", err) - nt.assert_in("Options", out) - nt.assert_in("Class", out) + assert rc == 0, err + assert "Traceback" not in err + assert "Options" in out + assert "Class" in out return out, err diff --git a/contrib/python/ipython/py3/IPython/utils/_process_cli.py b/contrib/python/ipython/py3/IPython/utils/_process_cli.py index 89a31c3164..86e918a8d0 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_cli.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_cli.py @@ -19,17 +19,8 @@ import System import os # Import IPython libraries: -from IPython.utils import py3compat from ._process_common import arg_split -def _find_cmd(cmd): - """Find the full path to a command using which.""" - paths = System.Environment.GetEnvironmentVariable("PATH").Split(os.pathsep) - for path in paths: - filename = 
os.path.join(path, cmd) - if System.IO.File.Exists(filename): - return py3compat.decode(filename) - raise OSError("command %r not found" % cmd) def system(cmd): """ diff --git a/contrib/python/ipython/py3/IPython/utils/_process_common.py b/contrib/python/ipython/py3/IPython/utils/_process_common.py index 2a647dc7fa..2a0b828839 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_common.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_common.py @@ -49,18 +49,16 @@ def process_handler(cmd, callback, stderr=subprocess.PIPE): Parameters ---------- cmd : str or list - A command to be executed by the system, using :class:`subprocess.Popen`. - If a string is passed, it will be run in the system shell. If a list is - passed, it will be used directly as arguments. - + A command to be executed by the system, using :class:`subprocess.Popen`. + If a string is passed, it will be run in the system shell. If a list is + passed, it will be used directly as arguments. callback : callable - A one-argument function that will be called with the Popen object. - + A one-argument function that will be called with the Popen object. stderr : file descriptor number, optional - By default this is set to ``subprocess.PIPE``, but you can also pass the - value ``subprocess.STDOUT`` to force the subprocess' stderr to go into - the same file descriptor as its stdout. This is useful to read stdout - and stderr combined in the order they are generated. + By default this is set to ``subprocess.PIPE``, but you can also pass the + value ``subprocess.STDOUT`` to force the subprocess' stderr to go into + the same file descriptor as its stdout. This is useful to read stdout + and stderr combined in the order they are generated. Returns ------- @@ -117,12 +115,12 @@ def getoutput(cmd): Parameters ---------- cmd : str or list - A command to be executed in the system shell. + A command to be executed in the system shell. 
Returns ------- output : str - A string containing the combination of stdout and stderr from the + A string containing the combination of stdout and stderr from the subprocess, in whatever order the subprocess originally wrote to its file descriptors (so the order of the information in this string is the correct order as would be seen if running the command in a terminal). @@ -141,7 +139,7 @@ def getoutputerror(cmd): Parameters ---------- cmd : str or list - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- @@ -159,7 +157,7 @@ def get_output_error_code(cmd): Parameters ---------- cmd : str or list - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- diff --git a/contrib/python/ipython/py3/IPython/utils/_process_posix.py b/contrib/python/ipython/py3/IPython/utils/_process_posix.py index a11cad7697..59b5c23896 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_posix.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_posix.py @@ -24,21 +24,12 @@ import pexpect # Our own from ._process_common import getoutput, arg_split -from IPython.utils import py3compat from IPython.utils.encoding import DEFAULT_ENCODING #----------------------------------------------------------------------------- # Function definitions #----------------------------------------------------------------------------- -def _find_cmd(cmd): - """Find the full path to a command using which.""" - - path = sp.Popen(['/usr/bin/env', 'which', cmd], - stdout=sp.PIPE, stderr=sp.PIPE).communicate()[0] - return py3compat.decode(path) - - class ProcessHandler(object): """Execute subprocesses under the control of pexpect. """ @@ -82,12 +73,12 @@ class ProcessHandler(object): Parameters ---------- cmd : str - A command to be executed in the system shell. + A command to be executed in the system shell. 
Returns ------- output : str - A string containing the combination of stdout and stderr from the + A string containing the combination of stdout and stderr from the subprocess, in whatever order the subprocess originally wrote to its file descriptors (so the order of the information in this string is the correct order as would be seen if running the command in a terminal). @@ -103,12 +94,12 @@ class ProcessHandler(object): Parameters ---------- cmd : str - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- output : str - A string containing the combination of stdout and stderr from the + A string containing the combination of stdout and stderr from the subprocess, in whatever order the subprocess originally wrote to its file descriptors (so the order of the information in this string is the correct order as would be seen if running the command in a terminal). @@ -124,7 +115,7 @@ class ProcessHandler(object): Parameters ---------- cmd : str - A command to be executed in the system shell. + A command to be executed in the system shell. 
Returns ------- diff --git a/contrib/python/ipython/py3/IPython/utils/_process_win32.py b/contrib/python/ipython/py3/IPython/utils/_process_win32.py index 6d05bdaa12..36fb092d7b 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_win32.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_win32.py @@ -71,27 +71,6 @@ class AvoidUNCPath(object): os.chdir(self.path) -def _find_cmd(cmd): - """Find the full path to a .bat or .exe using the win32api module.""" - try: - from win32api import SearchPath - except ImportError: - raise ImportError('you need to have pywin32 installed for this to work') - else: - PATH = os.environ['PATH'] - extensions = ['.exe', '.com', '.bat', '.py'] - path = None - for ext in extensions: - try: - path = SearchPath(PATH, cmd, ext)[0] - except: - pass - if path is None: - raise OSError("command %r not found" % cmd) - else: - return path - - def _system_body(p): """Callback for _system.""" enc = DEFAULT_ENCODING @@ -128,7 +107,7 @@ def system(cmd): Parameters ---------- cmd : str or list - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- @@ -152,7 +131,7 @@ def getoutput(cmd): Parameters ---------- cmd : str or list - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- @@ -181,7 +160,7 @@ try: This is a special version for windows that use a ctypes call to CommandLineToArgvW to do the argv splitting. The posix parameter is ignored. - + If strict=False, process_common.arg_split(...strict=False) is used instead. """ #CommandLineToArgvW returns path to executable if called with empty string. 
diff --git a/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py b/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py index c2e2329c45..f8c2a057a8 100644 --- a/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py +++ b/contrib/python/ipython/py3/IPython/utils/_process_win32_controller.py @@ -551,13 +551,13 @@ def system(cmd): Parameters ---------- cmd : str - A command to be executed in the system shell. + A command to be executed in the system shell. Returns ------- None : we explicitly do NOT return the subprocess status code, as this utility is meant to be used extensively in IPython, where any return value - would trigger :func:`sys.displayhook` calls. + would trigger : func:`sys.displayhook` calls. """ with AvoidUNCPath() as path: if path is not None: diff --git a/contrib/python/ipython/py3/IPython/utils/_sysinfo.py b/contrib/python/ipython/py3/IPython/utils/_sysinfo.py index 6c996d02d4..65cb32d0d5 100644 --- a/contrib/python/ipython/py3/IPython/utils/_sysinfo.py +++ b/contrib/python/ipython/py3/IPython/utils/_sysinfo.py @@ -1,2 +1,2 @@ # GENERATED BY setup.py -commit = u"fd4cac190" +commit = u"55e81b920" diff --git a/contrib/python/ipython/py3/IPython/utils/coloransi.py b/contrib/python/ipython/py3/IPython/utils/coloransi.py index bc8e8377f7..e331421802 100644 --- a/contrib/python/ipython/py3/IPython/utils/coloransi.py +++ b/contrib/python/ipython/py3/IPython/utils/coloransi.py @@ -176,9 +176,9 @@ class ColorSchemeTable(dict): scheme_test = scheme.lower() try: scheme_idx = valid_schemes.index(scheme_test) - except ValueError: + except ValueError as e: raise ValueError('Unrecognized color scheme: ' + scheme + \ - '\nValid schemes: '+str(scheme_names).replace("'', ",'')) + '\nValid schemes: '+str(scheme_names).replace("'', ",'')) from e else: active = scheme_names[scheme_idx] self.active_scheme_name = active diff --git a/contrib/python/ipython/py3/IPython/utils/contexts.py 
b/contrib/python/ipython/py3/IPython/utils/contexts.py index 4d379b0eda..7f95d4419d 100644 --- a/contrib/python/ipython/py3/IPython/utils/contexts.py +++ b/contrib/python/ipython/py3/IPython/utils/contexts.py @@ -58,17 +58,3 @@ class preserve_keys(object): for k in self.to_delete: d.pop(k, None) d.update(self.to_update) - - -class NoOpContext(object): - """ - Deprecated - - Context manager that does nothing.""" - - def __init__(self): - warnings.warn("""NoOpContext is deprecated since IPython 5.0 """, - DeprecationWarning, stacklevel=2) - - def __enter__(self): pass - def __exit__(self, type, value, traceback): pass diff --git a/contrib/python/ipython/py3/IPython/utils/decorators.py b/contrib/python/ipython/py3/IPython/utils/decorators.py index c26485553c..47791d7ca6 100644 --- a/contrib/python/ipython/py3/IPython/utils/decorators.py +++ b/contrib/python/ipython/py3/IPython/utils/decorators.py @@ -50,7 +50,7 @@ def flag_calls(func): def undoc(func): """Mark a function or class as undocumented. - + This is found by inspecting the AST, so for now it must be used directly as @undoc, not as e.g. @decorators.undoc """ diff --git a/contrib/python/ipython/py3/IPython/utils/encoding.py b/contrib/python/ipython/py3/IPython/utils/encoding.py index 69a319ef0e..651ee0c0b5 100644 --- a/contrib/python/ipython/py3/IPython/utils/encoding.py +++ b/contrib/python/ipython/py3/IPython/utils/encoding.py @@ -37,10 +37,10 @@ def get_stream_enc(stream, default=None): # won't need to make changes all over IPython. def getdefaultencoding(prefer_stream=True): """Return IPython's guess for the default encoding for bytes as text. - + If prefer_stream is True (default), asks for stdin.encoding first, to match the calling Terminal, but that is often None for subprocesses. 
- + Then fall back on locale.getpreferredencoding(), which should be a sensible platform default (that respects LANG environment), and finally to sys.getdefaultencoding() which is the most conservative option, diff --git a/contrib/python/ipython/py3/IPython/utils/frame.py b/contrib/python/ipython/py3/IPython/utils/frame.py index 74c6d4197f..808906bda8 100644 --- a/contrib/python/ipython/py3/IPython/utils/frame.py +++ b/contrib/python/ipython/py3/IPython/utils/frame.py @@ -28,12 +28,10 @@ def extract_vars(*names,**kw): *names : str One or more variable names which will be extracted from the caller's frame. - - depth : integer, optional + **kw : integer, optional How many frames in the stack to walk when looking for your variables. The default is 0, which will use the frame where the call was made. - Examples -------- :: diff --git a/contrib/python/ipython/py3/IPython/utils/generics.py b/contrib/python/ipython/py3/IPython/utils/generics.py index fcada6f44d..3626ca4cc7 100644 --- a/contrib/python/ipython/py3/IPython/utils/generics.py +++ b/contrib/python/ipython/py3/IPython/utils/generics.py @@ -22,7 +22,6 @@ def complete_object(obj, prev_completions): The object to complete. prev_completions : list List of attributes discovered so far. - This should return the list of attributes in obj. If you only wish to add to the attributes already discovered normally, return own_attrs + prev_completions. diff --git a/contrib/python/ipython/py3/IPython/utils/importstring.py b/contrib/python/ipython/py3/IPython/utils/importstring.py index c8e1840eb3..51bfc7b569 100644 --- a/contrib/python/ipython/py3/IPython/utils/importstring.py +++ b/contrib/python/ipython/py3/IPython/utils/importstring.py @@ -16,12 +16,12 @@ def import_item(name): Parameters ---------- name : string - The fully qualified name of the module/package being imported. + The fully qualified name of the module/package being imported. Returns ------- mod : module object - The module that was imported. 
+ The module that was imported. """ parts = name.rsplit('.', 1) @@ -31,8 +31,8 @@ def import_item(name): module = __import__(package, fromlist=[obj]) try: pak = getattr(module, obj) - except AttributeError: - raise ImportError('No module named %s' % obj) + except AttributeError as e: + raise ImportError('No module named %s' % obj) from e return pak else: # called with un-dotted string diff --git a/contrib/python/ipython/py3/IPython/utils/io.py b/contrib/python/ipython/py3/IPython/utils/io.py index fab9bae797..170bc625ac 100644 --- a/contrib/python/ipython/py3/IPython/utils/io.py +++ b/contrib/python/ipython/py3/IPython/utils/io.py @@ -13,87 +13,16 @@ import os import sys import tempfile import warnings +from pathlib import Path from warnings import warn from IPython.utils.decorators import undoc from .capture import CapturedIO, capture_output -@undoc -class IOStream: - - def __init__(self, stream, fallback=None): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - if not hasattr(stream,'write') or not hasattr(stream,'flush'): - if fallback is not None: - stream = fallback - else: - raise ValueError("fallback required, but not specified") - self.stream = stream - self._swrite = stream.write - - # clone all methods not overridden: - def clone(meth): - return not hasattr(self, meth) and not meth.startswith('_') - for meth in filter(clone, dir(stream)): - try: - val = getattr(stream, meth) - except AttributeError: - pass - else: - setattr(self, meth, val) - - def __repr__(self): - cls = self.__class__ - tpl = '{mod}.{cls}({args})' - return tpl.format(mod=cls.__module__, cls=cls.__name__, args=self.stream) - - def write(self,data): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - try: - self._swrite(data) - except: - try: - # print handles some unicode issues which may trip a plain - # write() call. 
Emulate write() by using an empty end - # argument. - print(data, end='', file=self.stream) - except: - # if we get here, something is seriously broken. - print('ERROR - failed to write data to stream:', self.stream, - file=sys.stderr) - - def writelines(self, lines): - warn('IOStream is deprecated since IPython 5.0, use sys.{stdin,stdout,stderr} instead', - DeprecationWarning, stacklevel=2) - if isinstance(lines, str): - lines = [lines] - for line in lines: - self.write(line) - - # This class used to have a writeln method, but regular files and streams - # in Python don't have this method. We need to keep this completely - # compatible so we removed it. - - @property - def closed(self): - return self.stream.closed - - def close(self): - pass - # setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr -devnull = open(os.devnull, 'w') +devnull = open(os.devnull, "w", encoding="utf-8") atexit.register(devnull.close) -# io.std* are deprecated, but don't show our own deprecation warnings -# during initialization of the deprecated API. -with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - stdin = IOStream(sys.stdin, fallback=devnull) - stdout = IOStream(sys.stdout, fallback=devnull) - stderr = IOStream(sys.stderr, fallback=devnull) class Tee(object): """A class to duplicate an output stream to stdout/err. @@ -112,11 +41,9 @@ class Tee(object): Parameters ---------- file_or_name : filename or open filehandle (writable) - File that will be duplicated - + File that will be duplicated mode : optional, valid mode for open(). - If a filename was give, open with this mode. - + If a filename was give, open with this mode. 
channel : str, one of ['stdout', 'stderr'] """ if channel not in ['stdout', 'stderr']: @@ -125,7 +52,8 @@ class Tee(object): if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'): self.file = file_or_name else: - self.file = open(file_or_name, mode) + encoding = None if "b" in mode else "utf-8" + self.file = open(file_or_name, mode, encoding=encoding) self.channel = channel self.ostream = getattr(sys, channel) setattr(sys, channel, self) @@ -194,28 +122,21 @@ def temp_pyfile(src, ext='.py'): Parameters ---------- src : string or list of strings (no need for ending newlines if list) - Source code to be written to the file. - + Source code to be written to the file. ext : optional, string - Extension for the generated file. + Extension for the generated file. Returns ------- (filename, open filehandle) - It is the caller's responsibility to close the open file and unlink it. + It is the caller's responsibility to close the open file and unlink it. """ fname = tempfile.mkstemp(ext)[1] - with open(fname,'w') as f: + with open(Path(fname), "w", encoding="utf-8") as f: f.write(src) f.flush() return fname -@undoc -def atomic_writing(*args, **kwargs): - """DEPRECATED: moved to notebook.services.contents.fileio""" - warn("IPython.utils.io.atomic_writing has moved to notebook.services.contents.fileio since IPython 4.0", DeprecationWarning, stacklevel=2) - from notebook.services.contents.fileio import atomic_writing - return atomic_writing(*args, **kwargs) @undoc def raw_print(*args, **kw): @@ -234,15 +155,3 @@ def raw_print_err(*args, **kw): print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'), file=sys.__stderr__) sys.__stderr__.flush() - -# used by IPykernel <- 4.9. 
Removed during IPython 7-dev period and re-added -# Keep for a version or two then should remove -rprint = raw_print -rprinte = raw_print_err - -@undoc -def unicode_std_stream(stream='stdout'): - """DEPRECATED, moved to nbconvert.utils.io""" - warn("IPython.utils.io.unicode_std_stream has moved to nbconvert.utils.io since IPython 4.0", DeprecationWarning, stacklevel=2) - from nbconvert.utils.io import unicode_std_stream - return unicode_std_stream(stream) diff --git a/contrib/python/ipython/py3/IPython/utils/ipstruct.py b/contrib/python/ipython/py3/IPython/utils/ipstruct.py index e2b3e8fa4c..ed112101a3 100644 --- a/contrib/python/ipython/py3/IPython/utils/ipstruct.py +++ b/contrib/python/ipython/py3/IPython/utils/ipstruct.py @@ -43,14 +43,13 @@ class Struct(dict): Parameters ---------- - args : dict, Struct + *args : dict, Struct Initialize with one dict or Struct - kw : dict + **kw : dict Initialize with key, value pairs. Examples -------- - >>> s = Struct(a=10,b=30) >>> s.a 10 @@ -68,7 +67,6 @@ class Struct(dict): Examples -------- - >>> s = Struct() >>> s['a'] = 10 >>> s.allow_new_attr(False) @@ -95,7 +93,6 @@ class Struct(dict): Examples -------- - >>> s = Struct() >>> s.a = 10 >>> s.a @@ -120,7 +117,7 @@ class Struct(dict): try: self.__setitem__(key, value) except KeyError as e: - raise AttributeError(e) + raise AttributeError(e) from e def __getattr__(self, key): """Get an attr by calling :meth:`dict.__getitem__`. @@ -130,12 +127,11 @@ class Struct(dict): Examples -------- - >>> s = Struct(a=10) >>> s.a 10 >>> type(s.get) - <... 'builtin_function_or_method'> + <...method'> >>> try: ... s.b ... 
except AttributeError: @@ -145,8 +141,8 @@ class Struct(dict): """ try: result = self[key] - except KeyError: - raise AttributeError(key) + except KeyError as e: + raise AttributeError(key) from e else: return result @@ -155,7 +151,6 @@ class Struct(dict): Examples -------- - >>> s = Struct(a=10,b=30) >>> s2 = Struct(a=20,c=40) >>> s += s2 @@ -170,7 +165,6 @@ class Struct(dict): Examples -------- - >>> s1 = Struct(a=10,b=30) >>> s2 = Struct(a=20,c=40) >>> s = s1 + s2 @@ -186,7 +180,6 @@ class Struct(dict): Examples -------- - >>> s1 = Struct(a=10,b=30) >>> s2 = Struct(a=40) >>> s = s1 - s2 @@ -202,7 +195,6 @@ class Struct(dict): Examples -------- - >>> s1 = Struct(a=10,b=30) >>> s2 = Struct(a=40) >>> s1 -= s2 @@ -236,7 +228,6 @@ class Struct(dict): Examples -------- - >>> s = Struct(a=10,b=30) >>> s2 = s.copy() >>> type(s2) is Struct @@ -251,7 +242,6 @@ class Struct(dict): Examples -------- - >>> s = Struct(a=10) >>> s.hasattr('a') True @@ -284,7 +274,7 @@ class Struct(dict): Parameters ---------- - __loc_data : dict, Struct + __loc_data__ : dict, Struct The data to merge into self __conflict_solve : dict The conflict policy dict. The keys are binary functions used to @@ -292,12 +282,11 @@ class Struct(dict): the keys the conflict resolution function applies to. Instead of a list of strings a space separated string can be used, like 'a b c'. - kw : dict + **kw : dict Additional key, value pairs to merge in Notes ----- - The `__conflict_solve` dict is a dictionary of binary functions which will be used to solve key conflicts. 
Here is an example:: @@ -338,7 +327,6 @@ class Struct(dict): Examples -------- - This show the default policy: >>> s = Struct(a=10,b=30) diff --git a/contrib/python/ipython/py3/IPython/utils/module_paths.py b/contrib/python/ipython/py3/IPython/utils/module_paths.py index 0570c322e6..f9f7cacc33 100644 --- a/contrib/python/ipython/py3/IPython/utils/module_paths.py +++ b/contrib/python/ipython/py3/IPython/utils/module_paths.py @@ -2,8 +2,6 @@ Utility functions for finding modules on sys.path. -`find_module` returns a path to module or None, given certain conditions. - """ #----------------------------------------------------------------------------- # Copyright (c) 2011, the IPython Development Team. @@ -20,6 +18,7 @@ Utility functions for finding modules on sys.path. # Stdlib imports import importlib import os +import sys # Third-party imports @@ -42,7 +41,7 @@ def find_mod(module_name): """ Find module `module_name` on sys.path, and return the path to module `module_name`. - - If `module_name` refers to a module directory, then return path to __init__ file. + - If `module_name` refers to a module directory, then return path to __init__ file. - If `module_name` is a directory without an __init__file, return None. - If module is missing or does not have a `.py` or `.pyw` extension, return None. - Note that we are not interested in running bytecode. @@ -51,16 +50,18 @@ def find_mod(module_name): Parameters ---------- module_name : str - + Returns ------- module_path : str Path to module `module_name`, its __init__.py, or None, depending on above conditions. 
""" - loader = importlib.util.find_spec(module_name) - module_path = loader.origin + spec = importlib.util.find_spec(module_name) + module_path = spec.origin if module_path is None: + if spec.loader in sys.meta_path: + return spec.loader return None else: split_path = module_path.split(".") diff --git a/contrib/python/ipython/py3/IPython/utils/openpy.py b/contrib/python/ipython/py3/IPython/utils/openpy.py index c90d2b53a3..297a762c7d 100644 --- a/contrib/python/ipython/py3/IPython/utils/openpy.py +++ b/contrib/python/ipython/py3/IPython/utils/openpy.py @@ -7,6 +7,7 @@ Much of the code is taken from the tokenize module in Python 3.2. import io from io import TextIOWrapper, BytesIO +from pathlib import Path import re from tokenize import open, detect_encoding @@ -59,20 +60,21 @@ def strip_encoding_cookie(filelike): def read_py_file(filename, skip_encoding_cookie=True): """Read a Python file, using the encoding declared inside the file. - + Parameters ---------- filename : str - The path to the file to read. + The path to the file to read. skip_encoding_cookie : bool - If True (the default), and the encoding declaration is found in the first - two lines, that line will be excluded from the output. - + If True (the default), and the encoding declaration is found in the first + two lines, that line will be excluded from the output. + Returns ------- A unicode string containing the contents of the file. """ - with open(filename) as f: # the open function defined in this module. + filepath = Path(filename) + with open(filepath) as f: # the open function defined in this module. if skip_encoding_cookie: return "".join(strip_encoding_cookie(f)) else: @@ -80,18 +82,18 @@ def read_py_file(filename, skip_encoding_cookie=True): def read_py_url(url, errors='replace', skip_encoding_cookie=True): """Read a Python file from a URL, using the encoding declared inside the file. - + Parameters ---------- url : str - The URL from which to fetch the file. 
+ The URL from which to fetch the file. errors : str - How to handle decoding errors in the file. Options are the same as for - bytes.decode(), but here 'replace' is the default. + How to handle decoding errors in the file. Options are the same as for + bytes.decode(), but here 'replace' is the default. skip_encoding_cookie : bool - If True (the default), and the encoding declaration is found in the first - two lines, that line will be excluded from the output. - + If True (the default), and the encoding declaration is found in the first + two lines, that line will be excluded from the output. + Returns ------- A unicode string containing the contents of the file. diff --git a/contrib/python/ipython/py3/IPython/utils/path.py b/contrib/python/ipython/py3/IPython/utils/path.py index 0fb6144e19..3db33e4c43 100644 --- a/contrib/python/ipython/py3/IPython/utils/path.py +++ b/contrib/python/ipython/py3/IPython/utils/path.py @@ -33,14 +33,14 @@ if sys.platform == 'win32': Examples -------- - >>> get_long_path_name('c:\\docume~1') + >>> get_long_path_name('c:\\\\docume~1') 'c:\\\\Documents and Settings' """ try: import ctypes - except ImportError: - raise ImportError('you need to have ctypes installed for this to work') + except ImportError as e: + raise ImportError('you need to have ctypes installed for this to work') from e _GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW _GetLongPathName.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint ] @@ -67,20 +67,6 @@ def get_long_path_name(path): return _get_long_path_name(path) -def unquote_filename(name, win32=(sys.platform=='win32')): - """ On Windows, remove leading and trailing quotes from filenames. - - This function has been deprecated and should not be used any more: - unquoting is now taken care of by :func:`IPython.utils.process.arg_split`. 
- """ - warn("'unquote_filename' is deprecated since IPython 5.0 and should not " - "be used anymore", DeprecationWarning, stacklevel=2) - if win32: - if name.startswith(("'", '"')) and name.endswith(("'", '"')): - name = name[1:-1] - return name - - def compress_user(path): """Reverse of :func:`os.path.expanduser` """ @@ -89,7 +75,7 @@ def compress_user(path): path = "~" + path[len(home):] return path -def get_py_filename(name, force_win32=None): +def get_py_filename(name): """Return a valid python filename in the current directory. If the given name is not a file, it adds '.py' and searches again. @@ -97,10 +83,6 @@ def get_py_filename(name, force_win32=None): """ name = os.path.expanduser(name) - if force_win32 is not None: - warn("The 'force_win32' argument to 'get_py_filename' is deprecated " - "since IPython 5.0 and should not be used anymore", - DeprecationWarning, stacklevel=2) if not os.path.isfile(name) and not name.endswith('.py'): name += '.py' if os.path.isfile(name): @@ -109,7 +91,7 @@ def get_py_filename(name, force_win32=None): raise IOError('File `%r` not found.' % name) -def filefind(filename, path_dirs=None): +def filefind(filename: str, path_dirs=None) -> str: """Find a file by looking through a sequence of paths. This iterates through a sequence of paths looking for a file and returns @@ -139,7 +121,12 @@ def filefind(filename, path_dirs=None): Returns ------- - Raises :exc:`IOError` or returns absolute path to file. + path : str + returns absolute path to file. + + Raises + ------ + IOError """ # If paths are quoted, abspath gets confused, strip them... 
@@ -178,7 +165,6 @@ def get_home_dir(require_writable=False) -> str: Parameters ---------- - require_writable : bool [default: False] if True: guarantees the return value is a writable directory, otherwise @@ -205,7 +191,7 @@ def get_home_dir(require_writable=False) -> str: pass if (not require_writable) or _writable_dir(homedir): - assert isinstance(homedir, str), "Homedir shoudl be unicode not bytes" + assert isinstance(homedir, str), "Homedir should be unicode not bytes" return homedir else: raise HomeDirError('%s is not a writable dir, ' @@ -219,7 +205,7 @@ def get_xdg_dir(): env = os.environ - if os.name == 'posix' and sys.platform != 'darwin': + if os.name == "posix": # Linux, Unix, AIX, etc. # use ~/.config if empty OR not set xdg = env.get("XDG_CONFIG_HOME", None) or os.path.join(get_home_dir(), '.config') @@ -238,7 +224,7 @@ def get_xdg_cache_dir(): env = os.environ - if os.name == 'posix' and sys.platform != 'darwin': + if os.name == "posix": # Linux, Unix, AIX, etc. # use ~/.cache if empty OR not set xdg = env.get("XDG_CACHE_HOME", None) or os.path.join(get_home_dir(), '.cache') @@ -249,36 +235,6 @@ def get_xdg_cache_dir(): return None -@undoc -def get_ipython_dir(): - warn("get_ipython_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_dir - return get_ipython_dir() - -@undoc -def get_ipython_cache_dir(): - warn("get_ipython_cache_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_cache_dir - return get_ipython_cache_dir() - -@undoc -def get_ipython_package_dir(): - warn("get_ipython_package_dir has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_package_dir - return get_ipython_package_dir() - -@undoc -def get_ipython_module_path(module_str): - warn("get_ipython_module_path has moved to the IPython.paths 
module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import get_ipython_module_path - return get_ipython_module_path(module_str) - -@undoc -def locate_profile(profile='default'): - warn("locate_profile has moved to the IPython.paths module since IPython 4.0.", DeprecationWarning, stacklevel=2) - from IPython.paths import locate_profile - return locate_profile(profile=profile) - def expand_path(s): """Expand $VARS and ~names in a string, like a shell diff --git a/contrib/python/ipython/py3/IPython/utils/pickleutil.py b/contrib/python/ipython/py3/IPython/utils/pickleutil.py deleted file mode 100644 index 785e6f6c80..0000000000 --- a/contrib/python/ipython/py3/IPython/utils/pickleutil.py +++ /dev/null @@ -1,5 +0,0 @@ -from warnings import warn - -warn("IPython.utils.pickleutil has moved to ipykernel.pickleutil", stacklevel=2) - -from ipykernel.pickleutil import * diff --git a/contrib/python/ipython/py3/IPython/utils/py3compat.py b/contrib/python/ipython/py3/IPython/utils/py3compat.py index c758787300..34af4c58f4 100644 --- a/contrib/python/ipython/py3/IPython/utils/py3compat.py +++ b/contrib/python/ipython/py3/IPython/utils/py3compat.py @@ -3,13 +3,8 @@ This file is deprecated and will be removed in a future version. 
""" -import functools -import os -import sys -import re -import shutil -import types import platform +import builtins as builtin_mod from .encoding import DEFAULT_ENCODING @@ -18,6 +13,7 @@ def decode(s, encoding=None): encoding = encoding or DEFAULT_ENCODING return s.decode(encoding, "replace") + def encode(u, encoding=None): encoding = encoding or DEFAULT_ENCODING return u.encode(encoding, "replace") @@ -28,36 +24,6 @@ def cast_unicode(s, encoding=None): return decode(s, encoding) return s -def cast_bytes(s, encoding=None): - if not isinstance(s, bytes): - return encode(s, encoding) - return s - -def buffer_to_bytes(buf): - """Cast a buffer object to bytes""" - if not isinstance(buf, bytes): - buf = bytes(buf) - return buf - -def _modify_str_or_docstring(str_change_func): - @functools.wraps(str_change_func) - def wrapper(func_or_str): - if isinstance(func_or_str, (str,)): - func = None - doc = func_or_str - else: - func = func_or_str - doc = func.__doc__ - - # PYTHONOPTIMIZE=2 strips docstrings, so they can disappear unexpectedly - if doc is not None: - doc = str_change_func(doc) - - if func: - func.__doc__ = doc - return func - return doc - return wrapper def safe_unicode(e): """unicode(e) with various fallbacks. Used for exceptions, which may not be @@ -73,119 +39,29 @@ def safe_unicode(e): except UnicodeError: pass - return u'Unrecoverably corrupt evalue' - -# shutil.which from Python 3.4 -def _shutil_which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such - file. + return "Unrecoverably corrupt evalue" - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result - of os.environ.get("PATH"), or can be overridden with a custom search - path. - - This is a backport of shutil.which from Python 3.4 - """ - # Check that a given file can be accessed with the correct mode. 
- # Additionally check that `file` is not a directory, as on Windows - # directories pass the os.access check. - def _access_check(fn, mode): - return (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)) - - # If we're given a path with a directory part, look it up directly rather - # than referring to PATH directories. This includes checking relative to the - # current directory, e.g. ./script - if os.path.dirname(cmd): - if _access_check(cmd, mode): - return cmd - return None - - if path is None: - path = os.environ.get("PATH", os.defpath) - if not path: - return None - path = path.split(os.pathsep) - - if sys.platform == "win32": - # The current directory takes precedence on Windows. - if not os.curdir in path: - path.insert(0, os.curdir) - - # PATHEXT is necessary to check on Windows. - pathext = os.environ.get("PATHEXT", "").split(os.pathsep) - # See if the given file matches any of the expected path extensions. - # This will allow us to short circuit when given "python.exe". - # If it does match, only test that one, otherwise we have to try - # others. - if any(cmd.lower().endswith(ext.lower()) for ext in pathext): - files = [cmd] - else: - files = [cmd + ext for ext in pathext] - else: - # On other platforms you don't have things like PATHEXT to tell you - # what file suffixes are executable, so just pass on cmd as-is. - files = [cmd] - - seen = set() - for dir in path: - normdir = os.path.normcase(dir) - if not normdir in seen: - seen.add(normdir) - for thefile in files: - name = os.path.join(dir, thefile) - if _access_check(name, mode): - return name - return None - -PY3 = True # keep reference to builtin_mod because the kernel overrides that value # to forward requests to a frontend. 
-def input(prompt=''): +def input(prompt=""): return builtin_mod.input(prompt) -builtin_mod_name = "builtins" -import builtins as builtin_mod - - -which = shutil.which - -def isidentifier(s, dotted=False): - if dotted: - return all(isidentifier(a) for a in s.split(".")) - return s.isidentifier() - -getcwd = os.getcwd - -MethodType = types.MethodType def execfile(fname, glob, loc=None, compiler=None): loc = loc if (loc is not None) else glob - with open(fname, 'rb') as f: + with open(fname, "rb") as f: compiler = compiler or compile - exec(compiler(f.read(), fname, 'exec'), glob, loc) - -# Refactor print statements in doctests. -_print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE) - -# Abstract u'abc' syntax: -@_modify_str_or_docstring -def u_format(s): - """"{u}'abc'" --> "'abc'" (Python 3) + exec(compiler(f.read(), fname, "exec"), glob, loc) - Accepts a string or a function, so it can be used as a decorator.""" - return s.format(u='') - -PY2 = not PY3 PYPY = platform.python_implementation() == "PyPy" # Cython still rely on that as a Dec 28 2019 # See https://github.com/cython/cython/pull/3291 and # https://github.com/ipython/ipython/issues/12068 def no_code(x, encoding=None): - return x -unicode_to_str = cast_bytes_py2 = no_code + return x + +unicode_to_str = cast_bytes_py2 = no_code diff --git a/contrib/python/ipython/py3/IPython/utils/shimmodule.py b/contrib/python/ipython/py3/IPython/utils/shimmodule.py index b70ac135bf..8af44caa98 100644 --- a/contrib/python/ipython/py3/IPython/utils/shimmodule.py +++ b/contrib/python/ipython/py3/IPython/utils/shimmodule.py @@ -3,6 +3,8 @@ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. 
+import importlib.abc +import importlib.util import sys import types from importlib import import_module @@ -13,41 +15,26 @@ from .importstring import import_item class ShimWarning(Warning): """A warning to show when a module has moved, and a shim is in its place.""" -class ShimImporter(object): + +class ShimImporter(importlib.abc.MetaPathFinder): """Import hook for a shim. - + This ensures that submodule imports return the real target module, not a clone that will confuse `is` and `isinstance` checks. """ def __init__(self, src, mirror): self.src = src self.mirror = mirror - + def _mirror_name(self, fullname): """get the name of the mirrored module""" - - return self.mirror + fullname[len(self.src):] - def find_module(self, fullname, path=None): - """Return self if we should be used to import the module.""" - if fullname.startswith(self.src + '.'): - mirror_name = self._mirror_name(fullname) - try: - mod = import_item(mirror_name) - except ImportError: - return - else: - if not isinstance(mod, types.ModuleType): - # not a module - return None - return self + return self.mirror + fullname[len(self.src) :] - def load_module(self, fullname): - """Import the mirrored module, and insert it into sys.modules""" - mirror_name = self._mirror_name(fullname) - mod = import_item(mirror_name) - sys.modules[fullname] = mod - return mod + def find_spec(self, fullname, path, target=None): + if fullname.startswith(self.src + "."): + mirror_name = self._mirror_name(fullname) + return importlib.util.find_spec(mirror_name) class ShimModule(types.ModuleType): @@ -90,5 +77,13 @@ class ShimModule(types.ModuleType): name = "%s.%s" % (self._mirror, key) try: return import_item(name) - except ImportError: - raise AttributeError(key) + except ImportError as e: + raise AttributeError(key) from e + + def __repr__(self): + # repr on a module can be called during error handling; make sure + # it does not fail, even if the import fails + try: + return self.__getattr__("__repr__")() + except 
AttributeError: + return f"<ShimModule for {self._mirror!r}>" diff --git a/contrib/python/ipython/py3/IPython/utils/sysinfo.py b/contrib/python/ipython/py3/IPython/utils/sysinfo.py index 07d14fd8a4..857f0cf2d8 100644 --- a/contrib/python/ipython/py3/IPython/utils/sysinfo.py +++ b/contrib/python/ipython/py3/IPython/utils/sysinfo.py @@ -40,15 +40,15 @@ def pkg_commit_hash(pkg_path): Parameters ---------- pkg_path : str - directory containing package - only used for getting commit from active repo + directory containing package + only used for getting commit from active repo Returns ------- hash_from : str - Where we got the hash from - description + Where we got the hash from - description hash_str : str - short form of hash + short form of hash """ # Try and get commit from written commit text file if _sysinfo.commit: @@ -71,12 +71,12 @@ def pkg_info(pkg_path): Parameters ---------- pkg_path : str - path containing __init__.py for package + path containing __init__.py for package Returns ------- context : dict - with named parameters of interest + with named parameters of interest """ src, hsh = pkg_commit_hash(pkg_path) return dict( @@ -118,49 +118,25 @@ def sys_info(): """ return pprint.pformat(get_sys_info()) -def _num_cpus_unix(): - """Return the number of active CPUs on a Unix system.""" - return os.sysconf("SC_NPROCESSORS_ONLN") - - -def _num_cpus_darwin(): - """Return the number of active CPUs on a Darwin system.""" - p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE) - return p.stdout.read() - - -def _num_cpus_windows(): - """Return the number of active CPUs on a Windows system.""" - return os.environ.get("NUMBER_OF_PROCESSORS") - def num_cpus(): - """Return the effective number of CPUs in the system as an integer. - - This cross-platform function makes an attempt at finding the total number of - available CPUs in the system, as returned by various underlying system and - python calls. 
+ """DEPRECATED - If it can't find a sensible answer, it returns 1 (though an error *may* make - it return a large positive number that's actually incorrect). - """ + Return the effective number of CPUs in the system as an integer. - # Many thanks to the Parallel Python project (http://www.parallelpython.com) - # for the names of the keys we needed to look up for this function. This - # code was inspired by their equivalent function. + This cross-platform function makes an attempt at finding the total number of + available CPUs in the system, as returned by various underlying system and + python calls. - ncpufuncs = {'Linux':_num_cpus_unix, - 'Darwin':_num_cpus_darwin, - 'Windows':_num_cpus_windows - } - - ncpufunc = ncpufuncs.get(platform.system(), - # default to unix version (Solaris, AIX, etc) - _num_cpus_unix) + If it can't find a sensible answer, it returns 1 (though an error *may* make + it return a large positive number that's actually incorrect). + """ + import warnings - try: - ncpus = max(1,int(ncpufunc())) - except: - ncpus = 1 - return ncpus + warnings.warn( + "`num_cpus` is deprecated since IPython 8.0. Use `os.cpu_count` instead.", + DeprecationWarning, + stacklevel=2, + ) + return os.cpu_count() or 1 diff --git a/contrib/python/ipython/py3/IPython/utils/syspathcontext.py b/contrib/python/ipython/py3/IPython/utils/syspathcontext.py index bd1c51500d..7af1ab60af 100644 --- a/contrib/python/ipython/py3/IPython/utils/syspathcontext.py +++ b/contrib/python/ipython/py3/IPython/utils/syspathcontext.py @@ -15,12 +15,21 @@ Authors: #----------------------------------------------------------------------------- import sys +import warnings class appended_to_syspath(object): - """A context for appending a directory to sys.path for a second.""" + """ + Deprecated since IPython 8.1, no replacements. 
+ + A context for appending a directory to sys.path for a second.""" def __init__(self, dir): + warnings.warn( + "`appended_to_syspath` is deprecated since IPython 8.1, and has no replacements", + DeprecationWarning, + stacklevel=2, + ) self.dir = dir def __enter__(self): diff --git a/contrib/python/ipython/py3/IPython/utils/tempdir.py b/contrib/python/ipython/py3/IPython/utils/tempdir.py index 98f6aeb3c6..5afc5d6425 100644 --- a/contrib/python/ipython/py3/IPython/utils/tempdir.py +++ b/contrib/python/ipython/py3/IPython/utils/tempdir.py @@ -5,12 +5,12 @@ creating a context manager for the working directory which is also temporary. """ import os as _os +from pathlib import Path from tempfile import TemporaryDirectory class NamedFileInTemporaryDirectory(object): - - def __init__(self, filename, mode='w+b', bufsize=-1, **kwds): + def __init__(self, filename, mode="w+b", bufsize=-1, add_to_syspath=False, **kwds): """ Open a file named `filename` in a temporary directory. @@ -22,8 +22,9 @@ class NamedFileInTemporaryDirectory(object): """ self._tmpdir = TemporaryDirectory(**kwds) - path = _os.path.join(self._tmpdir.name, filename) - self.file = open(path, mode, bufsize) + path = Path(self._tmpdir.name) / filename + encoding = None if "b" in mode else "utf-8" + self.file = open(path, mode, bufsize, encoding=encoding) def cleanup(self): self.file.close() @@ -48,7 +49,7 @@ class TemporaryWorkingDirectory(TemporaryDirectory): ... 
""" def __enter__(self): - self.old_wd = _os.getcwd() + self.old_wd = Path.cwd() _os.chdir(self.name) return super(TemporaryWorkingDirectory, self).__enter__() diff --git a/contrib/python/ipython/py3/IPython/utils/terminal.py b/contrib/python/ipython/py3/IPython/utils/terminal.py index 4e1800208c..49fd3fe173 100644 --- a/contrib/python/ipython/py3/IPython/utils/terminal.py +++ b/contrib/python/ipython/py3/IPython/utils/terminal.py @@ -45,7 +45,7 @@ def toggle_set_term_title(val): Parameters ---------- - val : bool + val : bool If True, set_term_title() actually writes to the terminal (using the appropriate platform-specific module). If False, it is a no-op. """ diff --git a/contrib/python/ipython/py3/IPython/utils/text.py b/contrib/python/ipython/py3/IPython/utils/text.py index 256fdab584..74bccddf68 100644 --- a/contrib/python/ipython/py3/IPython/utils/text.py +++ b/contrib/python/ipython/py3/IPython/utils/text.py @@ -16,7 +16,6 @@ import textwrap from string import Formatter from pathlib import Path -from IPython.utils import py3compat # datetime.strftime date format for ipython if sys.platform == 'win32': @@ -254,7 +253,6 @@ def indent(instr,nspaces=4, ntabs=0, flatten=False): Parameters ---------- - instr : basestring The string to be indented. nspaces : int (default: 4) @@ -268,7 +266,6 @@ def indent(instr,nspaces=4, ntabs=0, flatten=False): Returns ------- - str|unicode : string indented by ntabs and nspaces. """ @@ -392,7 +389,6 @@ def wrap_paragraphs(text, ncols=80): Returns ------- - list of complete paragraphs, wrapped to fill `ncols` columns. """ paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE) @@ -463,7 +459,7 @@ def strip_email_quotes(text): def strip_ansi(source): """ Remove ansi escape codes from text. - + Parameters ---------- source : str @@ -474,11 +470,11 @@ def strip_ansi(source): class EvalFormatter(Formatter): """A String Formatter that allows evaluation of simple expressions. 
- - Note that this version interprets a : as specifying a format string (as per + + Note that this version interprets a `:` as specifying a format string (as per standard string formatting), so if slicing is required, you must explicitly create a slice. - + This is to be used in templating cases, such as the parallel batch script templates, where simple arithmetic on arguments is useful. @@ -643,7 +639,6 @@ def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) : Parameters ---------- - items list of strings to columize row_first : (default False) @@ -658,14 +653,11 @@ def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) : Returns ------- - strings_matrix - nested list of string, the outer most list contains as many list as rows, the innermost lists have each as many element as columns. If the total number of elements in `items` does not equal the product of rows*columns, the last element of some lists are filled with `None`. - dict_info some info to make columnize easier: @@ -698,21 +690,18 @@ def compute_item_matrix(items, row_first=False, empty=None, *args, **kwargs) : return ([[_get_or_default(items, c * nrow + r, default=empty) for c in range(ncol)] for r in range(nrow)], info) -def columnize(items, row_first=False, separator=' ', displaywidth=80, spread=False): - """ Transform a list of strings into a single string with columns. +def columnize(items, row_first=False, separator=" ", displaywidth=80, spread=False): + """Transform a list of strings into a single string with columns. Parameters ---------- items : sequence of strings The strings to process. - row_first : (default False) Whether to compute columns for a row-first matrix instead of column-first (default). - separator : str, optional [default is two spaces] The string that separates columns. - displaywidth : int, optional [default is 80] Width of the display in number of characters. 
diff --git a/contrib/python/ipython/py3/IPython/utils/timing.py b/contrib/python/ipython/py3/IPython/utils/timing.py index 92f6883c4a..3a181ae728 100644 --- a/contrib/python/ipython/py3/IPython/utils/timing.py +++ b/contrib/python/ipython/py3/IPython/utils/timing.py @@ -64,13 +64,14 @@ if resource is not None and hasattr(resource, "getrusage"): else: # There is no distinction of user/system time under windows, so we just use - # time.perff_counter() for everything... - clocku = clocks = clock = time.perf_counter + # time.process_time() for everything... + clocku = clocks = clock = time.process_time + def clock2(): """Under windows, system CPU time can't be measured. - This just returns perf_counter() and zero.""" - return time.perf_counter(),0.0 + This just returns process_time() and zero.""" + return time.process_time(), 0.0 def timings_out(reps,func,*args,**kw): diff --git a/contrib/python/ipython/py3/IPython/utils/tokenutil.py b/contrib/python/ipython/py3/IPython/utils/tokenutil.py index 28f8b6d526..697d2b504a 100644 --- a/contrib/python/ipython/py3/IPython/utils/tokenutil.py +++ b/contrib/python/ipython/py3/IPython/utils/tokenutil.py @@ -23,20 +23,18 @@ def generate_tokens(readline): def line_at_cursor(cell, cursor_pos=0): """Return the line in a cell at a given cursor position - + Used for calling line-based APIs that don't support multi-line input, yet. - + Parameters ---------- - - cell: str + cell : str multiline block of text - cursor_pos: integer + cursor_pos : integer the cursor position - + Returns ------- - (line, offset): (string, integer) The line with the current cursor, and the character offset of the start of the line. """ @@ -58,15 +56,14 @@ def line_at_cursor(cell, cursor_pos=0): def token_at_cursor(cell, cursor_pos=0): """Get the token at a given cursor - + Used for introspection. - + Function calls are prioritized, so the token for the callable will be returned if the cursor is anywhere inside the call. 
- + Parameters ---------- - cell : unicode A block of Python code cursor_pos : int diff --git a/contrib/python/ipython/py3/IPython/utils/version.py b/contrib/python/ipython/py3/IPython/utils/version.py index 1de0047e6b..8c65c78e15 100644 --- a/contrib/python/ipython/py3/IPython/utils/version.py +++ b/contrib/python/ipython/py3/IPython/utils/version.py @@ -12,15 +12,13 @@ It is a bit ridiculous that we need these. # the file COPYING, distributed as part of this software. #----------------------------------------------------------------------------- -#----------------------------------------------------------------------------- -# Imports -#----------------------------------------------------------------------------- +from warnings import warn -from distutils.version import LooseVersion +warn( + "The `IPython.utils.version` module has been deprecated since IPython 8.0.", + DeprecationWarning, +) -#----------------------------------------------------------------------------- -# Code -#----------------------------------------------------------------------------- def check_version(v, check): """check version string v >= check @@ -29,6 +27,15 @@ def check_version(v, check): it is assumed that the dependency is satisfied. Users on dev branches are responsible for keeping their own packages up to date. """ + warn( + "`check_version` function is deprecated as of IPython 8.0" + "and will be removed in future versions.", + DeprecationWarning, + stacklevel=2, + ) + + from distutils.version import LooseVersion + try: return LooseVersion(v) >= LooseVersion(check) except TypeError: diff --git a/contrib/python/ipython/py3/README.rst b/contrib/python/ipython/py3/README.rst index 940047656f..ec16031166 100644 --- a/contrib/python/ipython/py3/README.rst +++ b/contrib/python/ipython/py3/README.rst @@ -4,8 +4,8 @@ .. image:: https://img.shields.io/pypi/v/IPython.svg :target: https://pypi.python.org/pypi/ipython -.. 
image:: https://img.shields.io/travis/ipython/ipython.svg - :target: https://travis-ci.org/ipython/ipython +.. image:: https://github.com/ipython/ipython/actions/workflows/test.yml/badge.svg + :target: https://github.com/ipython/ipython/actions/workflows/test.yml) .. image:: https://www.codetriage.com/ipython/ipython/badges/users.svg :target: https://www.codetriage.com/ipython/ipython/ @@ -24,6 +24,7 @@ Overview Welcome to IPython. Our full documentation is available on `ipython.readthedocs.io <https://ipython.readthedocs.io/en/stable/>`_ and contains information on how to install, use, and contribute to the project. +IPython (Interactive Python) is a command shell for interactive computing in multiple programming languages, originally developed for the Python programming language, that offers introspection, rich media, shell syntax, tab completion, and history. **IPython versions and Python Support** @@ -46,7 +47,29 @@ The Notebook, Qt console and a number of other pieces are now parts of *Jupyter* See the `Jupyter installation docs <https://jupyter.readthedocs.io/en/latest/install.html>`__ if you want to use these. +Main features of IPython +======================== +Comprehensive object introspection. +Input history, persistent across sessions. + +Caching of output results during a session with automatically generated references. + +Extensible tab completion, with support by default for completion of python variables and keywords, filenames and function keywords. + +Extensible system of ‘magic’ commands for controlling the environment and performing many tasks related to IPython or the operating system. + +A rich configuration system with easy switching between different setups (simpler than changing $PYTHONSTARTUP environment variables every time). + +Session logging and reloading. + +Extensible syntax processing for special purpose situations. + +Access to the system shell with user-extensible alias system. 
+ +Easily embeddable in other Python programs and GUIs. + +Integrated access to the pdb debugger and the Python profiler. Development and Instant running @@ -126,11 +149,11 @@ Alternatives to IPython IPython may not be to your taste; if that's the case there might be similar project that you might want to use: -- the classic Python REPL. +- The classic Python REPL. - `bpython <https://bpython-interpreter.org/>`_ - `mypython <https://www.asmeurer.com/mypython/>`_ -- `ptpython and ptipython <https://pypi.org/project/ptpython/>` -- `xonsh <https://xon.sh/>` +- `ptpython and ptipython <https://pypi.org/project/ptpython/>`_ +- `Xonsh <https://xon.sh/>`_ Ignoring commits with git blame.ignoreRevsFile ============================================== diff --git a/contrib/python/ipython/py3/patches/01-arcadia.patch b/contrib/python/ipython/py3/patches/01-arcadia.patch index 335f93f18e..eafd4f30f6 100644 --- a/contrib/python/ipython/py3/patches/01-arcadia.patch +++ b/contrib/python/ipython/py3/patches/01-arcadia.patch @@ -1,6 +1,6 @@ --- contrib/python/ipython/py3/IPython/core/completer.py (index) +++ contrib/python/ipython/py3/IPython/core/completer.py (working tree) -@@ -996,6 +996,7 @@ def _make_signature(completion)-> str: +@@ -1025,6 +1025,7 @@ def _make_signature(completion)-> str: """ @@ -8,7 +8,7 @@ # it looks like this might work on jedi 0.17 if hasattr(completion, 'get_signatures'): signatures = completion.get_signatures() -@@ -1388,7 +1389,7 @@ class IPCompleter(Completer): +@@ -1438,7 +1439,7 @@ class IPCompleter(Completer): else: raise ValueError("Don't understand self.omit__names == {}".format(self.omit__names)) @@ -17,7 +17,7 @@ try_jedi = True try: -@@ -1415,7 +1416,7 @@ class IPCompleter(Completer): +@@ -1465,7 +1466,7 @@ class IPCompleter(Completer): if not try_jedi: return [] try: @@ -96,7 +96,7 @@ def module_list(path): """ Return the list containing the names of the modules available in the given -@@ -165,7 +212,8 @@ def try_import(mod: str, 
only_modules=False) -> List[str]: +@@ -176,7 +223,8 @@ def try_import(mod: str, only_modules=False) -> List[str]: except: return [] @@ -106,9 +106,9 @@ completions = [] if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: -@@ -174,10 +222,10 @@ def try_import(mod: str, only_modules=False) -> List[str]: - - completions.extend(getattr(m, '__all__', [])) +@@ -190,10 +238,10 @@ def try_import(mod: str, only_modules=False) -> List[str]: + completions.extend(m_all) + if m_is_init: - completions.extend(module_list(os.path.dirname(m.__file__))) + completions.extend(arcadia_module_list(mod)) @@ -119,7 +119,7 @@ #----------------------------------------------------------------------------- -@@ -226,10 +274,10 @@ def module_completion(line): +@@ -242,10 +290,10 @@ def module_completion(line): # 'from xy<tab>' or 'import xy<tab>' if nwords < 3 and (words[0] in {'%aimport', 'import', 'from'}) : if nwords == 1: @@ -134,7 +134,7 @@ --- contrib/python/ipython/py3/IPython/core/extensions.py (index) +++ contrib/python/ipython/py3/IPython/core/extensions.py (working tree) -@@ -72,11 +72,11 @@ class ExtensionManager(Configurable): +@@ -84,11 +84,11 @@ class ExtensionManager(Configurable): if module_str in self.loaded: return "already loaded" @@ -151,7 +151,7 @@ print(("Loading extensions from {dir} is deprecated. 
" --- contrib/python/ipython/py3/IPython/core/profiledir.py (index) +++ contrib/python/ipython/py3/IPython/core/profiledir.py (working tree) -@@ -111,13 +111,11 @@ class ProfileDir(LoggingConfigurable): +@@ -112,13 +112,11 @@ class ProfileDir(LoggingConfigurable): self._mkdir(self.startup_dir) readme = os.path.join(self.startup_dir, 'README') diff --git a/contrib/python/ipython/py3/patches/02-fix-ya.make.patch b/contrib/python/ipython/py3/patches/02-fix-ya.make.patch index 75e0e9b0bb..7ae6f38152 100644 --- a/contrib/python/ipython/py3/patches/02-fix-ya.make.patch +++ b/contrib/python/ipython/py3/patches/02-fix-ya.make.patch @@ -1,17 +1,17 @@ --- contrib/python/ipython/py3/.dist-info/METADATA (index) +++ contrib/python/ipython/py3/.dist-info/METADATA (working tree) -@@ -24,7 +24,7 @@ Classifier: Programming Language :: Python :: 3 :: Only - Classifier: Topic :: System :: Shells - Requires-Python: >=3.7 - Requires-Dist: setuptools (>=18.5) +@@ -28,7 +28,7 @@ Classifier: Programming Language :: Python :: 3 :: Only + License-File: LICENSE + Requires-Dist: backcall + Requires-Dist: decorator -Requires-Dist: jedi (>=0.16) +Requires-Dist: jedi (>=0.13) - Requires-Dist: decorator + Requires-Dist: matplotlib-inline Requires-Dist: pickleshare - Requires-Dist: traitlets (>=4.2) + Requires-Dist: prompt-toolkit (!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0) --- contrib/python/ipython/py3/ya.make (index) +++ contrib/python/ipython/py3/ya.make (working tree) -@@ -14,13 +14,28 @@ PEERDIR( +@@ -14,14 +14,29 @@ PEERDIR( contrib/python/decorator contrib/python/jedi contrib/python/matplotlib-inline @@ -19,6 +19,7 @@ contrib/python/pickleshare contrib/python/prompt-toolkit contrib/python/setuptools + contrib/python/stack-data contrib/python/traitlets ) diff --git a/contrib/python/ipython/py3/patches/03-dissable-backgroud-highlighting.patch b/contrib/python/ipython/py3/patches/03-dissable-backgroud-highlighting.patch new file mode 100644 index 0000000000..bb9b4924d0 --- /dev/null +++ 
b/contrib/python/ipython/py3/patches/03-dissable-backgroud-highlighting.patch @@ -0,0 +1,11 @@ +--- contrib/python/ipython/py3/IPython/core/ultratb.py (index) ++++ contrib/python/ipython/py3/IPython/core/ultratb.py (working tree) +@@ -842,7 +842,7 @@ class VerboseTB(TBTools): + before = context - after + if self.has_colors: + style = get_style_by_name("default") +- style = stack_data.style_with_executing_node(style, "bg:ansiyellow") ++ style = stack_data.style_with_executing_node(style, "") + formatter = Terminal256Formatter(style=style) + else: + formatter = None diff --git a/contrib/python/ipython/py3/patches/04-fix.patch b/contrib/python/ipython/py3/patches/04-fix.patch new file mode 100644 index 0000000000..913caffe76 --- /dev/null +++ b/contrib/python/ipython/py3/patches/04-fix.patch @@ -0,0 +1,11 @@ +--- contrib/python/ipython/py3/IPython/core/ultratb.py (index) ++++ contrib/python/ipython/py3/IPython/core/ultratb.py (working tree) +@@ -239,7 +239,7 @@ class TBTools(colorable.Colorable): + self.debugger_cls = debugger_cls or debugger.Pdb + + if call_pdb: +- self.pdb = debugger_cls() ++ self.pdb = self.debugger_cls() + else: + self.pdb = None + diff --git a/contrib/python/pure-eval/.dist-info/METADATA b/contrib/python/pure-eval/.dist-info/METADATA new file mode 100644 index 0000000000..931f69c348 --- /dev/null +++ b/contrib/python/pure-eval/.dist-info/METADATA @@ -0,0 +1,229 @@ +Metadata-Version: 2.1 +Name: pure-eval +Version: 0.2.2 +Summary: Safely evaluate AST nodes without side effects +Home-page: http://github.com/alexmojaki/pure_eval +Author: Alex Hall +Author-email: alex.mojaki@gmail.com +License: MIT +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: 
Python :: 3.10 +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Description-Content-Type: text/markdown +License-File: LICENSE.txt +Provides-Extra: tests +Requires-Dist: pytest ; extra == 'tests' + +# `pure_eval` + +[![Build Status](https://travis-ci.org/alexmojaki/pure_eval.svg?branch=master)](https://travis-ci.org/alexmojaki/pure_eval) [![Coverage Status](https://coveralls.io/repos/github/alexmojaki/pure_eval/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/pure_eval?branch=master) [![Supports Python versions 3.5+](https://img.shields.io/pypi/pyversions/pure_eval.svg)](https://pypi.python.org/pypi/pure_eval) + +This is a Python package that lets you safely evaluate certain AST nodes without triggering arbitrary code that may have unwanted side effects. + +It can be installed from PyPI: + + pip install pure_eval + +To demonstrate usage, suppose we have an object defined as follows: + +```python +class Rectangle: + def __init__(self, width, height): + self.width = width + self.height = height + + @property + def area(self): + print("Calculating area...") + return self.width * self.height + + +rect = Rectangle(3, 5) +``` + +Given the `rect` object, we want to evaluate whatever expressions we can in this source code: + +```python +source = "(rect.width, rect.height, rect.area)" +``` + +This library works with the AST, so let's parse the source code and peek inside: + +```python +import ast + +tree = ast.parse(source) +the_tuple = tree.body[0].value +for node in the_tuple.elts: + print(ast.dump(node)) +``` + +Output: + +```python +Attribute(value=Name(id='rect', ctx=Load()), attr='width', ctx=Load()) +Attribute(value=Name(id='rect', ctx=Load()), attr='height', ctx=Load()) +Attribute(value=Name(id='rect', ctx=Load()), attr='area', ctx=Load()) +``` + +Now to actually use the library. 
First construct an Evaluator: + +```python +from pure_eval import Evaluator + +evaluator = Evaluator({"rect": rect}) +``` + +The argument to `Evaluator` should be a mapping from variable names to their values. Or if you have access to the stack frame where `rect` is defined, you can instead use: + +```python +evaluator = Evaluator.from_frame(frame) +``` + +Now to evaluate some nodes, using `evaluator[node]`: + +```python +print("rect.width:", evaluator[the_tuple.elts[0]]) +print("rect:", evaluator[the_tuple.elts[0].value]) +``` + +Output: + +``` +rect.width: 3 +rect: <__main__.Rectangle object at 0x105b0dd30> +``` + +OK, but you could have done the same thing with `eval`. The useful part is that it will refuse to evaluate the property `rect.area` because that would trigger unknown code. If we try, it'll raise a `CannotEval` exception. + +```python +from pure_eval import CannotEval + +try: + print("rect.area:", evaluator[the_tuple.elts[2]]) # fails +except CannotEval as e: + print(e) # prints CannotEval +``` + +To find all the expressions that can be evaluated in a tree: + +```python +for node, value in evaluator.find_expressions(tree): + print(ast.dump(node), value) +``` + +Output: + +```python +Attribute(value=Name(id='rect', ctx=Load()), attr='width', ctx=Load()) 3 +Attribute(value=Name(id='rect', ctx=Load()), attr='height', ctx=Load()) 5 +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +``` + +Note that this includes `rect` three times, once for each appearance in the source code. 
Since all these nodes are equivalent, we can group them together: + +```python +from pure_eval import group_expressions + +for nodes, values in group_expressions(evaluator.find_expressions(tree)): + print(len(nodes), "nodes with value:", values) +``` + +Output: + +``` +1 nodes with value: 3 +1 nodes with value: 5 +3 nodes with value: <__main__.Rectangle object at 0x10d374d30> +``` + +If we want to list all the expressions in a tree, we may want to filter out certain expressions whose values are obvious. For example, suppose we have a function `foo`: + +```python +def foo(): + pass +``` + +If we refer to `foo` by its name as usual, then that's not interesting: + +```python +from pure_eval import is_expression_interesting + +node = ast.parse('foo').body[0].value +print(ast.dump(node)) +print(is_expression_interesting(node, foo)) +``` + +Output: + +```python +Name(id='foo', ctx=Load()) +False +``` + +But if we refer to it by a different name, then it's interesting: + +```python +node = ast.parse('bar').body[0].value +print(ast.dump(node)) +print(is_expression_interesting(node, foo)) +``` + +Output: + +```python +Name(id='bar', ctx=Load()) +True +``` + +In general `is_expression_interesting` returns False for the following values: +- Literals (e.g. `123`, `'abc'`, `[1, 2, 3]`, `{'a': (), 'b': ([1, 2], [3])}`) +- Variables or attributes whose name is equal to the value's `__name__`, such as `foo` above or `self.foo` if it was a method. +- Builtins (e.g. `len`) referred to by their usual name. + +To make things easier, you can combine finding expressions, grouping them, and filtering out the obvious ones with: + +```python +evaluator.interesting_expressions_grouped(root) +``` + +To get the source code of an AST node, I recommend [asttokens](https://github.com/gristlabs/asttokens). 
+ +Here's a complete example that brings it all together: + +```python +from asttokens import ASTTokens +from pure_eval import Evaluator + +source = """ +x = 1 +d = {x: 2} +y = d[x] +""" + +names = {} +exec(source, names) +atok = ASTTokens(source, parse=True) +for nodes, value in Evaluator(names).interesting_expressions_grouped(atok.tree): + print(atok.get_text(nodes[0]), "=", value) +``` + +Output: + +```python +x = 1 +d = {1: 2} +y = 2 +d[x] = 2 +``` + + diff --git a/contrib/python/pure-eval/.dist-info/top_level.txt b/contrib/python/pure-eval/.dist-info/top_level.txt new file mode 100644 index 0000000000..e50c81f634 --- /dev/null +++ b/contrib/python/pure-eval/.dist-info/top_level.txt @@ -0,0 +1 @@ +pure_eval diff --git a/contrib/python/pure-eval/LICENSE.txt b/contrib/python/pure-eval/LICENSE.txt new file mode 100644 index 0000000000..473e36e246 --- /dev/null +++ b/contrib/python/pure-eval/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Alex Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/contrib/python/pure-eval/README.md b/contrib/python/pure-eval/README.md new file mode 100644 index 0000000000..a4edbfc0a5 --- /dev/null +++ b/contrib/python/pure-eval/README.md @@ -0,0 +1,204 @@ +# `pure_eval` + +[![Build Status](https://travis-ci.org/alexmojaki/pure_eval.svg?branch=master)](https://travis-ci.org/alexmojaki/pure_eval) [![Coverage Status](https://coveralls.io/repos/github/alexmojaki/pure_eval/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/pure_eval?branch=master) [![Supports Python versions 3.5+](https://img.shields.io/pypi/pyversions/pure_eval.svg)](https://pypi.python.org/pypi/pure_eval) + +This is a Python package that lets you safely evaluate certain AST nodes without triggering arbitrary code that may have unwanted side effects. 
+ +It can be installed from PyPI: + + pip install pure_eval + +To demonstrate usage, suppose we have an object defined as follows: + +```python +class Rectangle: + def __init__(self, width, height): + self.width = width + self.height = height + + @property + def area(self): + print("Calculating area...") + return self.width * self.height + + +rect = Rectangle(3, 5) +``` + +Given the `rect` object, we want to evaluate whatever expressions we can in this source code: + +```python +source = "(rect.width, rect.height, rect.area)" +``` + +This library works with the AST, so let's parse the source code and peek inside: + +```python +import ast + +tree = ast.parse(source) +the_tuple = tree.body[0].value +for node in the_tuple.elts: + print(ast.dump(node)) +``` + +Output: + +```python +Attribute(value=Name(id='rect', ctx=Load()), attr='width', ctx=Load()) +Attribute(value=Name(id='rect', ctx=Load()), attr='height', ctx=Load()) +Attribute(value=Name(id='rect', ctx=Load()), attr='area', ctx=Load()) +``` + +Now to actually use the library. First construct an Evaluator: + +```python +from pure_eval import Evaluator + +evaluator = Evaluator({"rect": rect}) +``` + +The argument to `Evaluator` should be a mapping from variable names to their values. Or if you have access to the stack frame where `rect` is defined, you can instead use: + +```python +evaluator = Evaluator.from_frame(frame) +``` + +Now to evaluate some nodes, using `evaluator[node]`: + +```python +print("rect.width:", evaluator[the_tuple.elts[0]]) +print("rect:", evaluator[the_tuple.elts[0].value]) +``` + +Output: + +``` +rect.width: 3 +rect: <__main__.Rectangle object at 0x105b0dd30> +``` + +OK, but you could have done the same thing with `eval`. The useful part is that it will refuse to evaluate the property `rect.area` because that would trigger unknown code. If we try, it'll raise a `CannotEval` exception. 
+ +```python +from pure_eval import CannotEval + +try: + print("rect.area:", evaluator[the_tuple.elts[2]]) # fails +except CannotEval as e: + print(e) # prints CannotEval +``` + +To find all the expressions that can be evaluated in a tree: + +```python +for node, value in evaluator.find_expressions(tree): + print(ast.dump(node), value) +``` + +Output: + +```python +Attribute(value=Name(id='rect', ctx=Load()), attr='width', ctx=Load()) 3 +Attribute(value=Name(id='rect', ctx=Load()), attr='height', ctx=Load()) 5 +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +Name(id='rect', ctx=Load()) <__main__.Rectangle object at 0x105568d30> +``` + +Note that this includes `rect` three times, once for each appearance in the source code. Since all these nodes are equivalent, we can group them together: + +```python +from pure_eval import group_expressions + +for nodes, values in group_expressions(evaluator.find_expressions(tree)): + print(len(nodes), "nodes with value:", values) +``` + +Output: + +``` +1 nodes with value: 3 +1 nodes with value: 5 +3 nodes with value: <__main__.Rectangle object at 0x10d374d30> +``` + +If we want to list all the expressions in a tree, we may want to filter out certain expressions whose values are obvious. 
For example, suppose we have a function `foo`: + +```python +def foo(): + pass +``` + +If we refer to `foo` by its name as usual, then that's not interesting: + +```python +from pure_eval import is_expression_interesting + +node = ast.parse('foo').body[0].value +print(ast.dump(node)) +print(is_expression_interesting(node, foo)) +``` + +Output: + +```python +Name(id='foo', ctx=Load()) +False +``` + +But if we refer to it by a different name, then it's interesting: + +```python +node = ast.parse('bar').body[0].value +print(ast.dump(node)) +print(is_expression_interesting(node, foo)) +``` + +Output: + +```python +Name(id='bar', ctx=Load()) +True +``` + +In general `is_expression_interesting` returns False for the following values: +- Literals (e.g. `123`, `'abc'`, `[1, 2, 3]`, `{'a': (), 'b': ([1, 2], [3])}`) +- Variables or attributes whose name is equal to the value's `__name__`, such as `foo` above or `self.foo` if it was a method. +- Builtins (e.g. `len`) referred to by their usual name. + +To make things easier, you can combine finding expressions, grouping them, and filtering out the obvious ones with: + +```python +evaluator.interesting_expressions_grouped(root) +``` + +To get the source code of an AST node, I recommend [asttokens](https://github.com/gristlabs/asttokens). 
+ +Here's a complete example that brings it all together: + +```python +from asttokens import ASTTokens +from pure_eval import Evaluator + +source = """ +x = 1 +d = {x: 2} +y = d[x] +""" + +names = {} +exec(source, names) +atok = ASTTokens(source, parse=True) +for nodes, value in Evaluator(names).interesting_expressions_grouped(atok.tree): + print(atok.get_text(nodes[0]), "=", value) +``` + +Output: + +```python +x = 1 +d = {1: 2} +y = 2 +d[x] = 2 +``` diff --git a/contrib/python/pure-eval/pure_eval/__init__.py b/contrib/python/pure-eval/pure_eval/__init__.py new file mode 100644 index 0000000000..0040e318a6 --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/__init__.py @@ -0,0 +1,8 @@ +from .core import Evaluator, CannotEval, group_expressions, is_expression_interesting +from .my_getattr_static import getattr_static + +try: + from .version import __version__ +except ImportError: + # version.py is auto-generated with the git tag when building + __version__ = "???" diff --git a/contrib/python/pure-eval/pure_eval/core.py b/contrib/python/pure-eval/pure_eval/core.py new file mode 100644 index 0000000000..748f0518d4 --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/core.py @@ -0,0 +1,449 @@ +import ast +import builtins +import operator +from collections import ChainMap, OrderedDict, deque +from contextlib import suppress +from types import FrameType +from typing import Any, Tuple, Iterable, List, Mapping, Dict, Union, Set + +from pure_eval.my_getattr_static import getattr_static +from pure_eval.utils import ( + CannotEval, + has_ast_name, + copy_ast_without_context, + is_standard_types, + of_standard_types, + is_any, + of_type, + ensure_dict, +) + + +class Evaluator: + def __init__(self, names: Mapping[str, Any]): + """ + Construct a new evaluator with the given variable names. + This is a low level API, typically you will use `Evaluator.from_frame(frame)`. + + :param names: a mapping from variable names to their values. 
+ """ + + self.names = names + self._cache = {} # type: Dict[ast.expr, Any] + + @classmethod + def from_frame(cls, frame: FrameType) -> 'Evaluator': + """ + Construct an Evaluator that can look up variables from the given frame. + + :param frame: a frame object, e.g. from a traceback or `inspect.currentframe().f_back`. + """ + + return cls(ChainMap( + ensure_dict(frame.f_locals), + ensure_dict(frame.f_globals), + ensure_dict(frame.f_builtins), + )) + + def __getitem__(self, node: ast.expr) -> Any: + """ + Find the value of the given node. + If it cannot be evaluated safely, this raises `CannotEval`. + The result is cached either way. + + :param node: an AST expression to evaluate + :return: the value of the node + """ + + if not isinstance(node, ast.expr): + raise TypeError("node should be an ast.expr, not {!r}".format(type(node).__name__)) + + with suppress(KeyError): + result = self._cache[node] + if result is CannotEval: + raise CannotEval + else: + return result + + try: + self._cache[node] = result = self._handle(node) + return result + except CannotEval: + self._cache[node] = CannotEval + raise + + def _handle(self, node: ast.expr) -> Any: + """ + This is where the evaluation happens. + Users should use `__getitem__`, i.e. `evaluator[node]`, + as it provides caching. 
+ + :param node: an AST expression to evaluate + :return: the value of the node + """ + + with suppress(Exception): + return ast.literal_eval(node) + + if isinstance(node, ast.Name): + try: + return self.names[node.id] + except KeyError: + raise CannotEval + elif isinstance(node, ast.Attribute): + value = self[node.value] + attr = node.attr + return getattr_static(value, attr) + elif isinstance(node, ast.Subscript): + return self._handle_subscript(node) + elif isinstance(node, (ast.List, ast.Tuple, ast.Set, ast.Dict)): + return self._handle_container(node) + elif isinstance(node, ast.UnaryOp): + return self._handle_unary(node) + elif isinstance(node, ast.BinOp): + return self._handle_binop(node) + elif isinstance(node, ast.BoolOp): + return self._handle_boolop(node) + elif isinstance(node, ast.Compare): + return self._handle_compare(node) + elif isinstance(node, ast.Call): + return self._handle_call(node) + raise CannotEval + + def _handle_call(self, node): + if node.keywords: + raise CannotEval + func = self[node.func] + args = [self[arg] for arg in node.args] + + if ( + is_any( + func, + slice, + int, + range, + round, + complex, + list, + tuple, + abs, + hex, + bin, + oct, + bool, + ord, + float, + len, + chr, + ) + or len(args) == 0 + and is_any(func, set, dict, str, frozenset, bytes, bytearray, object) + or len(args) >= 2 + and is_any(func, str, divmod, bytes, bytearray, pow) + ): + args = [ + of_standard_types(arg, check_dict_values=False, deep=False) + for arg in args + ] + try: + return func(*args) + except Exception as e: + raise CannotEval from e + + if len(args) == 1: + arg = args[0] + if is_any(func, id, type): + try: + return func(arg) + except Exception as e: + raise CannotEval from e + if is_any(func, all, any, sum): + of_type(arg, tuple, frozenset, list, set, dict, OrderedDict, deque) + for x in arg: + of_standard_types(x, check_dict_values=False, deep=False) + try: + return func(arg) + except Exception as e: + raise CannotEval from e + + if is_any( 
+ func, sorted, min, max, hash, set, dict, ascii, str, repr, frozenset + ): + of_standard_types(arg, check_dict_values=True, deep=True) + try: + return func(arg) + except Exception as e: + raise CannotEval from e + raise CannotEval + + def _handle_compare(self, node): + left = self[node.left] + result = True + + for op, right in zip(node.ops, node.comparators): + right = self[right] + + op_type = type(op) + op_func = { + ast.Eq: operator.eq, + ast.NotEq: operator.ne, + ast.Lt: operator.lt, + ast.LtE: operator.le, + ast.Gt: operator.gt, + ast.GtE: operator.ge, + ast.Is: operator.is_, + ast.IsNot: operator.is_not, + ast.In: (lambda a, b: a in b), + ast.NotIn: (lambda a, b: a not in b), + }[op_type] + + if op_type not in (ast.Is, ast.IsNot): + of_standard_types(left, check_dict_values=False, deep=True) + of_standard_types(right, check_dict_values=False, deep=True) + + try: + result = op_func(left, right) + except Exception as e: + raise CannotEval from e + if not result: + return result + left = right + + return result + + def _handle_boolop(self, node): + left = of_standard_types( + self[node.values[0]], check_dict_values=False, deep=False + ) + + for right in node.values[1:]: + # We need short circuiting so that the whole operation can be evaluated + # even if the right operand can't + if isinstance(node.op, ast.Or): + left = left or of_standard_types( + self[right], check_dict_values=False, deep=False + ) + else: + assert isinstance(node.op, ast.And) + left = left and of_standard_types( + self[right], check_dict_values=False, deep=False + ) + return left + + def _handle_binop(self, node): + op_type = type(node.op) + op = { + ast.Add: operator.add, + ast.Sub: operator.sub, + ast.Mult: operator.mul, + ast.Div: operator.truediv, + ast.FloorDiv: operator.floordiv, + ast.Mod: operator.mod, + ast.Pow: operator.pow, + ast.LShift: operator.lshift, + ast.RShift: operator.rshift, + ast.BitOr: operator.or_, + ast.BitXor: operator.xor, + ast.BitAnd: operator.and_, + 
}.get(op_type) + if not op: + raise CannotEval + left = self[node.left] + hash_type = is_any(type(left), set, frozenset, dict, OrderedDict) + left = of_standard_types(left, check_dict_values=False, deep=hash_type) + formatting = type(left) in (str, bytes) and op_type == ast.Mod + + right = of_standard_types( + self[node.right], + check_dict_values=formatting, + deep=formatting or hash_type, + ) + try: + return op(left, right) + except Exception as e: + raise CannotEval from e + + def _handle_unary(self, node: ast.UnaryOp): + value = of_standard_types( + self[node.operand], check_dict_values=False, deep=False + ) + op_type = type(node.op) + op = { + ast.USub: operator.neg, + ast.UAdd: operator.pos, + ast.Not: operator.not_, + ast.Invert: operator.invert, + }[op_type] + try: + return op(value) + except Exception as e: + raise CannotEval from e + + def _handle_subscript(self, node): + value = self[node.value] + of_standard_types( + value, check_dict_values=False, deep=is_any(type(value), dict, OrderedDict) + ) + index = node.slice + if isinstance(index, ast.Slice): + index = slice( + *[ + None if p is None else self[p] + for p in [index.lower, index.upper, index.step] + ] + ) + elif isinstance(index, ast.ExtSlice): + raise CannotEval + else: + if isinstance(index, ast.Index): + index = index.value + index = self[index] + of_standard_types(index, check_dict_values=False, deep=True) + + try: + return value[index] + except Exception: + raise CannotEval + + def _handle_container( + self, + node: Union[ast.List, ast.Tuple, ast.Set, ast.Dict] + ) -> Union[List, Tuple, Set, Dict]: + """Handle container nodes, including List, Set, Tuple and Dict""" + if isinstance(node, ast.Dict): + elts = node.keys + if None in elts: # ** unpacking inside {}, not yet supported + raise CannotEval + else: + elts = node.elts + elts = [self[elt] for elt in elts] + if isinstance(node, ast.List): + return elts + if isinstance(node, ast.Tuple): + return tuple(elts) + + # Set and Dict + if not all( 
+ is_standard_types(elt, check_dict_values=False, deep=True) for elt in elts + ): + raise CannotEval + + if isinstance(node, ast.Set): + try: + return set(elts) + except TypeError: + raise CannotEval + + assert isinstance(node, ast.Dict) + + pairs = [(elt, self[val]) for elt, val in zip(elts, node.values)] + try: + return dict(pairs) + except TypeError: + raise CannotEval + + def find_expressions(self, root: ast.AST) -> Iterable[Tuple[ast.expr, Any]]: + """ + Find all expressions in the given tree that can be safely evaluated. + This is a low level API, typically you will use `interesting_expressions_grouped`. + + :param root: any AST node + :return: generator of pairs (tuples) of expression nodes and their corresponding values. + """ + + for node in ast.walk(root): + if not isinstance(node, ast.expr): + continue + + try: + value = self[node] + except CannotEval: + continue + + yield node, value + + def interesting_expressions_grouped(self, root: ast.AST) -> List[Tuple[List[ast.expr], Any]]: + """ + Find all interesting expressions in the given tree that can be safely evaluated, + grouping equivalent nodes together. + + For more control and details, see: + - Evaluator.find_expressions + - is_expression_interesting + - group_expressions + + :param root: any AST node + :return: A list of pairs (tuples) containing: + - A list of equivalent AST expressions + - The value of the first expression node + (which should be the same for all nodes, unless threads are involved) + """ + + return group_expressions( + pair + for pair in self.find_expressions(root) + if is_expression_interesting(*pair) + ) + + +def is_expression_interesting(node: ast.expr, value: Any) -> bool: + """ + Determines if an expression is potentially interesting, at least in my opinion. + Returns False for the following expressions whose value is generally obvious: + - Literals (e.g. 
123, 'abc', [1, 2, 3], {'a': (), 'b': ([1, 2], [3])}) + - Variables or attributes whose name is equal to the value's __name__. + For example, a function `def foo(): ...` is not interesting when referred to + as `foo` as it usually would, but `bar` can be interesting if `bar is foo`. + Similarly the method `self.foo` is not interesting. + - Builtins (e.g. `len`) referred to by their usual name. + + This is a low level API, typically you will use `interesting_expressions_grouped`. + + :param node: an AST expression + :param value: the value of the node + :return: a boolean: True if the expression is interesting, False otherwise + """ + + with suppress(ValueError): + ast.literal_eval(node) + return False + + # TODO exclude inner modules, e.g. numpy.random.__name__ == 'numpy.random' != 'random' + # TODO exclude common module abbreviations, e.g. numpy as np, pandas as pd + if has_ast_name(value, node): + return False + + if ( + isinstance(node, ast.Name) + and getattr(builtins, node.id, object()) is value + ): + return False + + return True + + +def group_expressions(expressions: Iterable[Tuple[ast.expr, Any]]) -> List[Tuple[List[ast.expr], Any]]: + """ + Organise expression nodes and their values such that equivalent nodes are together. + Two nodes are considered equivalent if they have the same structure, + ignoring context (Load, Store, or Delete) and location (lineno, col_offset). + For example, this will group together the same variable name mentioned multiple times in an expression. + + This will not check the values of the nodes. Equivalent nodes should have the same values, + unless threads are involved. + + This is a low level API, typically you will use `interesting_expressions_grouped`. + + :param expressions: pairs of AST expressions and their values, as obtained from + `Evaluator.find_expressions`, or `(node, evaluator[node])`. 
+ :return: A list of pairs (tuples) containing: + - A list of equivalent AST expressions + - The value of the first expression node + (which should be the same for all nodes, unless threads are involved) + """ + + result = {} + for node, value in expressions: + dump = ast.dump(copy_ast_without_context(node)) + result.setdefault(dump, ([], value))[0].append(node) + return list(result.values()) diff --git a/contrib/python/pure-eval/pure_eval/my_getattr_static.py b/contrib/python/pure-eval/pure_eval/my_getattr_static.py new file mode 100644 index 0000000000..c750b1acc3 --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/my_getattr_static.py @@ -0,0 +1,138 @@ +import types + +from pure_eval.utils import of_type, CannotEval + +_sentinel = object() + + +def _static_getmro(klass): + return type.__dict__['__mro__'].__get__(klass) + + +def _check_instance(obj, attr): + instance_dict = {} + try: + instance_dict = object.__getattribute__(obj, "__dict__") + except AttributeError: + pass + return dict.get(instance_dict, attr, _sentinel) + + +def _check_class(klass, attr): + for entry in _static_getmro(klass): + if _shadowed_dict(type(entry)) is _sentinel: + try: + return entry.__dict__[attr] + except KeyError: + pass + else: + break + return _sentinel + + +def _is_type(obj): + try: + _static_getmro(obj) + except TypeError: + return False + return True + + +def _shadowed_dict(klass): + dict_attr = type.__dict__["__dict__"] + for entry in _static_getmro(klass): + try: + class_dict = dict_attr.__get__(entry)["__dict__"] + except KeyError: + pass + else: + if not (type(class_dict) is types.GetSetDescriptorType and + class_dict.__name__ == "__dict__" and + class_dict.__objclass__ is entry): + return class_dict + return _sentinel + + +def getattr_static(obj, attr): + """Retrieve attributes without triggering dynamic lookup via the + descriptor protocol, __getattr__ or __getattribute__. 
+ + Note: this function may not be able to retrieve all attributes + that getattr can fetch (like dynamically created attributes) + and may find attributes that getattr can't (like descriptors + that raise AttributeError). It can also return descriptor objects + instead of instance members in some cases. See the + documentation for details. + """ + instance_result = _sentinel + if not _is_type(obj): + klass = type(obj) + dict_attr = _shadowed_dict(klass) + if (dict_attr is _sentinel or + type(dict_attr) is types.MemberDescriptorType): + instance_result = _check_instance(obj, attr) + else: + raise CannotEval + else: + klass = obj + + klass_result = _check_class(klass, attr) + + if instance_result is not _sentinel and klass_result is not _sentinel: + if (_check_class(type(klass_result), '__get__') is not _sentinel and + _check_class(type(klass_result), '__set__') is not _sentinel): + return _resolve_descriptor(klass_result, obj, klass) + + if instance_result is not _sentinel: + return instance_result + if klass_result is not _sentinel: + get = _check_class(type(klass_result), '__get__') + if get is _sentinel: + return klass_result + else: + if obj is klass: + instance = None + else: + instance = obj + return _resolve_descriptor(klass_result, instance, klass) + + if obj is klass: + # for types we check the metaclass too + for entry in _static_getmro(type(klass)): + if _shadowed_dict(type(entry)) is _sentinel: + try: + result = entry.__dict__[attr] + get = _check_class(type(result), '__get__') + if get is not _sentinel: + raise CannotEval + return result + except KeyError: + pass + raise CannotEval + + +class _foo: + __slots__ = ['foo'] + method = lambda: 0 + + +slot_descriptor = _foo.foo +wrapper_descriptor = str.__dict__['__add__'] +method_descriptor = str.__dict__['startswith'] +user_method_descriptor = _foo.__dict__['method'] + +safe_descriptors_raw = [ + slot_descriptor, + wrapper_descriptor, + method_descriptor, + user_method_descriptor, +] + 
+safe_descriptor_types = list(map(type, safe_descriptors_raw)) + + +def _resolve_descriptor(d, instance, owner): + try: + return type(of_type(d, *safe_descriptor_types)).__get__(d, instance, owner) + except AttributeError as e: + raise CannotEval from e diff --git a/contrib/python/pure-eval/pure_eval/py.typed b/contrib/python/pure-eval/pure_eval/py.typed new file mode 100644 index 0000000000..298c64a904 --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. The pure_eval package uses inline types. diff --git a/contrib/python/pure-eval/pure_eval/utils.py b/contrib/python/pure-eval/pure_eval/utils.py new file mode 100644 index 0000000000..a8a37302da --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/utils.py @@ -0,0 +1,201 @@ +from collections import OrderedDict, deque +from datetime import date, time, datetime +from decimal import Decimal +from fractions import Fraction +import ast +import enum +import typing + + +class CannotEval(Exception): + def __repr__(self): + return self.__class__.__name__ + + __str__ = __repr__ + + +def is_any(x, *args): + return any( + x is arg + for arg in args + ) + + +def of_type(x, *types): + if is_any(type(x), *types): + return x + else: + raise CannotEval + + +def of_standard_types(x, *, check_dict_values: bool, deep: bool): + if is_standard_types(x, check_dict_values=check_dict_values, deep=deep): + return x + else: + raise CannotEval + + +def is_standard_types(x, *, check_dict_values: bool, deep: bool): + try: + return _is_standard_types_deep(x, check_dict_values, deep)[0] + except RecursionError: + return False + + +def _is_standard_types_deep(x, check_dict_values: bool, deep: bool): + typ = type(x) + if is_any( + typ, + str, + int, + bool, + float, + bytes, + complex, + date, + time, + datetime, + Fraction, + Decimal, + type(None), + object, + ): + return True, 0 + + if is_any(typ, tuple, frozenset, list, set, dict, OrderedDict, deque, slice): + if typ in [slice]: + length = 0 
+ else: + length = len(x) + assert isinstance(deep, bool) + if not deep: + return True, length + + if check_dict_values and typ in (dict, OrderedDict): + items = (v for pair in x.items() for v in pair) + elif typ is slice: + items = [x.start, x.stop, x.step] + else: + items = x + for item in items: + if length > 100000: + return False, length + is_standard, item_length = _is_standard_types_deep( + item, check_dict_values, deep + ) + if not is_standard: + return False, length + length += item_length + return True, length + + return False, 0 + + +class _E(enum.Enum): + pass + + +class _C: + def foo(self): pass # pragma: nocover + + def bar(self): pass # pragma: nocover + + @classmethod + def cm(cls): pass # pragma: nocover + + @staticmethod + def sm(): pass # pragma: nocover + + +safe_name_samples = { + "len": len, + "append": list.append, + "__add__": list.__add__, + "insert": [].insert, + "__mul__": [].__mul__, + "fromkeys": dict.__dict__['fromkeys'], + "is_any": is_any, + "__repr__": CannotEval.__repr__, + "foo": _C().foo, + "bar": _C.bar, + "cm": _C.cm, + "sm": _C.sm, + "ast": ast, + "CannotEval": CannotEval, + "_E": _E, +} + +typing_annotation_samples = { + name: getattr(typing, name) + for name in "List Dict Tuple Set Callable Mapping".split() +} + +safe_name_types = tuple({ + type(f) + for f in safe_name_samples.values() +}) + + +typing_annotation_types = tuple({ + type(f) + for f in typing_annotation_samples.values() +}) + + +def eq_checking_types(a, b): + return type(a) is type(b) and a == b + + +def ast_name(node): + if isinstance(node, ast.Name): + return node.id + elif isinstance(node, ast.Attribute): + return node.attr + else: + return None + + +def safe_name(value): + typ = type(value) + if is_any(typ, *safe_name_types): + return value.__name__ + elif value is typing.Optional: + return "Optional" + elif value is typing.Union: + return "Union" + elif is_any(typ, *typing_annotation_types): + return getattr(value, "__name__", None) or getattr(value, 
"_name", None) + else: + return None + + +def has_ast_name(value, node): + value_name = safe_name(value) + if type(value_name) is not str: + return False + return eq_checking_types(ast_name(node), value_name) + + +def copy_ast_without_context(x): + if isinstance(x, ast.AST): + kwargs = { + field: copy_ast_without_context(getattr(x, field)) + for field in x._fields + if field != 'ctx' + if hasattr(x, field) + } + return type(x)(**kwargs) + elif isinstance(x, list): + return list(map(copy_ast_without_context, x)) + else: + return x + + +def ensure_dict(x): + """ + Handles invalid non-dict inputs + """ + try: + return dict(x) + except Exception: + return {} diff --git a/contrib/python/pure-eval/pure_eval/version.py b/contrib/python/pure-eval/pure_eval/version.py new file mode 100644 index 0000000000..9dd16a3451 --- /dev/null +++ b/contrib/python/pure-eval/pure_eval/version.py @@ -0,0 +1 @@ +__version__ = '0.2.2'
\ No newline at end of file diff --git a/contrib/python/stack-data/.dist-info/METADATA b/contrib/python/stack-data/.dist-info/METADATA new file mode 100644 index 0000000000..e77dd5f3c8 --- /dev/null +++ b/contrib/python/stack-data/.dist-info/METADATA @@ -0,0 +1,442 @@ +Metadata-Version: 2.1 +Name: stack-data +Version: 0.2.0 +Summary: Extract data from python stack frames and tracebacks for informative displays +Home-page: http://github.com/alexmojaki/stack_data +Author: Alex Hall +Author-email: alex.mojaki@gmail.com +License: MIT +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Topic :: Software Development :: Debuggers +Description-Content-Type: text/markdown +License-File: LICENSE.txt +Requires-Dist: executing +Requires-Dist: asttokens +Requires-Dist: pure-eval +Provides-Extra: tests +Requires-Dist: pytest ; extra == 'tests' +Requires-Dist: typeguard ; extra == 'tests' +Requires-Dist: pygments ; extra == 'tests' +Requires-Dist: littleutils ; extra == 'tests' +Requires-Dist: cython ; extra == 'tests' + +# stack_data + +[![Tests](https://github.com/alexmojaki/stack_data/actions/workflows/pytest.yml/badge.svg)](https://github.com/alexmojaki/stack_data/actions/workflows/pytest.yml) [![Coverage Status](https://coveralls.io/repos/github/alexmojaki/stack_data/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/stack_data?branch=master) [![Supports Python versions 3.5+](https://img.shields.io/pypi/pyversions/stack_data.svg)](https://pypi.python.org/pypi/stack_data) + +This is a library that extracts data from stack frames and 
tracebacks, particularly to display more useful tracebacks than the default. + +You can install it from PyPI: + + pip install stack_data + +## Basic usage + +Here's some code we'd like to inspect: + +```python +def foo(): + result = [] + for i in range(5): + row = [] + result.append(row) + print_stack() + for j in range(5): + row.append(i * j) + return result +``` + +Note that `foo` calls a function `print_stack()`. In reality we can imagine that an exception was raised at this line, or a debugger stopped there, but this is easy to play with directly. Here's a basic implementation: + +```python +import inspect +import stack_data + + +def print_stack(): + frame = inspect.currentframe().f_back + frame_info = stack_data.FrameInfo(frame) + print(f"{frame_info.code.co_name} at line {frame_info.lineno}") + print("-----------") + for line in frame_info.lines: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") +``` + +(Beware that this has a major bug - it doesn't account for line gaps, which we'll learn about later) + +The output of one call to `print_stack()` looks like: + +``` +foo at line 9 +----------- + 6 | for i in range(5): + 7 | row = [] + 8 | result.append(row) +--> 9 | print_stack() + 10 | for j in range(5): +``` + +The code for `print_stack()` is fairly self-explanatory. If you want to learn more details about a particular class or method I suggest looking through some docstrings. `FrameInfo` is a class that accepts either a frame or a traceback object and provides a bunch of nice attributes and properties (which are cached so you don't need to worry about performance). In particular `frame_info.lines` is a list of `Line` objects. `line.render()` returns the source code of that line suitable for display. Without any arguments it simply strips any common leading indentation. Later on we'll see a more powerful use for it. + +You can see that `frame_info.lines` includes some lines of surrounding context. 
By default it includes 3 pieces of context before the main line and 1 piece after. We can configure the amount of context by passing options: + +```python +options = stack_data.Options(before=1, after=0) +frame_info = stack_data.FrameInfo(frame, options) +``` + +Then the output looks like: + +``` +foo at line 9 +----------- + 8 | result.append(row) +--> 9 | print_stack() +``` + +Note that these parameters are not the number of *lines* before and after to include, but the number of *pieces*. A piece is a range of one or more lines in a file that should logically be grouped together. A piece contains either a single simple statement or a part of a compound statement (loops, if, try/except, etc) that doesn't contain any other statements. Most pieces are a single line, but a multi-line statement or `if` condition is a single piece. In the example above, all pieces are one line, because nothing is spread across multiple lines. If we change our code to include some multiline bits: + + +```python +def foo(): + result = [] + for i in range(5): + row = [] + result.append( + row + ) + print_stack() + for j in range( + 5 + ): + row.append(i * j) + return result +``` + +and then run the original code with the default options, then the output is: + +``` +foo at line 11 +----------- + 6 | for i in range(5): + 7 | row = [] + 8 | result.append( + 9 | row + 10 | ) +--> 11 | print_stack() + 12 | for j in range( + 13 | 5 + 14 | ): +``` + +Now lines 8-10 and lines 12-14 are each a single piece. Note that the output is essentially the same as the original in terms of the amount of code. The division of files into pieces means that the edge of the context is intuitive and doesn't crop out parts of statements or expressions. For example, if context was measured in lines instead of pieces, the last line of the above would be `for j in range(` which is much less useful. + +However, if a piece is very long, including all of it could be cumbersome. 
For this, `Options` has a parameter `max_lines_per_piece`, which is 6 by default. Suppose we have a piece in our code that's longer than that: + +```python + row = [ + 1, + 2, + 3, + 4, + 5, + ] +``` + +`frame_info.lines` will truncate this piece so that instead of 7 `Line` objects it will produce 5 `Line` objects and one `LINE_GAP` in the middle, making 6 objects in total for the piece. Our code doesn't currently handle gaps, so it will raise an exception. We can modify it like so: + +```python + for line in frame_info.lines: + if line is stack_data.LINE_GAP: + print(" (...)") + else: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") +``` + +Now the output looks like: + +``` +foo at line 15 +----------- + 6 | for i in range(5): + 7 | row = [ + 8 | 1, + 9 | 2, + (...) + 12 | 5, + 13 | ] + 14 | result.append(row) +--> 15 | print_stack() + 16 | for j in range(5): +``` + +Alternatively, you can flip the condition around and check `if isinstance(line, stack_data.Line):`. Either way, you should always check for line gaps, or your code may appear to work at first but fail when it encounters a long piece. + +Note that the executing piece, i.e. the piece containing the current line being executed (line 15 in this case) is never truncated, no matter how long it is. + +The lines of context never stray outside `frame_info.scope`, which is the innermost function or class definition containing the current line. For example, this is the output for a short function which has neither 3 lines before nor 1 line after the current line: + +``` +bar at line 6 +----------- + 4 | def bar(): + 5 | foo() +--> 6 | print_stack() +``` + +Sometimes it's nice to ensure that the function signature is always showing. This can be done with `Options(include_signature=True)`. The result looks like this: + +``` +foo at line 14 +----------- + 9 | def foo(): + (...) 
+ 11 | for i in range(5): + 12 | row = [] + 13 | result.append(row) +--> 14 | print_stack() + 15 | for j in range(5): +``` + +To avoid wasting space, pieces never start or end with a blank line, and blank lines between pieces are excluded. So if our code looks like this: + + +```python + for i in range(5): + row = [] + + result.append(row) + print_stack() + + for j in range(5): +``` + +The output doesn't change much, except you can see jumps in the line numbers: + +``` + 11 | for i in range(5): + 12 | row = [] + 14 | result.append(row) +--> 15 | print_stack() + 17 | for j in range(5): +``` + +## Variables + +You can also inspect variables and other expressions in a frame, e.g: + +```python + for var in frame_info.variables: + print(f"{var.name} = {repr(var.value)}") +``` + +which may output: + +```python +result = [[0, 0, 0, 0, 0], [0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [0, 3, 6, 9, 12], []] +i = 4 +row = [] +j = 4 +``` + +`frame_info.variables` returns a list of `Variable` objects, which have attributes `name`, `value`, and `nodes`, which is a list of all AST representing that expression. + +A `Variable` may refer to an expression other than a simple variable name. It can be any expression evaluated by the library [`pure_eval`](https://github.com/alexmojaki/pure_eval) which it deems 'interesting' (see those docs for more info). This includes expressions like `foo.bar` or `foo[bar]`. In these cases `name` is the source code of that expression. `pure_eval` ensures that it only evaluates expressions that won't have any side effects, e.g. where `foo.bar` is a normal attribute rather than a descriptor such as a property. + +`frame_info.variables` is a list of all the interesting expressions found in `frame_info.scope`, e.g. the current function, which may include expressions not visible in `frame_info.lines`. You can restrict the list by using `frame_info.variables_in_lines` or even `frame_info.variables_in_executing_piece`. 
For more control you can use `frame_info.variables_by_lineno`. See the docstrings for more information. + +## Rendering lines with ranges and markers + +Sometimes you may want to insert special characters into the text for display purposes, e.g. HTML or ANSI color codes. `stack_data` provides a few tools to make this easier. + +Let's say we have a `Line` object where `line.text` (the original raw source code of that line) is `"foo = bar"`, so `line.text[6:9]` is `"bar"`, and we want to emphasise that part by inserting HTML at positions 6 and 9 in the text. Here's how we can do that directly: + +```python +markers = [ + stack_data.MarkerInLine(position=6, is_start=True, string="<b>"), + stack_data.MarkerInLine(position=9, is_start=False, string="</b>"), +] +line.render(markers) # returns "foo = <b>bar</b>" +``` + +Here `is_start=True` indicates that the marker is the first of a pair. This helps `line.render()` sort and insert the markers correctly so you don't end up with malformed HTML like `foo<b>.<i></b>bar</i>` where tags overlap. + +Since we're inserting HTML, we should actually use `line.render(markers, escape_html=True)` which will escape special HTML characters in the Python source (but not the markers) so for example `foo = bar < spam` would be rendered as `foo = <b>bar</b> < spam`. + +Usually though you wouldn't create markers directly yourself. Instead you would start with one or more ranges and then convert them, like so: + +```python +ranges = [ + stack_data.RangeInLine(start=0, end=3, data="foo"), + stack_data.RangeInLine(start=6, end=9, data="bar"), +] + +def convert_ranges(r): + if r.data == "bar": + return "<b>", "</b>" + +# This results in `markers` being the same as in the above example. +markers = stack_data.markers_from_ranges(ranges, convert_ranges) +``` + +`RangeInLine` has a `data` attribute which can be any object. `markers_from_ranges` accepts a converter function to which it passes all the `RangeInLine` objects. 
If the converter function returns a pair of strings, it creates two markers from them. Otherwise it should return `None` to indicate that the range should be ignored, as with the first range containing `"foo"` in this example. + +The reason this is useful is because there are built in tools to create these ranges for you. For example, if we change our `print_stack()` function to contain this: + +```python +def convert_variable_ranges(r): + variable, _node = r.data + return f'<span data-value="{repr(variable.value)}">', '</span>' + +markers = stack_data.markers_from_ranges(line.variable_ranges, convert_variable_ranges) +print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render(markers, escape_html=True)}") +``` + +Then the output becomes: + +``` +foo at line 15 +----------- + 9 | def foo(): + (...) + 11 | for <span data-value="4">i</span> in range(5): + 12 | <span data-value="[]">row</span> = [] + 14 | <span data-value="[[0, 0, 0, 0, 0], [0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [0, 3, 6, 9, 12], []]">result</span>.append(<span data-value="[]">row</span>) +--> 15 | print_stack() + 17 | for <span data-value="4">j</span> in range(5): +``` + +`line.variable_ranges` is a list of RangeInLines for each Variable that appears at least partially in this line. The data attribute of the range is a pair `(variable, node)` where node is the particular AST node from the list `variable.nodes` that corresponds to this range. + +You can also use `line.token_ranges` (e.g. if you want to do your own syntax highlighting) or `line.executing_node_ranges` if you want to highlight the currently executing node identified by the [`executing`](https://github.com/alexmojaki/executing) library. Or if you want to make your own range from an AST node, use `line.range_from_node(node, data)`. See the docstrings for more info. + +### Syntax highlighting with Pygments + +If you'd like pretty colored text without the work, you can let [Pygments](https://pygments.org/) do it for you. 
Just follow these steps: + +1. `pip install pygments` separately as it's not a dependency of `stack_data`. +2. Create a pygments formatter object such as `HtmlFormatter` or `Terminal256Formatter`. +3. Pass the formatter to `Options` in the argument `pygments_formatter`. +4. Use `line.render(pygmented=True)` to get your formatted text. In this case you can't pass any markers to `render`. + +If you want, you can also highlight the executing node in the frame in combination with the pygments syntax highlighting. For this you will need: + +1. A pygments style - either a style class or a string that names it. See the [documentation on styles](https://pygments.org/docs/styles/) and the [styles gallery](https://blog.yjl.im/2015/08/pygments-styles-gallery.html). +2. A modification to make to the style for the executing node, which is a string such as `"bold"` or `"bg:#ffff00"` (yellow background). See the [documentation on style rules](https://pygments.org/docs/styles/#style-rules). +3. Pass these two things to `stack_data.style_with_executing_node(style, modifier)` to get a new style class. +4. Pass the new style to your formatter when you create it. + +Note that this doesn't work with `TerminalFormatter` which just uses the basic ANSI colors and doesn't use the style passed to it in general. + +## Getting the full stack + +Currently `print_stack()` doesn't actually print the stack, it just prints one frame. 
Instead of `frame_info = FrameInfo(frame, options)`, let's do this: + +```python +for frame_info in FrameInfo.stack_data(frame, options): +``` + +Now the output looks something like this: + +``` +<module> at line 18 +----------- + 14 | for j in range(5): + 15 | row.append(i * j) + 16 | return result +--> 18 | bar() + +bar at line 5 +----------- + 4 | def bar(): +--> 5 | foo() + +foo at line 13 +----------- + 10 | for i in range(5): + 11 | row = [] + 12 | result.append(row) +--> 13 | print_stack() + 14 | for j in range(5): +``` + +However, just as `frame_info.lines` doesn't always yield `Line` objects, `FrameInfo.stack_data` doesn't always yield `FrameInfo` objects, and we must modify our code to handle that. Let's look at some different sample code: + +```python +def factorial(x): + return x * factorial(x - 1) + + +try: + print(factorial(5)) +except: + print_stack() +``` + +In this code we've forgotten to include a base case in our `factorial` function so it will fail with a `RecursionError` and there'll be many frames with similar information. Similar to the built in Python traceback, `stack_data` avoids showing all of these frames. Instead you will get a `RepeatedFrames` object which summarises the information. See its docstring for more details. + +Here is our updated implementation: + +```python +def print_stack(): + for frame_info in FrameInfo.stack_data(sys.exc_info()[2]): + if isinstance(frame_info, FrameInfo): + print(f"{frame_info.code.co_name} at line {frame_info.lineno}") + print("-----------") + for line in frame_info.lines: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") + + for var in frame_info.variables: + print(f"{var.name} = {repr(var.value)}") + + print() + else: + print(f"... 
{frame_info.description} ...\n") +``` + +And the output: + +``` +<module> at line 9 +----------- + 4 | def factorial(x): + 5 | return x * factorial(x - 1) + 8 | try: +--> 9 | print(factorial(5)) + 10 | except: + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = 5 + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = 4 + +... factorial at line 5 (996 times) ... + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = -993 +``` + +In addition to handling repeated frames, we've passed a traceback object to `FrameInfo.stack_data` instead of a frame. + +If you want, you can pass `collapse_repeated_frames=False` to `FrameInfo.stack_data` (not to `Options`) and it will just yield `FrameInfo` objects for the full stack. + + diff --git a/contrib/python/stack-data/.dist-info/top_level.txt b/contrib/python/stack-data/.dist-info/top_level.txt new file mode 100644 index 0000000000..09e7428c13 --- /dev/null +++ b/contrib/python/stack-data/.dist-info/top_level.txt @@ -0,0 +1 @@ +stack_data diff --git a/contrib/python/stack-data/LICENSE.txt b/contrib/python/stack-data/LICENSE.txt new file mode 100644 index 0000000000..473e36e246 --- /dev/null +++ b/contrib/python/stack-data/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Alex Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/contrib/python/stack-data/README.md b/contrib/python/stack-data/README.md new file mode 100644 index 0000000000..a8bc532643 --- /dev/null +++ b/contrib/python/stack-data/README.md @@ -0,0 +1,409 @@ +# stack_data + +[![Tests](https://github.com/alexmojaki/stack_data/actions/workflows/pytest.yml/badge.svg)](https://github.com/alexmojaki/stack_data/actions/workflows/pytest.yml) [![Coverage Status](https://coveralls.io/repos/github/alexmojaki/stack_data/badge.svg?branch=master)](https://coveralls.io/github/alexmojaki/stack_data?branch=master) [![Supports Python versions 3.5+](https://img.shields.io/pypi/pyversions/stack_data.svg)](https://pypi.python.org/pypi/stack_data) + +This is a library that extracts data from stack frames and tracebacks, particularly to display more useful tracebacks than the default. + +You can install it from PyPI: + + pip install stack_data + +## Basic usage + +Here's some code we'd like to inspect: + +```python +def foo(): + result = [] + for i in range(5): + row = [] + result.append(row) + print_stack() + for j in range(5): + row.append(i * j) + return result +``` + +Note that `foo` calls a function `print_stack()`. In reality we can imagine that an exception was raised at this line, or a debugger stopped there, but this is easy to play with directly. 
Here's a basic implementation: + +```python +import inspect +import stack_data + + +def print_stack(): + frame = inspect.currentframe().f_back + frame_info = stack_data.FrameInfo(frame) + print(f"{frame_info.code.co_name} at line {frame_info.lineno}") + print("-----------") + for line in frame_info.lines: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") +``` + +(Beware that this has a major bug - it doesn't account for line gaps, which we'll learn about later) + +The output of one call to `print_stack()` looks like: + +``` +foo at line 9 +----------- + 6 | for i in range(5): + 7 | row = [] + 8 | result.append(row) +--> 9 | print_stack() + 10 | for j in range(5): +``` + +The code for `print_stack()` is fairly self-explanatory. If you want to learn more details about a particular class or method I suggest looking through some docstrings. `FrameInfo` is a class that accepts either a frame or a traceback object and provides a bunch of nice attributes and properties (which are cached so you don't need to worry about performance). In particular `frame_info.lines` is a list of `Line` objects. `line.render()` returns the source code of that line suitable for display. Without any arguments it simply strips any common leading indentation. Later on we'll see a more powerful use for it. + +You can see that `frame_info.lines` includes some lines of surrounding context. By default it includes 3 pieces of context before the main line and 1 piece after. We can configure the amount of context by passing options: + +```python +options = stack_data.Options(before=1, after=0) +frame_info = stack_data.FrameInfo(frame, options) +``` + +Then the output looks like: + +``` +foo at line 9 +----------- + 8 | result.append(row) +--> 9 | print_stack() +``` + +Note that these parameters are not the number of *lines* before and after to include, but the number of *pieces*. 
A piece is a range of one or more lines in a file that should logically be grouped together. A piece contains either a single simple statement or a part of a compound statement (loops, if, try/except, etc) that doesn't contain any other statements. Most pieces are a single line, but a multi-line statement or `if` condition is a single piece. In the example above, all pieces are one line, because nothing is spread across multiple lines. If we change our code to include some multiline bits: + + +```python +def foo(): + result = [] + for i in range(5): + row = [] + result.append( + row + ) + print_stack() + for j in range( + 5 + ): + row.append(i * j) + return result +``` + +and then run the original code with the default options, then the output is: + +``` +foo at line 11 +----------- + 6 | for i in range(5): + 7 | row = [] + 8 | result.append( + 9 | row + 10 | ) +--> 11 | print_stack() + 12 | for j in range( + 13 | 5 + 14 | ): +``` + +Now lines 8-10 and lines 12-14 are each a single piece. Note that the output is essentially the same as the original in terms of the amount of code. The division of files into pieces means that the edge of the context is intuitive and doesn't crop out parts of statements or expressions. For example, if context was measured in lines instead of pieces, the last line of the above would be `for j in range(` which is much less useful. + +However, if a piece is very long, including all of it could be cumbersome. For this, `Options` has a parameter `max_lines_per_piece`, which is 6 by default. Suppose we have a piece in our code that's longer than that: + +```python + row = [ + 1, + 2, + 3, + 4, + 5, + ] +``` + +`frame_info.lines` will truncate this piece so that instead of 7 `Line` objects it will produce 5 `Line` objects and one `LINE_GAP` in the middle, making 6 objects in total for the piece. Our code doesn't currently handle gaps, so it will raise an exception. 
We can modify it like so: + +```python + for line in frame_info.lines: + if line is stack_data.LINE_GAP: + print(" (...)") + else: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") +``` + +Now the output looks like: + +``` +foo at line 15 +----------- + 6 | for i in range(5): + 7 | row = [ + 8 | 1, + 9 | 2, + (...) + 12 | 5, + 13 | ] + 14 | result.append(row) +--> 15 | print_stack() + 16 | for j in range(5): +``` + +Alternatively, you can flip the condition around and check `if isinstance(line, stack_data.Line):`. Either way, you should always check for line gaps, or your code may appear to work at first but fail when it encounters a long piece. + +Note that the executing piece, i.e. the piece containing the current line being executed (line 15 in this case) is never truncated, no matter how long it is. + +The lines of context never stray outside `frame_info.scope`, which is the innermost function or class definition containing the current line. For example, this is the output for a short function which has neither 3 lines before nor 1 line after the current line: + +``` +bar at line 6 +----------- + 4 | def bar(): + 5 | foo() +--> 6 | print_stack() +``` + +Sometimes it's nice to ensure that the function signature is always showing. This can be done with `Options(include_signature=True)`. The result looks like this: + +``` +foo at line 14 +----------- + 9 | def foo(): + (...) + 11 | for i in range(5): + 12 | row = [] + 13 | result.append(row) +--> 14 | print_stack() + 15 | for j in range(5): +``` + +To avoid wasting space, pieces never start or end with a blank line, and blank lines between pieces are excluded. 
So if our code looks like this: + + +```python + for i in range(5): + row = [] + + result.append(row) + print_stack() + + for j in range(5): +``` + +The output doesn't change much, except you can see jumps in the line numbers: + +``` + 11 | for i in range(5): + 12 | row = [] + 14 | result.append(row) +--> 15 | print_stack() + 17 | for j in range(5): +``` + +## Variables + +You can also inspect variables and other expressions in a frame, e.g: + +```python + for var in frame_info.variables: + print(f"{var.name} = {repr(var.value)}") +``` + +which may output: + +```python +result = [[0, 0, 0, 0, 0], [0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [0, 3, 6, 9, 12], []] +i = 4 +row = [] +j = 4 +``` + +`frame_info.variables` returns a list of `Variable` objects, which have attributes `name`, `value`, and `nodes`, which is a list of all AST representing that expression. + +A `Variable` may refer to an expression other than a simple variable name. It can be any expression evaluated by the library [`pure_eval`](https://github.com/alexmojaki/pure_eval) which it deems 'interesting' (see those docs for more info). This includes expressions like `foo.bar` or `foo[bar]`. In these cases `name` is the source code of that expression. `pure_eval` ensures that it only evaluates expressions that won't have any side effects, e.g. where `foo.bar` is a normal attribute rather than a descriptor such as a property. + +`frame_info.variables` is a list of all the interesting expressions found in `frame_info.scope`, e.g. the current function, which may include expressions not visible in `frame_info.lines`. You can restrict the list by using `frame_info.variables_in_lines` or even `frame_info.variables_in_executing_piece`. For more control you can use `frame_info.variables_by_lineno`. See the docstrings for more information. + +## Rendering lines with ranges and markers + +Sometimes you may want to insert special characters into the text for display purposes, e.g. HTML or ANSI color codes. 
`stack_data` provides a few tools to make this easier. + +Let's say we have a `Line` object where `line.text` (the original raw source code of that line) is `"foo = bar"`, so `line.text[6:9]` is `"bar"`, and we want to emphasise that part by inserting HTML at positions 6 and 9 in the text. Here's how we can do that directly: + +```python +markers = [ + stack_data.MarkerInLine(position=6, is_start=True, string="<b>"), + stack_data.MarkerInLine(position=9, is_start=False, string="</b>"), +] +line.render(markers) # returns "foo = <b>bar</b>" +``` + +Here `is_start=True` indicates that the marker is the first of a pair. This helps `line.render()` sort and insert the markers correctly so you don't end up with malformed HTML like `foo<b>.<i></b>bar</i>` where tags overlap. + +Since we're inserting HTML, we should actually use `line.render(markers, escape_html=True)` which will escape special HTML characters in the Python source (but not the markers) so for example `foo = bar < spam` would be rendered as `foo = <b>bar</b> < spam`. + +Usually though you wouldn't create markers directly yourself. Instead you would start with one or more ranges and then convert them, like so: + +```python +ranges = [ + stack_data.RangeInLine(start=0, end=3, data="foo"), + stack_data.RangeInLine(start=6, end=9, data="bar"), +] + +def convert_ranges(r): + if r.data == "bar": + return "<b>", "</b>" + +# This results in `markers` being the same as in the above example. +markers = stack_data.markers_from_ranges(ranges, convert_ranges) +``` + +`RangeInLine` has a `data` attribute which can be any object. `markers_from_ranges` accepts a converter function to which it passes all the `RangeInLine` objects. If the converter function returns a pair of strings, it creates two markers from them. Otherwise it should return `None` to indicate that the range should be ignored, as with the first range containing `"foo"` in this example. 
+ +The reason this is useful is because there are built in tools to create these ranges for you. For example, if we change our `print_stack()` function to contain this: + +```python +def convert_variable_ranges(r): + variable, _node = r.data + return f'<span data-value="{repr(variable.value)}">', '</span>' + +markers = stack_data.markers_from_ranges(line.variable_ranges, convert_variable_ranges) +print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render(markers, escape_html=True)}") +``` + +Then the output becomes: + +``` +foo at line 15 +----------- + 9 | def foo(): + (...) + 11 | for <span data-value="4">i</span> in range(5): + 12 | <span data-value="[]">row</span> = [] + 14 | <span data-value="[[0, 0, 0, 0, 0], [0, 1, 2, 3, 4], [0, 2, 4, 6, 8], [0, 3, 6, 9, 12], []]">result</span>.append(<span data-value="[]">row</span>) +--> 15 | print_stack() + 17 | for <span data-value="4">j</span> in range(5): +``` + +`line.variable_ranges` is a list of RangeInLines for each Variable that appears at least partially in this line. The data attribute of the range is a pair `(variable, node)` where node is the particular AST node from the list `variable.nodes` that corresponds to this range. + +You can also use `line.token_ranges` (e.g. if you want to do your own syntax highlighting) or `line.executing_node_ranges` if you want to highlight the currently executing node identified by the [`executing`](https://github.com/alexmojaki/executing) library. Or if you want to make your own range from an AST node, use `line.range_from_node(node, data)`. See the docstrings for more info. + +### Syntax highlighting with Pygments + +If you'd like pretty colored text without the work, you can let [Pygments](https://pygments.org/) do it for you. Just follow these steps: + +1. `pip install pygments` separately as it's not a dependency of `stack_data`. +2. Create a pygments formatter object such as `HtmlFormatter` or `Terminal256Formatter`. +3. 
Pass the formatter to `Options` in the argument `pygments_formatter`. +4. Use `line.render(pygmented=True)` to get your formatted text. In this case you can't pass any markers to `render`. + +If you want, you can also highlight the executing node in the frame in combination with the pygments syntax highlighting. For this you will need: + +1. A pygments style - either a style class or a string that names it. See the [documentation on styles](https://pygments.org/docs/styles/) and the [styles gallery](https://blog.yjl.im/2015/08/pygments-styles-gallery.html). +2. A modification to make to the style for the executing node, which is a string such as `"bold"` or `"bg:#ffff00"` (yellow background). See the [documentation on style rules](https://pygments.org/docs/styles/#style-rules). +3. Pass these two things to `stack_data.style_with_executing_node(style, modifier)` to get a new style class. +4. Pass the new style to your formatter when you create it. + +Note that this doesn't work with `TerminalFormatter` which just uses the basic ANSI colors and doesn't use the style passed to it in general. + +## Getting the full stack + +Currently `print_stack()` doesn't actually print the stack, it just prints one frame. Instead of `frame_info = FrameInfo(frame, options)`, let's do this: + +```python +for frame_info in FrameInfo.stack_data(frame, options): +``` + +Now the output looks something like this: + +``` +<module> at line 18 +----------- + 14 | for j in range(5): + 15 | row.append(i * j) + 16 | return result +--> 18 | bar() + +bar at line 5 +----------- + 4 | def bar(): +--> 5 | foo() + +foo at line 13 +----------- + 10 | for i in range(5): + 11 | row = [] + 12 | result.append(row) +--> 13 | print_stack() + 14 | for j in range(5): +``` + +However, just as `frame_info.lines` doesn't always yield `Line` objects, `FrameInfo.stack_data` doesn't always yield `FrameInfo` objects, and we must modify our code to handle that. 
Let's look at some different sample code: + +```python +def factorial(x): + return x * factorial(x - 1) + + +try: + print(factorial(5)) +except: + print_stack() +``` + +In this code we've forgotten to include a base case in our `factorial` function so it will fail with a `RecursionError` and there'll be many frames with similar information. Similar to the built in Python traceback, `stack_data` avoids showing all of these frames. Instead you will get a `RepeatedFrames` object which summarises the information. See its docstring for more details. + +Here is our updated implementation: + +```python +def print_stack(): + for frame_info in FrameInfo.stack_data(sys.exc_info()[2]): + if isinstance(frame_info, FrameInfo): + print(f"{frame_info.code.co_name} at line {frame_info.lineno}") + print("-----------") + for line in frame_info.lines: + print(f"{'-->' if line.is_current else ' '} {line.lineno:4} | {line.render()}") + + for var in frame_info.variables: + print(f"{var.name} = {repr(var.value)}") + + print() + else: + print(f"... {frame_info.description} ...\n") +``` + +And the output: + +``` +<module> at line 9 +----------- + 4 | def factorial(x): + 5 | return x * factorial(x - 1) + 8 | try: +--> 9 | print(factorial(5)) + 10 | except: + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = 5 + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = 4 + +... factorial at line 5 (996 times) ... + +factorial at line 5 +----------- + 4 | def factorial(x): +--> 5 | return x * factorial(x - 1) +x = -993 +``` + +In addition to handling repeated frames, we've passed a traceback object to `FrameInfo.stack_data` instead of a frame. + +If you want, you can pass `collapse_repeated_frames=False` to `FrameInfo.stack_data` (not to `Options`) and it will just yield `FrameInfo` objects for the full stack. 
diff --git a/contrib/python/stack-data/stack_data/__init__.py b/contrib/python/stack-data/stack_data/__init__.py new file mode 100644 index 0000000000..e9bc429e62 --- /dev/null +++ b/contrib/python/stack-data/stack_data/__init__.py @@ -0,0 +1,9 @@ +from .core import Source, FrameInfo, markers_from_ranges, Options, LINE_GAP, Line, Variable, RangeInLine, \ + RepeatedFrames, MarkerInLine, style_with_executing_node +from .formatting import Formatter + +try: + from .version import __version__ +except ImportError: + # version.py is auto-generated with the git tag when building + __version__ = "???" diff --git a/contrib/python/stack-data/stack_data/core.py b/contrib/python/stack-data/stack_data/core.py new file mode 100644 index 0000000000..97313fe333 --- /dev/null +++ b/contrib/python/stack-data/stack_data/core.py @@ -0,0 +1,882 @@ +import ast +import html +import os +import sys +from collections import defaultdict, Counter +from textwrap import dedent +from types import FrameType, CodeType, TracebackType +from typing import ( + Iterator, List, Tuple, Optional, NamedTuple, + Any, Iterable, Callable, Union, + Sequence) +from typing import Mapping + +import executing +from asttokens.util import Token +from executing import only +from pure_eval import Evaluator, is_expression_interesting +from stack_data.utils import ( + truncate, unique_in_order, line_range, + frame_and_lineno, iter_stack, collapse_repeated, group_by_key_func, + cached_property, is_frame, _pygmented_with_ranges, assert_) + +RangeInLine = NamedTuple('RangeInLine', + [('start', int), + ('end', int), + ('data', Any)]) +RangeInLine.__doc__ = """ +Represents a range of characters within one line of source code, +and some associated data. + +Typically this will be converted to a pair of markers by markers_from_ranges. 
+""" + +MarkerInLine = NamedTuple('MarkerInLine', + [('position', int), + ('is_start', bool), + ('string', str)]) +MarkerInLine.__doc__ = """ +A string that is meant to be inserted at a given position in a line of source code. +For example, this could be an ANSI code or the opening or closing of an HTML tag. +is_start should be True if this is the first of a pair such as the opening of an HTML tag. +This will help to sort and insert markers correctly. + +Typically this would be created from a RangeInLine by markers_from_ranges. +Then use Line.render to insert the markers correctly. +""" + + +class Variable( + NamedTuple('_Variable', + [('name', str), + ('nodes', Sequence[ast.AST]), + ('value', Any)]) +): + """ + An expression that appears one or more times in source code and its associated value. + This will usually be a variable but it can be any expression evaluated by pure_eval. + - name is the source text of the expression. + - nodes is a list of equivalent nodes representing the same expression. + - value is the safely evaluated value of the expression. + """ + __hash__ = object.__hash__ + __eq__ = object.__eq__ + + +class Source(executing.Source): + """ + The source code of a single file and associated metadata. + + In addition to the attributes from the base class executing.Source, + if .tree is not None, meaning this is valid Python code, objects have: + - pieces: a list of Piece objects + - tokens_by_lineno: a defaultdict(list) mapping line numbers to lists of tokens. + + Don't construct this class. Get an instance from frame_info.source. 
+ """ + + def __init__(self, *args, **kwargs): + super(Source, self).__init__(*args, **kwargs) + if self.tree: + self.asttokens() + + @cached_property + def pieces(self) -> List[range]: + if not self.tree: + return [ + range(i, i + 1) + for i in range(1, len(self.lines) + 1) + ] + return list(self._clean_pieces()) + + @cached_property + def tokens_by_lineno(self) -> Mapping[int, List[Token]]: + if not self.tree: + raise AttributeError("This file doesn't contain valid Python, so .tokens_by_lineno doesn't exist") + return group_by_key_func( + self.asttokens().tokens, + lambda tok: tok.start[0], + ) + + def _clean_pieces(self) -> Iterator[range]: + pieces = self._raw_split_into_pieces(self.tree, 1, len(self.lines) + 1) + pieces = [ + (start, end) + for (start, end) in pieces + if end > start + ] + + starts = [start for start, end in pieces[1:]] + ends = [end for start, end in pieces[:-1]] + if starts != ends: + joins = list(map(set, zip(starts, ends))) + mismatches = [s for s in joins if len(s) > 1] + raise AssertionError("Pieces mismatches: %s" % mismatches) + + def is_blank(i): + try: + return not self.lines[i - 1].strip() + except IndexError: + return False + + for start, end in pieces: + while is_blank(start): + start += 1 + while is_blank(end - 1): + end -= 1 + if start < end: + yield range(start, end) + + def _raw_split_into_pieces( + self, + stmt: ast.AST, + start: int, + end: int, + ) -> Iterator[Tuple[int, int]]: + self.asttokens() + + for name, body in ast.iter_fields(stmt): + if ( + isinstance(body, list) and body and + isinstance(body[0], (ast.stmt, ast.ExceptHandler)) + ): + for rang, group in sorted(group_by_key_func(body, line_range).items()): + sub_stmt = group[0] + for inner_start, inner_end in self._raw_split_into_pieces(sub_stmt, *rang): + if start < inner_start: + yield start, inner_start + if inner_start < inner_end: + yield inner_start, inner_end + start = inner_end + + yield start, end + + +class Options: + """ + Configuration for FrameInfo, 
either in the constructor or the .stack_data classmethod. + These all determine which Lines and gaps are produced by FrameInfo.lines. + + before and after are the number of pieces of context to include in a frame + in addition to the executing piece. + + include_signature is whether to include the function signature as a piece in a frame. + + If a piece (other than the executing piece) has more than max_lines_per_piece lines, + it will be truncated with a gap in the middle. + """ + def __init__( + self, *, + before: int = 3, + after: int = 1, + include_signature: bool = False, + max_lines_per_piece: int = 6, + pygments_formatter=None + ): + self.before = before + self.after = after + self.include_signature = include_signature + self.max_lines_per_piece = max_lines_per_piece + self.pygments_formatter = pygments_formatter + + def __repr__(self): + keys = sorted(self.__dict__) + items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) + return "{}({})".format(type(self).__name__, ", ".join(items)) + + +class LineGap(object): + """ + A singleton representing one or more lines of source code that were skipped + in FrameInfo.lines. + + LINE_GAP can be created in two ways: + - by truncating a piece of context that's too long. + - immediately after the signature piece if Options.include_signature is true + and the following piece isn't already part of the included pieces. + """ + def __repr__(self): + return "LINE_GAP" + + +LINE_GAP = LineGap() + + +class Line(object): + """ + A single line of source code for a particular stack frame. + + Typically this is obtained from FrameInfo.lines. + Since that list may also contain LINE_GAP, you should first check + that this is really a Line before using it. + + Attributes: + - frame_info + - lineno: the 1-based line number within the file + - text: the raw source of this line. For displaying text, see .render() instead. + - leading_indent: the number of leading spaces that should probably be stripped. 
+ This attribute is set within FrameInfo.lines. If you construct this class + directly you should probably set it manually (at least to 0). + - is_current: whether this is the line currently being executed by the interpreter + within this frame. + - tokens: a list of source tokens in this line + + There are several helpers for constructing RangeInLines which can be converted to markers + using markers_from_ranges which can be passed to .render(): + - token_ranges + - variable_ranges + - executing_node_ranges + - range_from_node + """ + def __init__( + self, + frame_info: 'FrameInfo', + lineno: int, + ): + self.frame_info = frame_info + self.lineno = lineno + self.text = frame_info.source.lines[lineno - 1] # type: str + self.leading_indent = None # type: Optional[int] + + def __repr__(self): + return "<{self.__class__.__name__} {self.lineno} (current={self.is_current}) " \ + "{self.text!r} of {self.frame_info.filename}>".format(self=self) + + @property + def is_current(self) -> bool: + """ + Whether this is the line currently being executed by the interpreter + within this frame. + """ + return self.lineno == self.frame_info.lineno + + @property + def tokens(self) -> List[Token]: + """ + A list of source tokens in this line. + The tokens are Token objects from asttokens: + https://asttokens.readthedocs.io/en/latest/api-index.html#asttokens.util.Token + """ + return self.frame_info.source.tokens_by_lineno[self.lineno] + + @cached_property + def token_ranges(self) -> List[RangeInLine]: + """ + A list of RangeInLines for each token in .tokens, + where range.data is a Token object from asttokens: + https://asttokens.readthedocs.io/en/latest/api-index.html#asttokens.util.Token + """ + return [ + RangeInLine( + token.start[1], + token.end[1], + token, + ) + for token in self.tokens + ] + + @cached_property + def variable_ranges(self) -> List[RangeInLine]: + """ + A list of RangeInLines for each Variable that appears at least partially in this line. 
+ The data attribute of the range is a pair (variable, node) where node is the particular + AST node from the list variable.nodes that corresponds to this range. + """ + return [ + self.range_from_node(node, (variable, node)) + for variable, node in self.frame_info.variables_by_lineno[self.lineno] + ] + + @cached_property + def executing_node_ranges(self) -> List[RangeInLine]: + """ + A list of one or zero RangeInLines for the executing node of this frame. + The list will have one element if the node can be found and it overlaps this line. + """ + return self._raw_executing_node_ranges( + self.frame_info._executing_node_common_indent + ) + + def _raw_executing_node_ranges(self, common_indent=0) -> List[RangeInLine]: + ex = self.frame_info.executing + node = ex.node + if node: + rang = self.range_from_node(node, ex, common_indent) + if rang: + return [rang] + return [] + + def range_from_node( + self, node: ast.AST, data: Any, common_indent: int = 0 + ) -> Optional[RangeInLine]: + """ + If the given node overlaps with this line, return a RangeInLine + with the correct start and end and the given data. + Otherwise, return None. 
+ """ + start, end = line_range(node) + end -= 1 + if not (start <= self.lineno <= end): + return None + if start == self.lineno: + try: + range_start = node.first_token.start[1] + except AttributeError: + range_start = node.col_offset + else: + range_start = 0 + + range_start = max(range_start, common_indent) + + if end == self.lineno: + try: + range_end = node.last_token.end[1] + except AttributeError: + try: + range_end = node.end_col_offset + except AttributeError: + return None + else: + range_end = len(self.text) + + return RangeInLine(range_start, range_end, data) + + def render( + self, + markers: Iterable[MarkerInLine] = (), + *, + strip_leading_indent: bool = True, + pygmented: bool = False, + escape_html: bool = False + ) -> str: + """ + Produces a string for display consisting of .text + with the .strings of each marker inserted at the correct positions. + If strip_leading_indent is true (the default) then leading spaces + common to all lines in this frame will be excluded. + """ + if pygmented and self.frame_info.scope: + assert_(not markers, ValueError("Cannot use pygmented with markers")) + start_line, lines = self.frame_info._pygmented_scope_lines + result = lines[self.lineno - start_line] + if strip_leading_indent: + result = result.replace(self.text[:self.leading_indent], "", 1) + return result + + text = self.text + + # This just makes the loop below simpler + markers = list(markers) + [MarkerInLine(position=len(text), is_start=False, string='')] + + markers.sort(key=lambda t: t[:2]) + + parts = [] + if strip_leading_indent: + start = self.leading_indent + else: + start = 0 + original_start = start + + for marker in markers: + text_part = text[start:marker.position] + if escape_html: + text_part = html.escape(text_part) + parts.append(text_part) + parts.append(marker.string) + + # Ensure that start >= leading_indent + start = max(marker.position, original_start) + return ''.join(parts) + + +def markers_from_ranges( + ranges: 
Iterable[RangeInLine], + converter: Callable[[RangeInLine], Optional[Tuple[str, str]]], +) -> List[MarkerInLine]: + """ + Helper to create MarkerInLines given some RangeInLines. + converter should be a function accepting a RangeInLine returning + either None (which is ignored) or a pair of strings which + are used to create two markers included in the returned list. + """ + markers = [] + for rang in ranges: + converted = converter(rang) + if converted is None: + continue + + start_string, end_string = converted + if not (isinstance(start_string, str) and isinstance(end_string, str)): + raise TypeError("converter should return None or a pair of strings") + + markers += [ + MarkerInLine(position=rang.start, is_start=True, string=start_string), + MarkerInLine(position=rang.end, is_start=False, string=end_string), + ] + return markers + + +def style_with_executing_node(style, modifier): + from pygments.styles import get_style_by_name + if isinstance(style, str): + style = get_style_by_name(style) + + class NewStyle(style): + for_executing_node = True + + styles = { + **style.styles, + **{ + k.ExecutingNode: v + " " + modifier + for k, v in style.styles.items() + } + } + + return NewStyle + + +class RepeatedFrames: + """ + A sequence of consecutive stack frames which shouldn't be displayed because + the same code and line number were repeated many times in the stack, e.g. + because of deep recursion. + + Attributes: + - frames: list of raw frame or traceback objects + - frame_keys: list of tuples (frame.f_code, lineno) extracted from the frame objects. + It's this information from the frames that is used to determine + whether two frames should be considered similar (i.e. repeating). 
+ - description: A string briefly describing frame_keys + """ + def __init__( + self, + frames: List[Union[FrameType, TracebackType]], + frame_keys: List[Tuple[CodeType, int]], + ): + self.frames = frames + self.frame_keys = frame_keys + + @cached_property + def description(self) -> str: + """ + A string briefly describing the repeated frames, e.g. + my_function at line 10 (100 times) + """ + counts = sorted(Counter(self.frame_keys).items(), + key=lambda item: (-item[1], item[0][0].co_name)) + return ', '.join( + '{name} at line {lineno} ({count} times)'.format( + name=Source.for_filename(code.co_filename).code_qualname(code), + lineno=lineno, + count=count, + ) + for (code, lineno), count in counts + ) + + def __repr__(self): + return '<{self.__class__.__name__} {self.description}>'.format(self=self) + + +class FrameInfo(object): + """ + Information about a frame! + Pass either a frame object or a traceback object, + and optionally an Options object to configure. + + Or use the classmethod FrameInfo.stack_data() for an iterator of FrameInfo and + RepeatedFrames objects. + + Attributes: + - frame: an actual stack frame object, either frame_or_tb or frame_or_tb.tb_frame + - options + - code: frame.f_code + - source: a Source object + - filename: a hopefully absolute file path derived from code.co_filename + - scope: the AST node of the innermost function, class or module being executed + - lines: a list of Line/LineGap objects to display, determined by options + - executing: an Executing object from the `executing` library, which has: + - .node: the AST node being executed in this frame, or None if it's unknown + - .statements: a set of one or more candidate statements (AST nodes, probably just one) + currently being executed in this frame. + - .code_qualname(): the __qualname__ of the function or class being executed, + or just the code name. 
+ + Properties returning one or more pieces of source code (ranges of lines): + - scope_pieces: all the pieces in the scope + - included_pieces: a subset of scope_pieces determined by options + - executing_piece: the piece currently being executed in this frame + + Properties returning lists of Variable objects: + - variables: all variables in the scope + - variables_by_lineno: variables organised into lines + - variables_in_lines: variables contained within FrameInfo.lines + - variables_in_executing_piece: variables contained within FrameInfo.executing_piece + """ + def __init__( + self, + frame_or_tb: Union[FrameType, TracebackType], + options: Optional[Options] = None, + ): + self.executing = Source.executing(frame_or_tb) + frame, self.lineno = frame_and_lineno(frame_or_tb) + self.frame = frame + self.code = frame.f_code + self.options = options or Options() # type: Options + self.source = self.executing.source # type: Source + + def __repr__(self): + return "{self.__class__.__name__}({self.frame})".format(self=self) + + @classmethod + def stack_data( + cls, + frame_or_tb: Union[FrameType, TracebackType], + options: Optional[Options] = None, + *, + collapse_repeated_frames: bool = True + ) -> Iterator[Union['FrameInfo', RepeatedFrames]]: + """ + An iterator of FrameInfo and RepeatedFrames objects representing + a full traceback or stack. Similar consecutive frames are collapsed into RepeatedFrames + objects, so always check what type of object has been yielded. + + Pass either a frame object or a traceback object, + and optionally an Options object to configure. 
+ """ + stack = list(iter_stack(frame_or_tb)) + + # Reverse the stack from a frame so that it's in the same order + # as the order from a traceback, which is the order of a printed + # traceback when read top to bottom (most recent call last) + if is_frame(frame_or_tb): + stack = stack[::-1] + + def mapper(f): + return cls(f, options) + + if not collapse_repeated_frames: + yield from map(mapper, stack) + return + + def _frame_key(x): + frame, lineno = frame_and_lineno(x) + return frame.f_code, lineno + + yield from collapse_repeated( + stack, + mapper=mapper, + collapser=RepeatedFrames, + key=_frame_key, + ) + + @cached_property + def scope_pieces(self) -> List[range]: + """ + All the pieces (ranges of lines) contained in this object's .scope, + unless there is no .scope (because the source isn't valid Python syntax) + in which case it returns all the pieces in the source file, each containing one line. + """ + if not self.scope: + return self.source.pieces + + scope_start, scope_end = line_range(self.scope) + return [ + piece + for piece in self.source.pieces + if scope_start <= piece.start and piece.stop <= scope_end + ] + + @cached_property + def filename(self) -> str: + """ + A hopefully absolute file path derived from .code.co_filename, + the current working directory, and sys.path. + Code based on ipython. + """ + result = self.code.co_filename + + if ( + os.path.isabs(result) or + ( + result.startswith("<") and + result.endswith(">") + ) + ): + return result + + # Try to make the filename absolute by trying all + # sys.path entries (which is also what linecache does) + # as well as the current working directory + for dirname in ["."] + list(sys.path): + try: + fullname = os.path.join(dirname, result) + if os.path.isfile(fullname): + return os.path.abspath(fullname) + except Exception: + # Just in case that sys.path contains very + # strange entries... 
+ pass + + return result + + @cached_property + def executing_piece(self) -> range: + """ + The piece (range of lines) containing the line currently being executed + by the interpreter in this frame. + """ + return only( + piece + for piece in self.scope_pieces + if self.lineno in piece + ) + + @cached_property + def included_pieces(self) -> List[range]: + """ + The list of pieces (ranges of lines) to display for this frame. + Consists of .executing_piece, surrounding context pieces + determined by .options.before and .options.after, + and the function signature if a function is being executed and + .options.include_signature is True (in which case this might not + be a contiguous range of pieces). + Always a subset of .scope_pieces. + """ + scope_pieces = self.scope_pieces + if not self.scope_pieces: + return [] + + pos = scope_pieces.index(self.executing_piece) + pieces_start = max(0, pos - self.options.before) + pieces_end = pos + 1 + self.options.after + pieces = scope_pieces[pieces_start:pieces_end] + + if ( + self.options.include_signature + and not self.code.co_name.startswith('<') + and isinstance(self.scope, (ast.FunctionDef, ast.AsyncFunctionDef)) + and pieces_start > 0 + ): + pieces.insert(0, scope_pieces[0]) + + return pieces + + @cached_property + def _executing_node_common_indent(self) -> int: + """ + The common minimal indentation shared by the markers intended + for an exception node that spans multiple lines. + + Intended to be used only internally. + """ + indents = [] + lines = [line for line in self.lines if isinstance(line, Line)] + + for line in lines: + for rang in line._raw_executing_node_ranges(): + begin_text = len(line.text) - len(line.text.lstrip()) + indent = max(rang.start, begin_text) + indents.append(indent) + + return min(indents) if indents else 0 + + @cached_property + def lines(self) -> List[Union[Line, LineGap]]: + """ + A list of lines to display, determined by options. 
+ The objects yielded either have type Line or are the singleton LINE_GAP. + Always check the type that you're dealing with when iterating. + + LINE_GAP can be created in two ways: + - by truncating a piece of context that's too long, determined by + .options.max_lines_per_piece + - immediately after the signature piece if Options.include_signature is true + and the following piece isn't already part of the included pieces. + + The Line objects are all within the ranges from .included_pieces. + """ + pieces = self.included_pieces + if not pieces: + return [] + + result = [] + for i, piece in enumerate(pieces): + if ( + i == 1 + and self.scope + and pieces[0] == self.scope_pieces[0] + and pieces[1] != self.scope_pieces[1] + ): + result.append(LINE_GAP) + + lines = [Line(self, i) for i in piece] # type: List[Line] + if piece != self.executing_piece: + lines = truncate( + lines, + max_length=self.options.max_lines_per_piece, + middle=[LINE_GAP], + ) + result.extend(lines) + + real_lines = [ + line + for line in result + if isinstance(line, Line) + ] + + text = "\n".join( + line.text + for line in real_lines + ) + dedented_lines = dedent(text).splitlines() + leading_indent = len(real_lines[0].text) - len(dedented_lines[0]) + for line in real_lines: + line.leading_indent = leading_indent + + return result + + @cached_property + def scope(self) -> Optional[ast.AST]: + """ + The AST node of the innermost function, class or module being executed. + """ + if not self.source.tree or not self.executing.statements: + return None + + stmt = list(self.executing.statements)[0] + while True: + # Get the parent first in case the original statement is already + # a function definition, e.g. 
if we're calling a decorator + # In that case we still want the surrounding scope, not that function + stmt = stmt.parent + if isinstance(stmt, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)): + return stmt + + @cached_property + def _pygmented_scope_lines(self) -> Optional[Tuple[int, List[str]]]: + # noinspection PyUnresolvedReferences + from pygments.formatters import HtmlFormatter + + formatter = self.options.pygments_formatter + scope = self.scope + assert_(formatter, ValueError("Must set a pygments formatter in Options")) + assert_(scope) + + if isinstance(formatter, HtmlFormatter): + formatter.nowrap = True + + atok = self.source.asttokens() + node = self.executing.node + if node and getattr(formatter.style, "for_executing_node", False): + scope_start = atok.get_text_range(scope)[0] + start, end = atok.get_text_range(node) + start -= scope_start + end -= scope_start + ranges = [(start, end)] + else: + ranges = [] + + code = atok.get_text(scope) + lines = _pygmented_with_ranges(formatter, code, ranges) + + start_line = line_range(scope)[0] + + return start_line, lines + + @cached_property + def variables(self) -> List[Variable]: + """ + All Variable objects whose nodes are contained within .scope + and whose values could be safely evaluated by pure_eval. 
+ """ + if not self.scope: + return [] + + evaluator = Evaluator.from_frame(self.frame) + scope = self.scope + node_values = [ + pair + for pair in evaluator.find_expressions(scope) + if is_expression_interesting(*pair) + ] # type: List[Tuple[ast.AST, Any]] + + if isinstance(scope, (ast.FunctionDef, ast.AsyncFunctionDef)): + for node in ast.walk(scope.args): + if not isinstance(node, ast.arg): + continue + name = node.arg + try: + value = evaluator.names[name] + except KeyError: + pass + else: + node_values.append((node, value)) + + # Group equivalent nodes together + def get_text(n): + if isinstance(n, ast.arg): + return n.arg + else: + return self.source.asttokens().get_text(n) + + def normalise_node(n): + try: + # Add parens to avoid syntax errors for multiline expressions + return ast.parse('(' + get_text(n) + ')') + except Exception: + return n + + grouped = group_by_key_func( + node_values, + lambda nv: ast.dump(normalise_node(nv[0])), + ) + + result = [] + for group in grouped.values(): + nodes, values = zip(*group) + value = values[0] + text = get_text(nodes[0]) + if not text: + continue + result.append(Variable(text, nodes, value)) + + return result + + @cached_property + def variables_by_lineno(self) -> Mapping[int, List[Tuple[Variable, ast.AST]]]: + """ + A mapping from 1-based line numbers to lists of pairs: + - A Variable object + - A specific AST node from the variable's .nodes list that's + in the line at that line number. + """ + result = defaultdict(list) + for var in self.variables: + for node in var.nodes: + for lineno in range(*line_range(node)): + result[lineno].append((var, node)) + return result + + @cached_property + def variables_in_lines(self) -> List[Variable]: + """ + A list of Variable objects contained within the lines returned by .lines. 
+ """ + return unique_in_order( + var + for line in self.lines + if isinstance(line, Line) + for var, node in self.variables_by_lineno[line.lineno] + ) + + @cached_property + def variables_in_executing_piece(self) -> List[Variable]: + """ + A list of Variable objects contained within the lines + in the range returned by .executing_piece. + """ + return unique_in_order( + var + for lineno in self.executing_piece + for var, node in self.variables_by_lineno[lineno] + ) diff --git a/contrib/python/stack-data/stack_data/formatting.py b/contrib/python/stack-data/stack_data/formatting.py new file mode 100644 index 0000000000..b37da99b62 --- /dev/null +++ b/contrib/python/stack-data/stack_data/formatting.py @@ -0,0 +1,206 @@ +import inspect +import sys +import traceback +from types import FrameType, TracebackType +from typing import Union, Iterable + +from stack_data import style_with_executing_node, Options, Line, FrameInfo, LINE_GAP, Variable, RepeatedFrames +from stack_data.utils import assert_ + + +class Formatter: + def __init__( + self, *, + options=Options(), + pygmented=False, + show_executing_node=True, + pygments_formatter_cls=None, + pygments_formatter_kwargs=None, + pygments_style="monokai", + executing_node_modifier="bg:#005080", + executing_node_underline="^", + current_line_indicator="-->", + line_gap_string="(...)", + show_variables=False, + use_code_qualname=True, + show_linenos=True, + strip_leading_indent=True, + html=False, + chain=True, + collapse_repeated_frames=True + ): + if pygmented and not options.pygments_formatter: + if show_executing_node: + pygments_style = style_with_executing_node( + pygments_style, executing_node_modifier + ) + + if pygments_formatter_cls is None: + from pygments.formatters.terminal256 import Terminal256Formatter \ + as pygments_formatter_cls + + options.pygments_formatter = pygments_formatter_cls( + style=pygments_style, + **pygments_formatter_kwargs or {}, + ) + + self.pygmented = pygmented + self.show_executing_node = 
show_executing_node + assert_( + len(executing_node_underline) == 1, + ValueError("executing_node_underline must be a single character"), + ) + self.executing_node_underline = executing_node_underline + self.current_line_indicator = current_line_indicator or "" + self.line_gap_string = line_gap_string + self.show_variables = show_variables + self.show_linenos = show_linenos + self.use_code_qualname = use_code_qualname + self.strip_leading_indent = strip_leading_indent + self.html = html + self.chain = chain + self.options = options + self.collapse_repeated_frames = collapse_repeated_frames + + def set_hook(self): + def excepthook(_etype, evalue, _tb): + self.print_exception(evalue) + + sys.excepthook = excepthook + + def print_exception(self, e=None, *, file=None): + self.print_lines(self.format_exception(e), file=file) + + def print_stack(self, frame_or_tb=None, *, file=None): + if frame_or_tb is None: + frame_or_tb = inspect.currentframe().f_back + + self.print_lines(self.format_stack(frame_or_tb), file=file) + + def print_lines(self, lines, *, file=None): + if file is None: + file = sys.stderr + for line in lines: + print(line, file=file, end="") + + def format_exception(self, e=None) -> Iterable[str]: + if e is None: + e = sys.exc_info()[1] + + if self.chain: + if e.__cause__ is not None: + yield from self.format_exception(e.__cause__) + yield traceback._cause_message + elif (e.__context__ is not None + and not e.__suppress_context__): + yield from self.format_exception(e.__context__) + yield traceback._context_message + + yield 'Traceback (most recent call last):\n' + yield from self.format_stack(e.__traceback__) + yield from traceback.format_exception_only(type(e), e) + + def format_stack(self, frame_or_tb=None) -> Iterable[str]: + if frame_or_tb is None: + frame_or_tb = inspect.currentframe().f_back + + yield from self.format_stack_data( + FrameInfo.stack_data( + frame_or_tb, + self.options, + collapse_repeated_frames=self.collapse_repeated_frames, + ) + ) + 
+ def format_stack_data( + self, stack: Iterable[Union[FrameInfo, RepeatedFrames]] + ) -> Iterable[str]: + for item in stack: + if isinstance(item, FrameInfo): + yield from self.format_frame(item) + else: + yield self.format_repeated_frames(item) + + def format_repeated_frames(self, repeated_frames: RepeatedFrames) -> str: + return ' [... skipping similar frames: {}]\n'.format( + repeated_frames.description + ) + + def format_frame(self, frame: Union[FrameInfo, FrameType, TracebackType]) -> Iterable[str]: + if not isinstance(frame, FrameInfo): + frame = FrameInfo(frame, self.options) + + yield self.format_frame_header(frame) + + for line in frame.lines: + if isinstance(line, Line): + yield self.format_line(line) + else: + assert_(line is LINE_GAP) + yield self.line_gap_string + "\n" + + if self.show_variables: + try: + yield from self.format_variables(frame) + except Exception: + pass + + def format_frame_header(self, frame_info: FrameInfo) -> str: + return ' File "{frame_info.filename}", line {frame_info.lineno}, in {name}\n'.format( + frame_info=frame_info, + name=( + frame_info.executing.code_qualname() + if self.use_code_qualname else + frame_info.code.co_name + ), + ) + + def format_line(self, line: Line) -> str: + result = "" + if self.current_line_indicator: + if line.is_current: + result = self.current_line_indicator + else: + result = " " * len(self.current_line_indicator) + result += " " + + if self.show_linenos: + result += "{:4} | ".format(line.lineno) + + result = result or " " + + prefix = result + + result += line.render( + pygmented=self.pygmented, + escape_html=self.html, + strip_leading_indent=self.strip_leading_indent, + ) + "\n" + + if self.show_executing_node and not self.pygmented: + for line_range in line.executing_node_ranges: + start = line_range.start - line.leading_indent + end = line_range.end - line.leading_indent + result += ( + " " * (start + len(prefix)) + + self.executing_node_underline * (end - start) + + "\n" + ) + + return result 
    def format_variables(self, frame_info: FrameInfo) -> Iterable[str]:
        # Yield one "name = repr" line per variable, sorted by name;
        # failures for individual variables are swallowed (best-effort).
        for var in sorted(frame_info.variables, key=lambda v: v.name):
            try:
                yield self.format_variable(var) + "\n"
            except Exception:
                pass

    def format_variable(self, var: Variable) -> str:
        # "name = value" without a trailing newline (the caller adds it).
        return "{} = {}".format(
            var.name,
            self.format_variable_value(var.value),
        )

    def format_variable_value(self, value) -> str:
        # Hook point: subclasses may override to truncate or prettify values.
        return repr(value)
diff --git a/contrib/python/stack-data/stack_data/utils.py b/contrib/python/stack-data/stack_data/utils.py
new file mode 100644
index 0000000000..71d55eadc1
--- /dev/null
+++ b/contrib/python/stack-data/stack_data/utils.py
@@ -0,0 +1,172 @@
import ast
import itertools
import types
from collections import OrderedDict, Counter, defaultdict
from types import FrameType, TracebackType
from typing import (
    Iterator, List, Tuple, Iterable, Callable, Union,
    TypeVar, Mapping,
)

T = TypeVar('T')
R = TypeVar('R')


def truncate(seq, max_length: int, middle):
    """
    If `seq` is longer than `max_length`, shorten it to exactly `max_length`
    by keeping its two ends and splicing `middle` (e.g. "...") in between.
    Works on any sliceable sequence with `+` (str, bytes, list, ...).
    """
    if len(seq) > max_length:
        right = (max_length - len(middle)) // 2
        left = max_length - len(middle) - right
        seq = seq[:left] + middle + seq[-right:]
    return seq


def unique_in_order(it: Iterable[T]) -> List[T]:
    # Deduplicate while preserving first-seen order (dict keys keep order).
    return list(OrderedDict.fromkeys(it))


def line_range(node: ast.AST) -> Tuple[int, int]:
    """
    Returns a pair of numbers representing a half open range
    (i.e. suitable as arguments to the `range()` builtin)
    of line numbers of the given AST nodes.
    """
    try:
        # Prefer asttokens annotations (first_token/last_token) when present.
        return (
            node.first_token.start[0],
            node.last_token.end[0] + 1,
        )
    except AttributeError:
        # Fall back to the plain ast attributes; end_lineno exists on 3.8+.
        return (
            node.lineno,
            getattr(node, "end_lineno", node.lineno) + 1,
        )


def highlight_unique(lst: List[T]) -> Iterator[Tuple[T, bool]]:
    """
    Yield (item, highlighted) pairs in the original order. In runs of items
    that occur more than 3 times overall, only the first two and the last
    occurrence of each distinct value are highlighted; in rare runs,
    everything is highlighted.
    """
    counts = Counter(lst)

    for is_common, group in itertools.groupby(lst, key=lambda x: counts[x] > 3):
        if is_common:
            group = list(group)
            highlighted = [False] * len(group)

            def highlight_index(f):
                # f() returns an index into `group`, or raises ValueError
                # when the value has no further occurrence.
                try:
                    i = f()
                except ValueError:
                    return None
                highlighted[i] = True
                return i

            for item in set(group):
                # First two occurrences plus the last one (negative index).
                first = highlight_index(lambda: group.index(item))
                if first is not None:
                    highlight_index(lambda: group.index(item, first + 1))
                highlight_index(lambda: -1 - group[::-1].index(item))
        else:
            highlighted = itertools.repeat(True)

        yield from zip(group, highlighted)


def identity(x: T) -> T:
    return x


def collapse_repeated(lst, *, collapser, mapper=identity, key=identity):
    """
    Yield `mapper(item)` for items whose key is highlighted by
    highlight_unique, and one `collapser(items, keys)` result for each
    contiguous run of non-highlighted (i.e. repetitive) items.
    """
    keyed = list(map(key, lst))
    for is_highlighted, group in itertools.groupby(
            zip(lst, highlight_unique(keyed)),
            # t is (original, (keyed_value, highlighted)); group by the flag.
            key=lambda t: t[1][1],
    ):
        original_group, highlighted_group = zip(*group)
        if is_highlighted:
            yield from map(mapper, original_group)
        else:
            keyed_group, _ = zip(*highlighted_group)
            yield collapser(list(original_group), list(keyed_group))


def is_frame(frame_or_tb: Union[FrameType, TracebackType]) -> bool:
    # Raises (via assert_) if given anything but a frame or traceback.
    assert_(isinstance(frame_or_tb, (types.FrameType, types.TracebackType)))
    return isinstance(frame_or_tb, (types.FrameType,))


def iter_stack(frame_or_tb: Union[FrameType, TracebackType]) -> Iterator[Union[FrameType, TracebackType]]:
    # Walk outward from a frame (f_back) or inward along a traceback (tb_next).
    while frame_or_tb:
        yield frame_or_tb
        if is_frame(frame_or_tb):
            frame_or_tb = frame_or_tb.f_back
        else:
            frame_or_tb = frame_or_tb.tb_next


def frame_and_lineno(frame_or_tb: Union[FrameType, TracebackType]) -> Tuple[FrameType, int]:
    # Normalise a frame-or-traceback to a (frame, lineno) pair.
    if is_frame(frame_or_tb):
        return frame_or_tb, frame_or_tb.f_lineno
    else:
        return frame_or_tb.tb_frame, frame_or_tb.tb_lineno


def group_by_key_func(iterable: Iterable[T], key_func: Callable[[T], R]) -> Mapping[R, List[T]]:
    # noinspection PyUnresolvedReferences
    """
    Create a dictionary from an iterable such that the keys are the result of evaluating a key function on elements
    of the iterable and the values are lists of elements all of which correspond to the key.

    >>> def si(d): return sorted(d.items())
    >>> si(group_by_key_func("a bb ccc d ee fff".split(), len))
    [(1, ['a', 'd']), (2, ['bb', 'ee']), (3, ['ccc', 'fff'])]
    >>> si(group_by_key_func([-1, 0, 1, 3, 6, 8, 9, 2], lambda x: x % 2))
    [(0, [0, 6, 8, 2]), (1, [-1, 1, 3, 9])]
    """
    result = defaultdict(list)
    for item in iterable:
        result[key_func(item)].append(item)
    return result


class cached_property(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Deleting the attribute resets the property.

    Based on https://github.com/pydanny/cached-property/blob/master/cached_property.py
    """

    def __init__(self, func):
        self.__doc__ = func.__doc__
        self.func = func

    def cached_property_wrapper(self, obj, _cls):
        if obj is None:
            # Accessed on the class, not an instance.
            return self

        # Storing the value in obj.__dict__ shadows this descriptor,
        # so subsequent accesses are plain attribute lookups.
        value = obj.__dict__[self.func.__name__] = self.func(obj)
        return value

    __get__ = cached_property_wrapper


def _pygmented_with_ranges(formatter, code, ranges):
    # Highlight `code`, switching token types to <ttype>.ExecutingNode for any
    # token whose start offset falls inside one of the given (start, end) ranges,
    # so the executing node can be styled differently.
    import pygments
    from pygments.lexers import get_lexer_by_name

    class MyLexer(type(get_lexer_by_name("python3"))):
        def get_tokens(self, text):
            length = 0
            for ttype, value in super().get_tokens(text):
                if any(start <= length < end for start, end in ranges):
                    ttype = ttype.ExecutingNode
                length += len(value)
                yield ttype, value

    # stripnl=False keeps blank lines so line numbers stay aligned.
    lexer = MyLexer(stripnl=False)
    return pygments.highlight(code, lexer, formatter).splitlines()


def assert_(condition, error=""):
    # Like `assert`, but not stripped under -O; `error` may be a message
    # string (wrapped in AssertionError) or a ready exception instance.
    if not condition:
        if isinstance(error, str):
            error = AssertionError(error)
        raise error
diff --git a/contrib/python/stack-data/stack_data/version.py b/contrib/python/stack-data/stack_data/version.py
new file mode 100644
index 0000000000..7fd229a32b
--- /dev/null
+++ b/contrib/python/stack-data/stack_data/version.py
@@ -0,0 +1 @@
__version__ = '0.2.0'